diff --git a/.github/workflows/container_app_pr.yml b/.github/workflows/container_app_pr.yml index c3f9e7bdc0d..4a06cb567b0 100644 --- a/.github/workflows/container_app_pr.yml +++ b/.github/workflows/container_app_pr.yml @@ -35,14 +35,14 @@ jobs: # Note: Accessing, pushing tags etc. to GHCR will only succeed in upstream because secrets. - name: Login to Github Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ secrets.GHCR_USERNAME }} password: ${{ secrets.GHCR_TOKEN }} - name: Set up QEMU for multi-arch builds - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 # Get the image tag from either the command or default to branch name (Not used for now) #- name: Get the target tag name diff --git a/.github/workflows/container_app_push.yml b/.github/workflows/container_app_push.yml index afb4f6f874b..71ffffb5f48 100644 --- a/.github/workflows/container_app_push.yml +++ b/.github/workflows/container_app_push.yml @@ -126,20 +126,20 @@ jobs: # Depending on context, we push to different targets. Login accordingly. - if: github.event_name != 'pull_request' name: Log in to Docker Hub registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - if: ${{ github.event_name == 'pull_request' }} name: Login to Github Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ secrets.GHCR_USERNAME }} password: ${{ secrets.GHCR_TOKEN }} - name: Set up QEMU for multi-arch builds - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Re-set image tag based on branch (if master) if: ${{ github.ref_name == 'master' }} diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index eca8416732a..ebf79275bec 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -68,7 +68,7 @@ jobs: overwrite: true - name: Execute payara war deployment remotely - uses: appleboy/ssh-action@v1.0.0 + uses: appleboy/ssh-action@v1.2.0 env: INPUT_WAR_FILE: ${{ env.war_file }} with: diff --git a/README.md b/README.md index 77720453d5f..2303c001d2c 100644 --- a/README.md +++ b/README.md @@ -1,20 +1,81 @@ Dataverse® =============== -Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Dataverse team](https://dataverse.org/about) at the [Institute for Quantitative Social Science](https://iq.harvard.edu/) and the [Dataverse community][]). +![Dataverse-logo](https://github.com/IQSS/dataverse-frontend/assets/7512607/6c4d79e4-7be5-4102-88bd-dfa167dc79d3) -[dataverse.org][] is our home on the web and shows a map of Dataverse installations around the world, a list of [features][], [integrations][] that have been made possible through [REST APIs][], our [project board][], our development [roadmap][], and more. +## Table of Contents -We maintain a demo site at [demo.dataverse.org][] which you are welcome to use for testing and evaluating Dataverse. +1. [❓ What is Dataverse?](#what-is-dataverse) +2. [✔ Try Dataverse](#try-dataverse) +3. [🌐 Features, Integrations, Roadmaps, and More](#website) +4. [📥 Installation](#installation) +5. [🐘 Community and Support](#community-and-support) +6. [🧑‍💻️ Contributing](#contributing) +7. 
[โš–๏ธ Legal Information](#legal-informations) -To install Dataverse, please see our [Installation Guide][] which will prompt you to download our [latest release][]. Docker users should consult the [Container Guide][]. + -To discuss Dataverse with the community, please join our [mailing list][], participate in a [community call][], chat with us at [chat.dataverse.org][], or attend our annual [Dataverse Community Meeting][]. +## โ“ What is Dataverse? -We love contributors! Please see our [Contributing Guide][] for ways you can help. +Welcome to Dataverseยฎ, the [open source][] software platform designed for sharing, finding, citing, and preserving research data. Developed by the Dataverse team at the [Institute for Quantitative Social Science](https://iq.harvard.edu/) and the [Dataverse community][], our platform makes it easy for research organizations to host, manage, and share their data with the world. + + + +## โœ” Try Dataverse + +We invite you to explore our demo site at [demo.dataverse.org][]. This site is ideal for testing and evaluating Dataverse in a risk-free environment. + + + +## ๐ŸŒ Features, Integrations, Roadmaps, and More + +Visit [dataverse.org][], our home on the web, for a comprehensive overview of Dataverse. Here, you will find: + +- An interactive map showcasing Dataverse installations worldwide. +- A detailed list of [features][]. +- Information on [integrations][] that have been made possible through our [REST APIs][]. +- Our [project board][] and development [roadmap][]. +- News, events, and more. + + + +## ๐Ÿ“ฅ Installation + +Ready to get started? Follow our [Installation Guide][] to download and install the latest release of Dataverse. + +If you are using Docker, please refer to our [Container Guide][] for detailed instructions. + + + +## ๐Ÿ˜ Community and Support + +Engage with the vibrant Dataverse community through various channels: + +- **[Mailing List][]**: Join the conversation on our [mailing list][]. +- **[Community Calls][]**: Participate in our regular [community calls][] to discuss new features, ask questions, and share your experiences. +- **[Chat][]**: Connect with us and other users in real-time at [dataverse.zulipchat.com][]. +- **[Dataverse Community Meeting][]**: Attend our annual [Dataverse Community Meeting][] to network, learn, and collaborate with peers and experts. +- **[DataverseTV][]**: Watch the video content from the Dataverse community on [DataverseTV][] and on [Harvard's IQSS YouTube channel][]. + + +## ๐Ÿง‘โ€๐Ÿ’ป๏ธ Contribute to Dataverse + +We love contributors! Whether you are a developer, researcher, or enthusiast, there are many ways you can help. + +Visit our [Contributing Guide][] to learn how you can get involved. + +Join us in building and enhancing Dataverse to make research data more accessible and impactful. Your support and participation are crucial to our success! + + +## โš–๏ธ Legal Information Dataverse is a trademark of President and Fellows of Harvard College and is registered in the United States. +--- +For more detailed information, visit our website at [dataverse.org][]. + +Feel free to [reach out] with any questions or feedback. Happy researching! 
+ [![Dataverse Project logo](src/main/webapp/resources/images/dataverseproject_logo.jpg "Dataverse Project")](http://dataverse.org) [![API Test Status](https://jenkins.dataverse.org/buildStatus/icon?job=IQSS-dataverse-develop&subject=API%20Test%20Status)](https://jenkins.dataverse.org/job/IQSS-dataverse-develop/) @@ -37,6 +98,11 @@ Dataverse is a trademark of President and Fellows of Harvard College and is regi [Contributing Guide]: CONTRIBUTING.md [mailing list]: https://groups.google.com/group/dataverse-community [community call]: https://dataverse.org/community-calls -[chat.dataverse.org]: https://chat.dataverse.org +[Chat]: https://dataverse.zulipchat.com +[dataverse.zulipchat.com]: https://dataverse.zulipchat.com [Dataverse Community Meeting]: https://dataverse.org/events [open source]: LICENSE.md +[community calls]: https://dataverse.org/community-calls +[DataverseTV]: https://dataverse.org/dataversetv +[Harvard's IQSS YouTube channel]: https://www.youtube.com/@iqssatharvarduniversity8672 +[reach out]: https://dataverse.org/contact diff --git a/conf/solr/schema.xml b/conf/solr/schema.xml index f4121de97c1..543bdc698ec 100644 --- a/conf/solr/schema.xml +++ b/conf/solr/schema.xml @@ -254,6 +254,21 @@ WARNING: Do not remove the following include guards if you intend to use the neat helper scripts we provide. --> + + + + + + + + + + + + + + + @@ -277,38 +292,38 @@ - - - + + + - - - - + + + + - - - + + + - - + + - + - - - + + + - + @@ -316,7 +331,7 @@ - + @@ -328,12 +343,12 @@ - + - + @@ -353,19 +368,19 @@ - + - + - + @@ -389,28 +404,28 @@ - - + + - + - + - - + + + - @@ -496,6 +511,21 @@ WARNING: Do not remove the following include guards if you intend to use the neat helper scripts we provide. --> + + + + + + + + + + + + + + + @@ -538,12 +568,12 @@ - + @@ -574,8 +604,8 @@ - + @@ -599,9 +629,9 @@ - + @@ -631,13 +661,13 @@ - - + + - + @@ -649,10 +679,10 @@ + - @@ -819,7 +849,9 @@ - + + + diff --git a/doc/release-notes/10108-stata-version-detected-in-direct-upload.md b/doc/release-notes/10108-stata-version-detected-in-direct-upload.md new file mode 100644 index 00000000000..a69680e1c48 --- /dev/null +++ b/doc/release-notes/10108-stata-version-detected-in-direct-upload.md @@ -0,0 +1 @@ +The version of Stata files is now detected during S3 direct upload (as it was for normal uploads), allowing ingest of Stata 14 and 15 files that have been uploaded directly. See [the guides](https://dataverse-guide--11054.org.readthedocs.build/en/11054/developers/big-data-support.html#features-that-are-disabled-if-s3-direct-upload-is-enabled), #10108, and #11054. diff --git a/doc/release-notes/10171-exlude-metadatablocks.md b/doc/release-notes/10171-exlude-metadatablocks.md new file mode 100644 index 00000000000..7c0a9b030eb --- /dev/null +++ b/doc/release-notes/10171-exlude-metadatablocks.md @@ -0,0 +1,4 @@ +Extension of API `{id}/versions` and `{id}/versions/{versionId}` with an optional ``excludeMetadataBlocks`` parameter, +that specifies whether the metadataBlocks should be listed in the output. It defaults to ``false``, preserving backward +compatibility. (Note that for a dataset with a large number of versions and/or metadataBlocks having the metadata blocks +included can dramatically increase the volume of the output). See also [the guides](https://dataverse-guide--10778.org.readthedocs.build/en/10778/api/native-api.html#list-versions-of-a-dataset), #10778, and #10171. 
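For example (a sketch; substitute your own server URL, API token, and dataset id), the new parameter can be passed as a query string on the existing versions endpoint:

```shell
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org
export ID=24

# List all versions of the dataset without the metadataBlocks section in the output
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/versions?excludeMetadataBlocks=true"
```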
diff --git a/doc/release-notes/10241-new-solr-client.md b/doc/release-notes/10241-new-solr-client.md new file mode 100644 index 00000000000..67ccdd4f184 --- /dev/null +++ b/doc/release-notes/10241-new-solr-client.md @@ -0,0 +1,9 @@ +[HttpSolrClient](https://solr.apache.org/docs/9_4_1/solrj/org/apache/solr/client/solrj/impl/HttpSolrClient.html) is deprecated as of Solr 9 and will be removed in a future major release of Solr. It is recommended to use [Http2SolrClient](https://solr.apache.org/docs/9_4_1/solrj/org/apache/solr/client/solrj/impl/Http2SolrClient.html) instead. + +The [Solr documentation](https://solr.apache.org/guide/solr/latest/deployment-guide/solrj.html#types-of-solrclients) describes it as an _async, non-blocking and general-purpose client that leverages HTTP/2 using the Jetty Http library_. + +In Solr 9.4.1, Http2SolrClient was still marked as experimental; as of Solr 9.6, that designation has been removed. + +ConcurrentUpdateHttp2SolrClient is now also used in some cases, as it is expected to be more efficient for indexing. + +For more information, see issue [#10161](https://github.com/IQSS/dataverse/issues/10161) and pull request [#10241](https://github.com/IQSS/dataverse/pull/10241). diff --git a/doc/release-notes/10304-add-move-dataverse-collection.md b/doc/release-notes/10304-add-move-dataverse-collection.md new file mode 100644 index 00000000000..1ddeb526c61 --- /dev/null +++ b/doc/release-notes/10304-add-move-dataverse-collection.md @@ -0,0 +1,5 @@ +### Move a collection from the dashboard + +In addition to the [Move a Dataverse Collection](https://guides.dataverse.org/en/latest/admin/dataverses-datasets.html#move-a-dataverse-collection) API, it is now possible for a Dataverse administrator to move a collection from the Dataverse dashboard. + +For more information, see #10304. \ No newline at end of file diff --git a/doc/release-notes/10320-cookie-consent.md b/doc/release-notes/10320-cookie-consent.md new file mode 100644 index 00000000000..a3fd7f5466b --- /dev/null +++ b/doc/release-notes/10320-cookie-consent.md @@ -0,0 +1,3 @@ +## Cookie Consent Popup (GDPR) + +For compliance with GDPR and other privacy regulations, advice on adding a cookie consent popup has been added to the guides. See the new [cookie consent](https://dataverse-guide--10320.org.readthedocs.build/en/10320/installation/config.html#adding-cookie-consent-for-gdpr-etc) section and #10320. diff --git a/doc/release-notes/10340-forbidden.md b/doc/release-notes/10340-forbidden.md new file mode 100644 index 00000000000..5997f717d64 --- /dev/null +++ b/doc/release-notes/10340-forbidden.md @@ -0,0 +1,3 @@ +### Backward Incompatible Changes + +The [Show Role](https://dataverse-guide--11116.org.readthedocs.build/en/11116/api/native-api.html#show-role) API endpoint was returning 401 Unauthorized when a permission check failed. This has been corrected to return 403 Forbidden instead. That is, the API token is known to be good (401 otherwise) but the user lacks permission (403 is now sent). See also the [API Changelog](https://dataverse-guide--11116.org.readthedocs.build/en/11116/api/changelog.html), #10340, and #11116. 
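As an illustration (a sketch, assuming a role id of 1 and a valid API token that lacks the required permission), a Show Role call like the following now receives 403 Forbidden rather than 401 Unauthorized:

```shell
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org
export ID=1

# -i prints the HTTP status line; a good token without permission now yields 403 instead of 401
curl -i -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/roles/$ID"
```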
diff --git a/doc/release-notes/10384-link.md b/doc/release-notes/10384-link.md new file mode 100644 index 00000000000..7092241adf4 --- /dev/null +++ b/doc/release-notes/10384-link.md @@ -0,0 +1,3 @@ +### Broken Link in Email When Users Request Access to Files + +When users request access to a files, the people who have permission to grant access receive an email with a link in it that didn't work due to a trailing period (full stop) right next to the link (e.g. `https://demo.dataverse.org/permissions-manage-files.xhtml?id=9.`) A space has been added to fix this. See #10384 and #11115. diff --git a/doc/release-notes/10472-review-modify-jsonprinter-for-datasetfieldtype.md b/doc/release-notes/10472-review-modify-jsonprinter-for-datasetfieldtype.md new file mode 100644 index 00000000000..f0b9c30c9cd --- /dev/null +++ b/doc/release-notes/10472-review-modify-jsonprinter-for-datasetfieldtype.md @@ -0,0 +1,8 @@ +### Json Printer Bug fix + +DatasetFieldTypes in MetadataBlock response that are also a child of another DatasetFieldType were being returned twice. The child DatasetFieldType was included in the "fields" object as well as in the "childFields" of it's parent DatasetFieldType. This fix suppresses the standalone object so only one instance of the DatasetFieldType is returned (in the "childFields" of its parent). +This fix changes the Json output of the API `/api/dataverses/{dataverseAlias}/metadatablocks` + +## Backward Incompatible Changes + +The Json response of API call `/api/dataverses/{dataverseAlias}/metadatablocks` will no longer include the DatasetFieldTypes in "fields" if they are children of another DatasetFieldType. The child DatasetFieldType will only be included in the "childFields" of it's parent DatasetFieldType. diff --git a/doc/release-notes/10516_legacy_permalink_config_fix.md b/doc/release-notes/10516_legacy_permalink_config_fix.md new file mode 100644 index 00000000000..d78395252d4 --- /dev/null +++ b/doc/release-notes/10516_legacy_permalink_config_fix.md @@ -0,0 +1 @@ +Support for legacy configuration of a PermaLink PID provider, e.g. using the :Protocol,:Authority, and :Shoulder settings, is fixed. \ No newline at end of file diff --git a/doc/release-notes/10519-dataset-types.md b/doc/release-notes/10519-dataset-types.md new file mode 100644 index 00000000000..99cf79a796f --- /dev/null +++ b/doc/release-notes/10519-dataset-types.md @@ -0,0 +1,12 @@ +## Dataset Types can be linked to Metadata Blocks + +Metadata blocks (e.g. "CodeMeta") can now be linked to dataset types (e.g. "software") using new superuser APIs. + +This will have the following effects for the APIs used by the new Dataverse UI ( https://github.com/IQSS/dataverse-frontend ): + +- The list of fields shown when creating a dataset will include fields marked as "displayoncreate" (in the tsv/database) for metadata blocks (e.g. "CodeMeta") that are linked to the dataset type (e.g. "software") that is passed to the API. +- The metadata blocks shown when editing a dataset will include metadata blocks (e.g. "CodeMeta") that are linked to the dataset type (e.g. "software") that is passed to the API. + +Mostly in order to write automated tests for the above, a [displayOnCreate](https://dataverse-guide--11001.org.readthedocs.build/en/11001/api/native-api.html#set-displayoncreate-for-a-dataset-field) API endpoint has been added. 
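For example, the effect on dataset creation can be checked with the collection metadata blocks API (a sketch using the optional `datasetType` parameter documented later in this changeset; substitute your own collection alias and dataset type):

```shell
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org
export DATASET_TYPE=software

# Fields shown on dataset creation for the "root" collection, including blocks linked to the "software" dataset type
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/root/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true&datasetType=$DATASET_TYPE"
```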
+ +For more information, see the guides ([overview](https://dataverse-guide--11001.org.readthedocs.build/en/11001/user/dataset-management.html#dataset-types), [new APIs](https://dataverse-guide--11001.org.readthedocs.build/en/11001/api/native-api.html#link-dataset-type-with-metadata-blocks)), #10519 and #11001. diff --git a/doc/release-notes/10542-signposting.md b/doc/release-notes/10542-signposting.md new file mode 100644 index 00000000000..f847e6ba94a --- /dev/null +++ b/doc/release-notes/10542-signposting.md @@ -0,0 +1,11 @@ +# Signposting Output Now Contains Links to All Dataset Metadata Export Formats + +When Signposting was added in Dataverse 5.14 (#8981), it only provided links for the `schema.org` metadata export format. + +The output of HEAD, GET, and the Signposting "linkset" API have all been updated to include links to all available dataset metadata export formats (including any external exporters, such as Croissant, that have been enabled). + +This provides a lightweight machine-readable way to first retrieve a list of links (via a HTTP HEAD request, for example) to each available metadata export format and then follow up with a request for the export format of interest. + +In addition, the content type for the `schema.org` dataset metadata export format has been corrected. It was `application/json` and now it is `application/ld+json`. + +See also [the docs](https://preview.guides.gdcc.io/en/develop/api/native-api.html#retrieve-signposting-information) and #10542. diff --git a/doc/release-notes/10714-access-requests-missing-since-upgrade-v6-0.md b/doc/release-notes/10714-access-requests-missing-since-upgrade-v6-0.md new file mode 100644 index 00000000000..a220c150791 --- /dev/null +++ b/doc/release-notes/10714-access-requests-missing-since-upgrade-v6-0.md @@ -0,0 +1,6 @@ +### Flyway Script added to Fix File Access Requests when upgrading from Dataverse 6.0 + +Database update script added to prevent duplicate keys when upgrading from V6.0 +This script will delete access requests made after the initial request and will set the initial request to "Created" + +See: https://github.com/IQSS/dataverse/issues/10714 diff --git a/doc/release-notes/10790-fix pid handling in exporters and citations.md b/doc/release-notes/10790-fix pid handling in exporters and citations.md new file mode 100644 index 00000000000..da430ed9671 --- /dev/null +++ b/doc/release-notes/10790-fix pid handling in exporters and citations.md @@ -0,0 +1,16 @@ +### Improvements to PID formatting in exports and citations + +Multiple small issues with the formatting of PIDs in the +DDI exporters, and EndNote and BibTeX citation formats have +been addressed. These should improve the ability to import +Dataverse citations into reference managers and fix potential +issues harvesting datasets using PermaLinks. + +Backward Incompatibility + +Changes to PID formatting occur in the DDI/DDI Html export formats +and the EndNote and BibTex citation formats. These changes correct +errors and improve conformance with best practices but could break +parsing of these formats. + +For more information, see #10790. diff --git a/doc/release-notes/10809-oai-ore-nested-compound.md b/doc/release-notes/10809-oai-ore-nested-compound.md new file mode 100644 index 00000000000..d67f2b65592 --- /dev/null +++ b/doc/release-notes/10809-oai-ore-nested-compound.md @@ -0,0 +1 @@ +The OAI-ORE exporter can now export metadata containing nested compound fields (i.e. compound fields within compound fields). See #10809 and #11190. 
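To spot-check the fix on a dataset that uses nested compound fields, the OAI-ORE export can be requested directly (a sketch; substitute your own server URL and persistent identifier):

```shell
export SERVER_URL=https://demo.dataverse.org
export PERSISTENT_ID=doi:10.5072/FK2/EXAMPLE

# The OAI_ORE export now includes compound fields nested within other compound fields
curl "$SERVER_URL/api/datasets/export?exporter=OAI_ORE&persistentId=$PERSISTENT_ID"
```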
diff --git a/doc/release-notes/10887-solr-field-types.md b/doc/release-notes/10887-solr-field-types.md new file mode 100644 index 00000000000..1afbae34e90 --- /dev/null +++ b/doc/release-notes/10887-solr-field-types.md @@ -0,0 +1,102 @@ +This release enhances how numerical and date fields are indexed in Solr. Previously, all fields were indexed as English text (text_en), but with this update: + +* Integer fields are indexed as `plong` +* Float fields are indexed as `pdouble` +* Date fields are indexed as `date_range` (`solr.DateRangeField`) + +Specifically, the following fields were updated: + +- coverage.Depth +- coverage.ObjectCount +- coverage.ObjectDensity +- coverage.Redshift.MaximumValue +- coverage.Redshift.MinimumValue +- coverage.RedshiftValue +- coverage.SkyFraction +- coverage.Spectral.CentralWavelength +- coverage.Spectral.MaximumWavelength +- coverage.Spectral.MinimumWavelength +- coverage.Temporal.StartTime +- coverage.Temporal.StopTime +- dateOfCollectionEnd +- dateOfCollectionStart +- dateOfDeposit +- distributionDate +- dsDescriptionDate +- journalPubDate +- productionDate +- resolution.Redshift +- targetSampleActualSize +- timePeriodCoveredEnd +- timePeriodCoveredStart + +This change enables range queries when searching from both the UI and the API, such as `dateOfDeposit:[2000-01-01 TO 2014-12-31]` or `targetSampleActualSize:[25 TO 50]`. + +Dataverse administrators must update their Solr schema.xml (manually or by rerunning `update-fields.sh`) and reindex all datasets. + +Additionally, search result highlighting is now more accurate, ensuring that only fields relevant to the query are highlighted in search results. If the query is specifically limited to certain fields, the highlighting is now limited to those fields as well. + +## Upgrade Instructions + +7\. Update Solr schema.xml file. Start with the standard v6.5 schema.xml, then, if your installation uses any custom or experimental metadata blocks, update it to include the extra fields (step 7a). + +Stop Solr (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.5/installation/prerequisites.html#solr-init-script)). + +```shell +service solr stop +``` + +Replace schema.xml + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.5/conf/solr/schema.xml +cp schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf +``` + +Start Solr (but if you use any custom metadata blocks, perform the next step, 7a first). + +```shell +service solr start +``` + +7a\. For installations with custom or experimental metadata blocks: + +Before starting Solr, update the schema to include all the extra metadata fields that your installation uses. We do this by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed to reflect the names of the directories, if different): + +```shell + wget https://raw.githubusercontent.com/IQSS/dataverse/v6.5/conf/solr/update-fields.sh + chmod +x update-fields.sh + curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.4.1/server/solr/collection1/conf/schema.xml +``` + +Now start Solr. + +8\. Reindex Solr + +Note: these instructions are a little different than usual because we observed a strange error about `DOCS_AND_FREQS_AND_POSITIONS` when testing upgrades (see #11139 for details). 
Extra steps about explicitly clearing the index and reloading the core are included. If you run into trouble, as a last resort, you could reinstall Solr completely and then reindex. + +Clear the Solr index: + +```shell +curl http://localhost:8080/api/admin/index/clear +``` + +Make sure the Solr index is empty: + +```shell +curl "http://localhost:8983/solr/collection1/select?rows=1000000&wt=json&indent=true&q=*%3A*" +``` + +Reload the Solr core: + +```shell +curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1" +``` + +Below is the simplest way to reindex Solr: + +```shell +curl http://localhost:8080/api/admin/index +``` + +The API above rebuilds the existing index "in place". If you want to be absolutely sure that your index is up-to-date and consistent, you may consider wiping it clean and reindexing everything from scratch (see [the guides](https://guides.dataverse.org/en/latest/admin/solr-search-index.html)). Just note that, depending on the size of your database, a full reindex may take a while and the users will be seeing incomplete search results during that window. diff --git a/doc/release-notes/10930-marketplace-external-tools-apis.md b/doc/release-notes/10930-marketplace-external-tools-apis.md new file mode 100644 index 00000000000..e3350a8b2d2 --- /dev/null +++ b/doc/release-notes/10930-marketplace-external-tools-apis.md @@ -0,0 +1,14 @@ +## New APIs for External Tools Registration for Marketplace + +New API base path /api/externalTools created that mimics the admin APIs /api/admin/externalTools. These new add and delete apis require an authenticated superuser token. + +Example: +``` + API_TOKEN='xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx' + export TOOL_ID=1 + + curl http://localhost:8080/api/externalTools + curl http://localhost:8080/api/externalTools/$TOOL_ID + curl -s -H "X-Dataverse-key:$API_TOKEN" -X POST -H 'Content-type: application/json' http://localhost:8080/api/externalTools --upload-file fabulousFileTool.json + curl -s -H "X-Dataverse-key:$API_TOKEN" -X DELETE http://localhost:8080/api/externalTools/$TOOL_ID +``` diff --git a/doc/release-notes/10943-featured-items.md b/doc/release-notes/10943-featured-items.md new file mode 100644 index 00000000000..fa61d4e4875 --- /dev/null +++ b/doc/release-notes/10943-featured-items.md @@ -0,0 +1,15 @@ +CRUD endpoints for Collection Featured Items have been implemented. In particular, the following endpoints have been implemented: + +- Create a feature item (POST /api/dataverses//featuredItems) +- Update a feature item (PUT /api/dataverseFeaturedItems/) +- Delete a feature item (DELETE /api/dataverseFeaturedItems/) +- List all featured items in a collection (GET /api/dataverses//featuredItems) +- Delete all featured items in a collection (DELETE /api/dataverses//featuredItems) +- Update all featured items in a collection (PUT /api/dataverses//featuredItems) + +New settings: + +- dataverse.files.featured-items.image-maxsize - It sets the maximum allowed size of the image that can be added to a featured item. +- dataverse.files.featured-items.image-uploads - It specifies the name of the subdirectory for saving featured item images within the docroot directory. + +See also #10943 and #11124. 
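For example (a sketch, assuming a collection alias of "root"; the collection identifier goes between `/dataverses/` and `/featuredItems`):

```shell
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org

# List all featured items in the "root" collection
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/root/featuredItems"

# Delete all featured items in the "root" collection
curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/dataverses/root/featuredItems"
```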
diff --git a/doc/release-notes/10975-fix-file-replace-via-api b/doc/release-notes/10975-fix-file-replace-via-api new file mode 100644 index 00000000000..71646015a32 --- /dev/null +++ b/doc/release-notes/10975-fix-file-replace-via-api @@ -0,0 +1,3 @@ +A bug that caused replacing files via API when file PIDs were enabled has been fixed. + + For testing purposes, the FAKE PID provider can now be used with file PIDs enabled. (The FAKE provider is not recommended for any production use.) \ No newline at end of file diff --git a/doc/release-notes/11038-unconsidered-harvesting-granularity.md b/doc/release-notes/11038-unconsidered-harvesting-granularity.md new file mode 100644 index 00000000000..72ebd522831 --- /dev/null +++ b/doc/release-notes/11038-unconsidered-harvesting-granularity.md @@ -0,0 +1,2 @@ +Bug Fix: +OAI Client harvesting now uses the correct granularity while re-run a partial harvest (using the `from` parameter). The correct granularity comes from the `Identify` verb request. \ No newline at end of file diff --git a/doc/release-notes/11053-metadata-styling-fixes b/doc/release-notes/11053-metadata-styling-fixes new file mode 100644 index 00000000000..c1f2c69c283 --- /dev/null +++ b/doc/release-notes/11053-metadata-styling-fixes @@ -0,0 +1 @@ +Minor styling fixes for the Related Publication Field and fields using ORCID or ROR have been made (see #11053, #10964, #11106) diff --git a/doc/release-notes/11075-ror.md b/doc/release-notes/11075-ror.md new file mode 100644 index 00000000000..de32eebccdf --- /dev/null +++ b/doc/release-notes/11075-ror.md @@ -0,0 +1,15 @@ +### ROR (Research Organization Registry) as Author Identifier Type + +ROR (Research Organization Registry) has been added as an Author Identifier Type (alongside ORCID, etc.) for when the author is an organization rather than a person. As with all author identifiers, be sure to select the proper identifier type (e.g. "ROR") and enter just the unique identifier (e.g. "03vek6s52") rather than the full URL (e.g. "https://ror.org/03vek6s52"). Like ORCID, ROR will appear in the "Datacite" metadata export format. See also the [ROR](https://ror.org) website, a new [note](https://dataverse-guide--11118.org.readthedocs.build/en/11118/user/dataset-management.html#adding-a-new-dataset) in a guides about entering author identifiers, #11075, and #11118. + +## Upgrade Instructions + +6\. Update metadata blocks + +These changes reflect incremental improvements made to the handling of core metadata fields. Expect the loading of the citation block to take several seconds because of its size (especially due to the number of languages). + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.6/scripts/api/data/metadatablocks/citation.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file citation.tsv +``` diff --git a/doc/release-notes/11083-mydata-npe-with-harvested-dataverses.md b/doc/release-notes/11083-mydata-npe-with-harvested-dataverses.md new file mode 100644 index 00000000000..230d69c9b9f --- /dev/null +++ b/doc/release-notes/11083-mydata-npe-with-harvested-dataverses.md @@ -0,0 +1 @@ +Fix a bug with My Data where listing dataverses for a user with only rights on harvested dataverses would result in a server error response. 
\ No newline at end of file diff --git a/doc/release-notes/11095-fix-extcvoc-indexing.md b/doc/release-notes/11095-fix-extcvoc-indexing.md new file mode 100644 index 00000000000..f4931d81263 --- /dev/null +++ b/doc/release-notes/11095-fix-extcvoc-indexing.md @@ -0,0 +1,7 @@ +Some External Controlled Vocabulary scripts/configurations, when used on a metadata field that is single-valued could result +in indexing failure for the dataset (e.g. when the script tried to index both the identifier and name of the identified entity for indexing). +Dataverse has been updated to correctly indicate the need for a multi-valued Solr field in these cases in the call to /api/admin/index/solr/schema. +Configuring the Solr schema and the update-fields.sh script as usually recommended when using custom metadata blocks will resolve the issue. + +The overall release notes should include a Solr update (which hopefully is required by an update to 9.7.0 anyway) and our standard instructions +should change to recommending use of the update-fields.sh script when using custom metadatablocks *and/or external vocabulary scripts*. diff --git a/doc/release-notes/11107-fake-to-perma-demo.md b/doc/release-notes/11107-fake-to-perma-demo.md new file mode 100644 index 00000000000..afb6b8a7917 --- /dev/null +++ b/doc/release-notes/11107-fake-to-perma-demo.md @@ -0,0 +1,3 @@ +### Demo/Eval Container Tutorial + +The demo/eval container tutorial has been updated to use the Permalink PID provider instead of the FAKE DOI Provider. See also #11107. diff --git a/doc/release-notes/11113-avoid-orphan-perm-docs.md b/doc/release-notes/11113-avoid-orphan-perm-docs.md new file mode 100644 index 00000000000..4c52d72d7db --- /dev/null +++ b/doc/release-notes/11113-avoid-orphan-perm-docs.md @@ -0,0 +1,5 @@ +This release fixes a bug that caused Dataverse to generate unnecessary solr documents for files when a file is added/deleted from a draft dataset. These documents could accumulate and potentially impact performance. + +Assuming the upgrade to solr 9.7.0 also occurs in this release, there's nothing else needed for this PR. (Starting with a new solr insures the solr db is empty and that a reindex is already required.) + + diff --git a/doc/release-notes/11120-add-3d-object-metadata-block.md b/doc/release-notes/11120-add-3d-object-metadata-block.md new file mode 100644 index 00000000000..a43be86bc7d --- /dev/null +++ b/doc/release-notes/11120-add-3d-object-metadata-block.md @@ -0,0 +1,60 @@ +### New 3D Object Data Metadata Block + +A new metadata block has been added for describing 3D object data. You can download it from the [guides](https://dataverse-guide--11167.org.readthedocs.build/en/11167/user/appendix.html). See also #11120 and #11167. + +All new Dataverse installations will receive this metadata block by default. We recommend adding it by following the upgrade instructions below. + +## Upgrade Instructions + +### For 6.6-Release-notes.md + +6\. Restart Payara + +7\. Update metadata blocks + +These changes reflect incremental improvements made to the handling of core metadata fields. 
+ +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.6/scripts/api/data/metadatablocks/citation.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file citation.tsv +``` +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.6/scripts/api/data/metadatablocks/3d_objects.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file 3d_objects.tsv +``` + +8\. Update Solr schema.xml file. Start with the standard v6.6 schema.xml, then, if your installation uses any custom or experimental metadata blocks, update it to include the extra fields (step 8a). + +Stop Solr (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.6/installation/prerequisites.html#solr-init-script)). + +```shell +service solr stop +``` + +Replace schema.xml + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.6/conf/solr/schema.xml +cp schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf +``` + +Start Solr (but if you use any custom metadata blocks or adding 3D Objects, perform the next step, 8a first). + +```shell +service solr start +``` +8a\. For installations with custom or experimental metadata blocks: + +Before starting Solr, update the schema to include all the extra metadata fields that your installation uses. We do this by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed to reflect the names of the directories, if different): + +```shell + wget https://raw.githubusercontent.com/IQSS/dataverse/v6.6/conf/solr/update-fields.sh + chmod +x update-fields.sh + curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.4.1/server/solr/collection1/conf/schema.xml +``` + +Now start Solr. + +9\. Reindex Solr diff --git a/doc/release-notes/11127-update-search-api-to-show-dvobject-type-counts.md b/doc/release-notes/11127-update-search-api-to-show-dvobject-type-counts.md new file mode 100644 index 00000000000..c9b63340a1f --- /dev/null +++ b/doc/release-notes/11127-update-search-api-to-show-dvobject-type-counts.md @@ -0,0 +1,5 @@ +### show-type-counts Behavior changed in Search API + +In the Search API if you set show_type_counts=true the response will include all object types (Dataverses, Datasets, and Files) even if the search result for any given type is 0. + +See also the [guides](https://preview.guides.gdcc.io/en/develop/api/search.html#parameters), #11127 and #11138. diff --git a/doc/release-notes/11129-send-feedback-to-contacts.md b/doc/release-notes/11129-send-feedback-to-contacts.md new file mode 100644 index 00000000000..56eff133e0d --- /dev/null +++ b/doc/release-notes/11129-send-feedback-to-contacts.md @@ -0,0 +1,14 @@ +This feature adds a new API to send feedback to the Collection, Dataset, or DataFile's contacts. +Similar to the "admin/feedback" API the "sendfeedback" API sends an email to all the contacts listed for the Dataset. The main differences for this feature are: +1. This API is not limited to Admins +2. This API does not return the email addresses in the "toEmail" and "ccEmail" elements for privacy reasons +3. This API can be rate limited to avoid spamming +4. The body size limit can be configured +5. 
The body will be stripped of any html code to prevent malicious scripts or links +6. The fromEmail will be validated for correct format + +To set the Rate Limiting for guest users (See Rate Limiting Configuration for more details. This example allows 1 send per hour for any guest) +``curl http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction -X PUT -d '[{\"tier\": 0, \"limitPerHour\": 1, \"actions\": [\"CheckRateLimitForDatasetFeedbackCommand\"]}]'`` + +To set the message size limit (example limit of 1080 chars): +``curl -X PUT -d 1080 http://localhost:8080/api/admin/settings/:ContactFeedbackMessageSizeLimit`` diff --git a/doc/release-notes/11130-update-dataverse-api-remove-metadatablocks.md b/doc/release-notes/11130-update-dataverse-api-remove-metadatablocks.md new file mode 100644 index 00000000000..e71a67142b3 --- /dev/null +++ b/doc/release-notes/11130-update-dataverse-api-remove-metadatablocks.md @@ -0,0 +1,8 @@ +### Fixes consequences for not adding some optional fields in update dataverse API + +Omitting optional fields inputLevels, facetIds, or metadataBlockNames caused data to be deleted. +This fix no longer deletes data for these fields. Two new flags have been added to the ``metadataBlocks`` Json object to signal the deletion of the data. +- ``inheritMetadataBlocksFromParent: true`` will remove ``metadataBlockNames`` and ``inputLevels`` if the Json objects are omitted. +- ``inheritFacetsFromParent: true`` will remove ``facetIds`` if the Json object is omitted. + +For more information, see issue [#11130](https://github.com/IQSS/dataverse/issues/11130) diff --git a/doc/release-notes/11133-search-fix.md b/doc/release-notes/11133-search-fix.md new file mode 100644 index 00000000000..88962b70ea0 --- /dev/null +++ b/doc/release-notes/11133-search-fix.md @@ -0,0 +1,3 @@ +### Search fix when using AVOID_EXPENSIVE_SOLR_JOIN=true + +Dataverse v6.5 introduced a bug which causes search to fail for non-superusers in multiple groups when the AVOID_EXPENSIVE_SOLR_JOIN feature flag is set to true. This releases fixes the bug. diff --git a/doc/release-notes/11136-solr-nested.md b/doc/release-notes/11136-solr-nested.md new file mode 100644 index 00000000000..f85c02cde20 --- /dev/null +++ b/doc/release-notes/11136-solr-nested.md @@ -0,0 +1 @@ +Deeply nested metadata fields are not supported but the code used to generate the Solr schema has been adjusted to support them. See #11136. diff --git a/doc/release-notes/11142-more-detailed-file-differences.md b/doc/release-notes/11142-more-detailed-file-differences.md new file mode 100644 index 00000000000..d7425b8d2fa --- /dev/null +++ b/doc/release-notes/11142-more-detailed-file-differences.md @@ -0,0 +1,3 @@ +The file page version table now shows more detail, e.g. when there are metadata changes or whether a file has been replaced. +A bug that causes adding free-form provenance to a file to fail has been fixed. +See also #11142 and #11145. diff --git a/doc/release-notes/11159-preview-url-update.md b/doc/release-notes/11159-preview-url-update.md new file mode 100644 index 00000000000..2a84540ae75 --- /dev/null +++ b/doc/release-notes/11159-preview-url-update.md @@ -0,0 +1,7 @@ +### Preview URL popup updated + +The Preview URL popup and related documentation has been updated to give the dataset more information about anonymous access including the names of the dataset fields that will be withheld from the url user and suggesting how to review the url before releasing it. See also #11159 and #11164. 
+ +###Bug Fix + +Bug which causes users of the Anonymous Review URL to have some metadata of published datasets withheld has been fixed. See #11202 \ No newline at end of file diff --git a/doc/release-notes/8739-publisher-during-harvesting.md b/doc/release-notes/8739-publisher-during-harvesting.md new file mode 100644 index 00000000000..3e1555396c0 --- /dev/null +++ b/doc/release-notes/8739-publisher-during-harvesting.md @@ -0,0 +1 @@ +The publisher value of harvested datasets is now attributed to the dataset's distributor instead of its producer. This improves the citation associated with these datasets, but the change only affects newly harvested datasets. All datasets should be re-harvested if you wish to pick up this change on already harvested datasets. For more information, see [the guides](https://dataverse-guide--9013.org.readthedocs.build/en/9013/admin/harvestclients.html#harvesting-client-changelog), #8739, and #9013. \ No newline at end of file diff --git a/doc/release-notes/8808-10575-update-global-role.md b/doc/release-notes/8808-10575-update-global-role.md new file mode 100644 index 00000000000..38142f972e8 --- /dev/null +++ b/doc/release-notes/8808-10575-update-global-role.md @@ -0,0 +1,11 @@ +## Release Highlights + +### Update a Global Role + +A new API endpoint has been added that allows a global role to be updated. See [Native API Guide > Update Global Role](https://guides.dataverse.org/en/6.3/api/native-api.html#update-global-role) (#10612) + +## Bug fixes + +### Edition of custom role fixed + +It is now possible to edit a custom role with the same alias (reported in #8808) \ No newline at end of file diff --git a/doc/release-notes/9294-improvement-and-internationalization-of-harvest-status.md b/doc/release-notes/9294-improvement-and-internationalization-of-harvest-status.md new file mode 100644 index 00000000000..f9fc465292c --- /dev/null +++ b/doc/release-notes/9294-improvement-and-internationalization-of-harvest-status.md @@ -0,0 +1,6 @@ +## Improvement and internationalization of harvest status + +Added a harvest status to differentiate a complete harvest with errors (Completed with failures) and without errors (Completed) +Harvest status labels are now internationalized + +For more information, see issue [#9294](https://github.com/IQSS/dataverse/issues/9294) \ No newline at end of file diff --git a/doc/release-notes/openaire_fix.md b/doc/release-notes/openaire_fix.md new file mode 100644 index 00000000000..49a809db14b --- /dev/null +++ b/doc/release-notes/openaire_fix.md @@ -0,0 +1 @@ +bugfix: openaire implementation can now correctly process one or multiple productionPlaces as geolocation \ No newline at end of file diff --git a/doc/sphinx-guides/source/_static/api/dataset-add-single-compound-field-metadata.json b/doc/sphinx-guides/source/_static/api/dataset-add-single-compound-field-metadata.json new file mode 100644 index 00000000000..f49a9e47d5b --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/dataset-add-single-compound-field-metadata.json @@ -0,0 +1,13 @@ +{ + "fields": [ + { + "typeName": "targetSampleSize", + "value": { + "targetSampleActualSize": { + "typeName": "targetSampleSizeFormula", + "value": "n = N*X / (X + N โ€“ 1)" + } + } + } + ] +} \ No newline at end of file diff --git a/doc/sphinx-guides/source/_static/api/dataset-add-single-cvoc-field-metadata.json b/doc/sphinx-guides/source/_static/api/dataset-add-single-cvoc-field-metadata.json new file mode 100644 index 00000000000..620f3df10d1 --- /dev/null +++ 
b/doc/sphinx-guides/source/_static/api/dataset-add-single-cvoc-field-metadata.json @@ -0,0 +1,4 @@ +{ + "typeName": "journalArticleType", + "value": "abstract" +} \ No newline at end of file diff --git a/doc/sphinx-guides/source/_static/api/dataset-migrate.jsonld b/doc/sphinx-guides/source/_static/api/dataset-migrate.jsonld index 8f43d1dd6e9..bd882846da1 100644 --- a/doc/sphinx-guides/source/_static/api/dataset-migrate.jsonld +++ b/doc/sphinx-guides/source/_static/api/dataset-migrate.jsonld @@ -1,11 +1,31 @@ { "citation:depositor": "Admin, Dataverse", "title": "Test Dataset", +"socialscience:collectionMode": [ + "demonstration" +], "subject": "Computer and Information Science", +"geospatial:geographicCoverage": [ + { + "geospatial:otherGeographicCoverage": "Cambridge" + }, + { + "geospatial:otherGeographicCoverage": "Massachusetts" + } +], "author": { "citation:authorName": "Admin, Dataverse", "citation:authorAffiliation": "GDCC" }, +"kindOfData": "demonstration data", +"citation:keyword": [ + { + "citation:keywordValue": "first keyword" + }, + { + "citation:keywordValue": "second keyword" + } +], "dateOfDeposit": "2020-10-08", "citation:distributor": { "citation:distributorName": "Demo Dataverse Repository", @@ -35,5 +55,9 @@ "title": "http://purl.org/dc/terms/title", "citation": "https://dataverse.org/schema/citation/", "dvcore": "https://dataverse.org/schema/core#", - "schema": "http://schema.org/" -}} + "schema": "http://schema.org/", + "geospatial": "dataverse.siteUrl/schema/geospatial#", + "socialscience": "dataverse.siteUrl/schema/socialscience#", + "kindOfData": "http://rdf-vocabulary.ddialliance.org/discovery#kindOfData" + } +} diff --git a/doc/sphinx-guides/source/_static/api/transform-oai-ore-jsonld.xq b/doc/sphinx-guides/source/_static/api/transform-oai-ore-jsonld.xq new file mode 100644 index 00000000000..6292f39fbde --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/transform-oai-ore-jsonld.xq @@ -0,0 +1,16 @@ +declare option output:method "json"; + +let $parameters:={ 'method': 'json' } +for $record in /json + let $metadata:=$record/ore_003adescribes + + + let $json:= + + {$metadata/*} + {$record/_0040context} + + + + return if ($metadata) then + file:write("converted.json",$json, $parameters) \ No newline at end of file diff --git a/doc/sphinx-guides/source/admin/discoverability.rst b/doc/sphinx-guides/source/admin/discoverability.rst index 19ef7250a29..22ff66246f0 100644 --- a/doc/sphinx-guides/source/admin/discoverability.rst +++ b/doc/sphinx-guides/source/admin/discoverability.rst @@ -51,7 +51,7 @@ The Dataverse team has been working with Google on both formats. Google has `ind Signposting +++++++++++ -The Dataverse software supports `Signposting `_. This allows machines to request more information about a dataset through the `Link `_ HTTP header. +The Dataverse software supports `Signposting `_. This allows machines to request more information about a dataset through the `Link `_ HTTP header. Links to all enabled metadata export formats are given. See :ref:`metadata-export-formats` for a list. There are 2 Signposting profile levels, level 1 and level 2. 
In this implementation, * Level 1 links are shown `as recommended `_ in the "Link" diff --git a/doc/sphinx-guides/source/admin/external-tools.rst b/doc/sphinx-guides/source/admin/external-tools.rst index 346ca0b15ee..c3e71c13ac6 100644 --- a/doc/sphinx-guides/source/admin/external-tools.rst +++ b/doc/sphinx-guides/source/admin/external-tools.rst @@ -35,7 +35,13 @@ Configure the tool with the curl command below, making sure to replace the ``fab .. code-block:: bash - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools --upload-file fabulousFileTool.json + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools --upload-file fabulousFileTool.json + +This API is Superuser only. Note the endpoint difference (/api/externalTools instead of /api/admin/externalTools). + +.. code-block:: bash + + curl -s -H "X-Dataverse-key:$API_TOKEN" -X POST -H 'Content-type: application/json' http://localhost:8080/api/externalTools --upload-file fabulousFileTool.json Listing All External Tools in a Dataverse Installation ++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -46,6 +52,12 @@ To list all the external tools that are available in a Dataverse installation: curl http://localhost:8080/api/admin/externalTools +This API is open to any user. Note the endpoint difference (/api/externalTools instead of /api/admin/externalTools). + +.. code-block:: bash + + curl http://localhost:8080/api/externalTools + Showing an External Tool in a Dataverse Installation ++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -56,6 +68,12 @@ To show one of the external tools that are available in a Dataverse installation export TOOL_ID=1 curl http://localhost:8080/api/admin/externalTools/$TOOL_ID +This API is open to any user. Note the endpoint difference (/api/externalTools instead of /api/admin/externalTools). + +.. code-block:: bash + + curl http://localhost:8080/api/externalTools/$TOOL_ID + Removing an External Tool From a Dataverse Installation +++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -66,6 +84,12 @@ Assuming the external tool database id is "1", remove it with the following comm export TOOL_ID=1 curl -X DELETE http://localhost:8080/api/admin/externalTools/$TOOL_ID +This API is Superuser only. Note the endpoint difference (/api/externalTools instead of /api/admin/externalTools). + +.. code-block:: bash + + curl -s -H "X-Dataverse-key:$API_TOKEN" -X DELETE http://localhost:8080/api/externalTools/$TOOL_ID + .. _testing-external-tools: Testing External Tools diff --git a/doc/sphinx-guides/source/admin/harvestclients.rst b/doc/sphinx-guides/source/admin/harvestclients.rst index c4c63c80786..38a00d6921c 100644 --- a/doc/sphinx-guides/source/admin/harvestclients.rst +++ b/doc/sphinx-guides/source/admin/harvestclients.rst @@ -48,6 +48,11 @@ Each harvesting client run logs a separate file per run to the app server's defa Note that you'll want to run a minimum of Dataverse Software 4.6, optimally 4.18 or beyond, for the best OAI-PMH interoperability. +Harvesting Client Changelog +--------------------------- + +- As of Dataverse 6.5, the publisher value of harvested datasets is now attributed to the dataset's distributor instead of its producer. This change affects all newly harvested datasets. 
For more information, see https://github.com/IQSS/dataverse/pull/9013 + Harvesting Non-OAI-PMH ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index 4c9dc693a0d..df07b65153b 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -272,7 +272,11 @@ Each of the three main sections own sets of properties: | | โ€œValueโ€ field is used as the identifier. | | +--------------+--------------------------------------------+-----------------------------------------+ | displayOrder | Control the order in which the enumerated | Non-negative integer. | -| | values are displayed for selection. | | +| | values are displayed for selection. When | | +| | adding new values, you don't have to add | | +| | them at the end. You can renumber existing | | +| | values to update the order in which they | | +| | appear. | | +--------------+--------------------------------------------+-----------------------------------------+ FieldType definitions @@ -544,7 +548,7 @@ a necessary re-index, but for your custom metadata you will need to keep track o Please note also that if you are going to make a pull request updating ``conf/solr/schema.xml`` with fields you have added, you should first load all the custom metadata blocks in ``scripts/api/data/metadatablocks`` (including ones you -don't care about) to create a complete list of fields. (This might change in the future.) +don't care about) to create a complete list of fields. (This might change in the future.) Please see :ref:`update-solr-schema-dev` in the Developer Guide. Reloading a Metadata Block -------------------------- @@ -564,8 +568,7 @@ Using External Vocabulary Services The Dataverse software has a mechanism to associate specific fields defined in metadata blocks with a vocabulary(ies) managed by external services. The mechanism relies on trusted third-party Javascripts. The mapping from field type to external vocabulary(ies) is managed via the :ref:`:CVocConf <:CVocConf>` setting. -*This functionality is considered 'experimental'. It may require significant effort to configure and is likely to evolve in subsequent Dataverse software releases.* - +*This functionality may require significant effort to configure and is likely to evolve in subsequent Dataverse software releases.* The effect of configuring this mechanism is similar to that of defining a field in a metadata block with 'allowControlledVocabulary=true': @@ -590,6 +593,9 @@ Configuration involves specifying which fields are to be mapped, to which Solr f These are all defined in the :ref:`:CVocConf <:CVocConf>` setting as a JSON array. Details about the required elements as well as example JSON arrays are available at https://github.com/gdcc/dataverse-external-vocab-support, along with an example metadata block that can be used for testing. The scripts required can be hosted locally or retrieved dynamically from https://gdcc.github.io/ (similar to how dataverse-previewers work). +Since external vocabulary scripts can change how fields are indexed (storing an identifier and name and/or values in different languages), +updating the Solr schema as described in :ref:`update-solr-schema` should be done after adding new scripts to your configuration. + Please note that in addition to the :ref:`:CVocConf` described above, an alternative is the :ref:`:ControlledVocabularyCustomJavaScript` setting. 
Protecting MetadataBlocks diff --git a/doc/sphinx-guides/source/api/auth.rst b/doc/sphinx-guides/source/api/auth.rst index 210c1bcd184..8dffb914e29 100644 --- a/doc/sphinx-guides/source/api/auth.rst +++ b/doc/sphinx-guides/source/api/auth.rst @@ -87,22 +87,20 @@ To register a new user who has authenticated via an OIDC provider, the following curl -H "Authorization: Bearer $TOKEN" -X POST http://localhost:8080/api/users/register --data '{"termsAccepted":true}' -If the feature flag ``api-bearer-auth-handle-tos-acceptance-in-idp``` is disabled, it is essential to send a JSON that includes the property ``termsAccepted``` set to true, indicating that you accept the Terms of Use of the installation. Otherwise, you will not be able to create an account. However, if the feature flag is enabled, Terms of Service acceptance is handled by the identity provider, and it is no longer necessary to include the ``termsAccepted``` parameter in the JSON. - -In this JSON, we can also include the fields ``position`` or ``affiliation``, in the same way as when we register a user through the Dataverse UI. These fields are optional, and if not provided, they will be persisted as empty in Dataverse. - -There is another flag called ``api-bearer-auth-provide-missing-claims`` that can be enabled to allow sending missing user claims in the registration JSON. This is useful when the identity provider does not supply the necessary claims. However, this flag will only be considered if the ``api-bearer-auth`` feature flag is enabled. If the latter is not enabled, the ``api-bearer-auth-provide-missing-claims`` flag will be ignored. - -With the ``api-bearer-auth-provide-missing-claims`` feature flag enabled, you can include the following properties in the request JSON: +By default, the Bearer token is expected to include the following claims that will be used to create the user account: - ``username`` - ``firstName`` - ``lastName`` - ``emailAddress`` -If properties are provided in the JSON, but corresponding claims already exist in the identity provider, an error will be thrown, outlining the conflicting properties. +The one parameter required by default is ``termsAccepted`` which must be set to true, indicating that the user has seen and accepted the Terms of Use of the installation. + +If the feature flag ``api-bearer-auth-handle-tos-acceptance-in-idp`` is enabled (along with the ``api-bearer-auth`` feature flag), Dataverse assumes that the Terms of Service acceptance was handled by the identity provider, e.g. in the OIDC ``consent`` dialog, and the ``termsAccepted`` parameter is not needed. + +There is another flag called ``api-bearer-auth-provide-missing-claims`` that can be enabled (along with the ``api-bearer-auth`` feature flag) to allow sending missing user claims in the registration JSON. This is useful when the identity provider does not supply the necessary claims listed above. If properties are provided in the JSON, but corresponding claims already exist in the identity provider, an error will be thrown, outlining the conflicting properties. Note that supplying missing claims is configured via a separate feature flag because using it may introduce user impersonation issues, for example if the identity provider does not provide an email field and the user submits an email address they do not own. 
-This functionality is included under a feature flag because using it may introduce user impersonation issues, for example if the identity provider does not provide an email field and the user submits an email address they do not own. +In all cases, the submitted JSON can optionally include the fields ``position`` or ``affiliation``, which will be added to the user's Dataverse account. These fields are optional, and if not provided, they will be persisted as empty in Dataverse. Signed URLs ----------- diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 162574e7799..15dabecda35 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -11,6 +11,8 @@ v6.6 ---- - **/api/metadatablocks** is no longer returning duplicated metadata properties and does not omit metadata properties when called. +- **/api/roles**: :ref:`show-role` now properly returns 403 Forbidden instead of 401 Unauthorized when you pass a working API token that doesn't have the right permission. +- The content type for the ``schema.org`` dataset metadata export format has been corrected. It was ``application/json`` and now it is ``application/ld+json``. See also :ref:`export-dataset-metadata-api`. v6.5 ---- diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index cf088963b7d..b8070299511 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -128,12 +128,23 @@ Note that setting any of these fields overwrites the previous configuration. When it comes to omitting these fields in the JSON: -- Omitting ``facetIds`` or ``metadataBlockNames`` causes the Dataverse collection to inherit the corresponding configuration from its parent. -- Omitting ``inputLevels`` removes any existing custom input levels in the Dataverse collection. -- Omitting the entire ``metadataBlocks`` object in the request JSON would exclude the three sub-objects, resulting in the application of the two changes described above. +- Omitting ``facetIds`` or ``metadataBlockNames`` causes no change to the Dataverse collection. To delete the current configuration and inherit the corresponding configuration from its parent include the flag ``inheritFacetsFromParent`` and/or ``inheritMetadataBlocksFromParent`` respectively. +- Omitting ``inputLevels`` causes no change to the Dataverse collection. Including the flag ``inheritMetadataBlocksFromParent`` will cause the custom ``inputLevels`` to be deleted and inherited from the parent. +- Omitting the entire ``metadataBlocks`` object in the request JSON would cause no change to the ``inputLevels``, ``facetIds`` or ``metadataBlockNames`` of the Dataverse collection. To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. +To force the configurations to be deleted and inherited from the parent's configuration include the following ``metadataBlocks`` object in your JSON + +.. code-block:: json + + "metadataBlocks": { + "inheritMetadataBlocksFromParent": true, + "inheritFacetsFromParent": true + } + +.. 
note:: Including both the list ``metadataBlockNames`` and the flag ``"inheritMetadataBlocksFromParent": true`` will result in an error being returned {"status": "ERROR", "message": "Metadata block can not contain both metadataBlockNames and inheritMetadataBlocksFromParent: true"}. The same is true for ``facetIds`` and ``inheritFacetsFromParent``. + See also :ref:`collection-attributes-api`. .. _view-dataverse: @@ -424,13 +435,13 @@ Creates a new role under Dataverse collection ``id``. Needs a json file with the export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/roles" --upload-file roles.json + curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/dataverses/$ID/roles" --upload-file roles.json The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" "https://demo.dataverse.org/api/dataverses/root/roles" --upload-file roles.json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Content-type:application/json" -X POST "https://demo.dataverse.org/api/dataverses/root/roles" --upload-file roles.json For ``roles.json`` see :ref:`json-representation-of-a-role` @@ -529,6 +540,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverses/root/assignments/6" +.. _list-metadata-blocks-for-a-collection: + List Metadata Blocks Defined on a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -556,6 +569,7 @@ This endpoint supports the following optional query parameters: - ``returnDatasetFieldTypes``: Whether or not to return the dataset field types present in each metadata block. If not set, the default value is false. - ``onlyDisplayedOnCreate``: Whether or not to return only the metadata blocks that are displayed on dataset creation. If ``returnDatasetFieldTypes`` is true, only the dataset field types shown on dataset creation will be returned within each metadata block. If not set, the default value is false. +- ``datasetType``: Optionally return additional fields from metadata blocks that are linked with a particular dataset type (see :ref:`dataset-types` in the User Guide). Pass a single dataset type as a string. For a list of dataset types you can pass, see :ref:`api-list-dataset-types`. An example using the optional query parameters is presented below: @@ -564,14 +578,17 @@ An example using the optional query parameters is presented below: export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org export ID=root + export DATASET_TYPE=software - curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true" + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true&datasetType=$DATASET_TYPE" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true&datasetType=software" + +.. _define-metadata-blocks-for-a-dataverse-collection: Define Metadata Blocks for a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -598,6 +615,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" --upload-file define-metadatablocks.json "https://demo.dataverse.org/api/dataverses/root/metadatablocks" +An alternative to defining metadata blocks at a collection level is to create and use a dataset type. See :ref:`api-link-dataset-type`. + Determine if a Dataverse Collection Inherits Its Metadata Blocks from Its Parent ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1145,6 +1164,209 @@ Use the ``/settings`` API to enable or disable the enforcement of storage quotas curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:UseStorageQuotas +List All Collection Featured Items +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +List the featured items configured for a given Dataverse collection ``id``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/featuredItems" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/featuredItems" + +Update All Collection Featured Items +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Updates all featured items in the given Dataverse collection ``id``. + +The data sent to the endpoint represents the desired final state of the featured items in the Dataverse collection and overwrites any existing featured items configuration. + +The parameters ``id``, ``content``, ``displayOrder``, and ``fileName`` must be specified as many times as the number of items we want to add or update. The order in which these parameters are repeated must match to ensure they correspond to the same featured item. + +The ``file`` parameter must be specified for each image we want to attach to featured items. Note that images can be shared between featured items, so ``fileName`` can have the same value in different featured items. + +The ``id`` parameter must be ``0`` for new items or set to the item's identifier for updates. The ``fileName`` parameter should be empty to exclude an image or match the name of a file sent in a ``file`` parameter to set a new image. ``keepFile`` must always be set to ``false``, unless it's an update to a featured item where we want to preserve the existing image, if one exists. + +Note that any existing featured item not included in the call with its associated identifier and corresponding properties will be removed from the collection. + +The following example creates two featured items, with an image assigned to the second one: + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + export FIRST_ITEM_CONTENT='Content 1' + export FIRST_ITEM_DISPLAY_ORDER=1 + + export SECOND_ITEM_IMAGE_FILENAME='image.png' + export SECOND_ITEM_CONTENT='Content 2' + export SECOND_ITEM_DISPLAY_ORDER=2 + + curl -H "X-Dataverse-key:$API_TOKEN" \ + -X PUT \ + -F "id=0" -F "id=0" \ + -F "content=$FIRST_ITEM_CONTENT" -F "content=$SECOND_ITEM_CONTENT" \ + -F "displayOrder=$FIRST_ITEM_DISPLAY_ORDER" -F "displayOrder=$SECOND_ITEM_DISPLAY_ORDER" \ + -F "fileName=" -F "fileName=$SECOND_ITEM_IMAGE_FILENAME" \ + -F "keepFile=false" -F "keepFile=false" \ + -F "file=@$SECOND_ITEM_IMAGE_FILENAME" \ + "$SERVER_URL/api/dataverses/$ID/featuredItems" + + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" \ + -X PUT \ + -F "id=0" -F "id=0" \ + -F "content=Content 1" -F "content=Content 2" \ + -F "displayOrder=1" -F "displayOrder=2" \ + -F "fileName=" -F "fileName=image.png" \ + -F "keepFile=false" -F "keepFile=false" \ + -F "file=@image.png" \ + "https://demo.dataverse.org/api/dataverses/root/featuredItems" + +The following example creates one featured item and updates a second one, keeping the existing image it may have had: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" \ + -X PUT \ + -F "id=0" -F "id=1" \ + -F "content=Content 1" -F "content=Updated content 2" \ + -F "displayOrder=1" -F "displayOrder=2" \ + -F "fileName=" -F "fileName=" \ + -F "keepFile=false" -F "keepFile=true" \ + "https://demo.dataverse.org/api/dataverses/root/featuredItems" + +Delete All Collection Featured Items +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Deletes the featured items configured for a given Dataverse collection ``id``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/dataverses/$ID/featuredItems" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverses/root/featuredItems" + +Create a Collection Featured Item +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Creates a featured item in the given Dataverse collection ``id``: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export IMAGE_FILENAME='image.png' + export CONTENT='Content for featured item.' + export DISPLAY_ORDER=1 + export SERVER_URL=https://demo.dataverse.org + export ID=root + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F "file=@$IMAGE_FILENAME" -F "content=$CONTENT" -F "displayOrder=$DISPLAY_ORDER" "$SERVER_URL/api/dataverses/$ID/featuredItems" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -F "file=@image.png" -F "content=Content for featured item." -F "displayOrder=1" "https://demo.dataverse.org/api/dataverses/root/featuredItems" + +A featured item may or may not contain an image. If you wish to create it without an image, omit the file parameter in the request. 
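For example (a sketch reusing the hypothetical values above), creating a featured item without an image simply drops the ``file`` parameter:

.. code-block:: bash

   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -F "content=Content for featured item." -F "displayOrder=1" "https://demo.dataverse.org/api/dataverses/root/featuredItems"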
+
+Update a Collection Featured Item
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Updates a featured item given its ``id``:
+
+.. code-block:: bash
+
+   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+   export IMAGE_FILENAME='image.png'
+   export CONTENT='Content for featured item.'
+   export DISPLAY_ORDER=1
+   export SERVER_URL=https://demo.dataverse.org
+   export ID=1
+
+   curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -F "file=@$IMAGE_FILENAME" -F "content=$CONTENT" -F "displayOrder=$DISPLAY_ORDER" "$SERVER_URL/api/dataverseFeaturedItems/$ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -F "file=@image.png" -F "content=Content for featured item." -F "displayOrder=1" "https://demo.dataverse.org/api/dataverseFeaturedItems/1"
+
+``content`` and ``displayOrder`` must always be provided; otherwise, an error will occur. Use the ``file`` parameter to set a new image for the featured item. To keep the existing image, omit ``file`` and send ``keepFile=true``. To remove the image, omit both ``file`` and ``keepFile``.
+
+Updating the featured item keeping the existing image:
+
+.. code-block:: bash
+
+   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -F "keepFile=true" -F "content=Content for featured item." -F "displayOrder=1" "https://demo.dataverse.org/api/dataverseFeaturedItems/1"
+
+Updating the featured item removing the existing image:
+
+.. code-block:: bash
+
+   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -F "content=Content for featured item." -F "displayOrder=1" "https://demo.dataverse.org/api/dataverseFeaturedItems/1"
+
+Delete a Collection Featured Item
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Deletes a featured item given its ``id``:
+
+.. code-block:: bash
+
+   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+   export SERVER_URL=https://demo.dataverse.org
+   export ID=1
+
+   curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/dataverseFeaturedItems/$ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverseFeaturedItems/1"
+
+Get a Collection Featured Item Image
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Returns the image of a featured item if one is assigned, given the featured item ``id``:
+
+.. code-block:: bash
+
+   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+   export SERVER_URL=https://demo.dataverse.org
+   export ID=1
+
+   curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/access/dataverseFeaturedItemImage/$ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/access/dataverseFeaturedItemImage/1"
 
 Datasets
 --------
@@ -1295,6 +1517,8 @@ It returns a list of versions with their metadata, and file list:
 
 The optional ``excludeFiles`` parameter specifies whether the files should be listed in the output. It defaults to ``true``, preserving backward compatibility. (Note that for a dataset with a large number of versions and/or files having the files included can dramatically increase the volume of the output). A separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. 
+The optional ``excludeMetadataBlocks`` parameter specifies whether the metadata blocks should be listed in the output. It defaults to ``false``, preserving backward compatibility. (Note that for a dataset with a large number of versions and/or metadata blocks having the metadata blocks included can dramatically increase the volume of the output). + The optional ``offset`` and ``limit`` parameters can be used to specify the range of the versions list to be shown. This can be used to paginate through the list in a dataset with a large number of versions. @@ -1319,6 +1543,12 @@ The fully expanded example above (without environment variables) looks like this The optional ``excludeFiles`` parameter specifies whether the files should be listed in the output (defaults to ``true``). Note that a separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0?excludeMetadataBlocks=false" + +The optional ``excludeMetadataBlocks`` parameter specifies whether the metadata blocks should be listed in the output (defaults to ``false``). + By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. @@ -1345,6 +1575,8 @@ Export Metadata of a Dataset in Various Formats |CORS| Export the metadata of the current published version of a dataset in various formats. +To get a list of available formats, see :ref:`available-exporters` and :ref:`get-export-formats`. + See also :ref:`batch-exports-through-the-api` and the note below: .. code-block:: bash @@ -1361,9 +1593,30 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/export?exporter=ddi&persistentId=doi:10.5072/FK2/J8SJZB" -.. note:: Supported exporters (export formats) are ``ddi``, ``oai_ddi``, ``dcterms``, ``oai_dc``, ``schema.org`` , ``OAI_ORE`` , ``Datacite``, ``oai_datacite`` and ``dataverse_json``. Descriptive names can be found under :ref:`metadata-export-formats` in the User Guide. +.. _available-exporters: + +Available Dataset Metadata Exporters +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The following dataset metadata exporters ship with Dataverse: + +- ``Datacite`` +- ``dataverse_json`` +- ``dcterms`` +- ``ddi`` +- ``oai_datacite`` +- ``oai_dc`` +- ``oai_ddi`` +- ``OAI_ORE`` +- ``schema.org`` + +These are the strings to pass as ``$METADATA_FORMAT`` in the examples above. Descriptive names for each format can be found under :ref:`metadata-export-formats` in the User Guide. -.. note:: Additional exporters can be enabled, as described under :ref:`external-exporters` in the Installation Guide. To discover the machine-readable name of each exporter (e.g. ``ddi``), check :ref:`inventory-of-external-exporters` or ``getFormatName`` in the exporter's source code. +Additional exporters can be enabled, as described under :ref:`external-exporters` in the Installation Guide. The machine-readable name/identifier for each external exporter can be found under :ref:`inventory-of-external-exporters`. If you are interested in creating your own exporter, see :doc:`/developers/metadataexport`. + +To discover the machine-readable name of exporters (e.g. 
``ddi``) that have been enabled on the installation of Dataverse you are using see :ref:`get-export-formats`. Alternatively, you can use the Signposting "linkset" API documented under :ref:`signposting-api`. + +To discover the machine-readable name of exporters generally, check :ref:`inventory-of-external-exporters` or ``getFormatName`` in the exporter's source code. Schema.org JSON-LD ^^^^^^^^^^^^^^^^^^ @@ -1377,6 +1630,8 @@ Both forms are valid according to Google's Structured Data Testing Tool at https The standard has further evolved into a format called Croissant. For details, see :ref:`schema.org-head` in the Admin Guide. +The ``schema.org`` format changed after Dataverse 6.4 as well. Previously its content type was "application/json" but now it is "application/ld+json". + List Files in a Dataset ~~~~~~~~~~~~~~~~~~~~~~~ @@ -2458,6 +2713,7 @@ The fully expanded example above (without environment variables) looks like this The review process can sometimes resemble a tennis match, with the authors submitting and resubmitting the dataset over and over until the curators are satisfied. Each time the curators send a "reason for return" via API, that reason is sent by email and is persisted into the database, stored at the dataset version level. Note the reason is required, unless the `disable-return-to-author-reason` feature flag has been set (see :ref:`feature-flags`). Reason is a free text field and could be as simple as "The author would like to modify his dataset", "Files are missing", "Nothing to report" or "A curation report with comments and suggestions/instructions will follow in another email" that suits your situation. +The :ref:`send-feedback-admin` Admin only API call may be useful as a way to move the conversation to email. However, note that these emails go to contacts (versus authors) and there is no database record of the email contents. (:ref:`dataverse.mail.cc-support-on-contact-email` will send a copy of these emails to the support email address which would provide a record.) The :ref:`send-feedback` API call may be useful as a way to move the conversation to email. However, note that these emails go to contacts (versus authors) and there is no database record of the email contents. (:ref:`dataverse.mail.cc-support-on-contact-email` will send a copy of these emails to the support email address which would provide a record.) Link a Dataset @@ -2943,15 +3199,23 @@ Retrieve Signposting Information Dataverse supports :ref:`discovery-sign-posting` as a discovery mechanism. Signposting involves the addition of a `Link `__ HTTP header providing summary information on GET and HEAD requests to retrieve the dataset page and a separate /linkset API call to retrieve additional information. 
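As a quick sketch (assuming a published dataset, using the demo server and the example DOI from earlier in this guide), the "Link" header can be inspected with a HEAD request against the dataset landing page:

.. code-block:: bash

   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB

   # -I sends a HEAD request; the Signposting summary appears in the "Link" response header
   curl -I "$SERVER_URL/dataset.xhtml?persistentId=$PERSISTENT_ID"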
-Here is an example of a "Link" header: +Signposting Link HTTP Header +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Here is an example of a HTTP "Link" header from a GET or HEAD request for a dataset landing page: -``Link: ;rel="cite-as", ;rel="describedby";type="application/vnd.citationstyles.csl+json",;rel="describedby";type="application/ld+json", ;rel="type",;rel="type", ;rel="license", ; rel="linkset";type="application/linkset+json"`` +``Link: ;rel="cite-as", ;rel="describedby";type="application/vnd.citationstyles.csl+json",;rel="describedby";type="application/json",;rel="describedby";type="application/xml",;rel="describedby";type="application/xml",;rel="describedby";type="application/xml",;rel="describedby";type="application/ld+json",;rel="describedby";type="application/xml",;rel="describedby";type="application/xml",;rel="describedby";type="text/html",;rel="describedby";type="application/json",;rel="describedby";type="application/xml", ;rel="type",;rel="type", ;rel="license", ; rel="linkset";type="application/linkset+json"`` -The URL for linkset information is discoverable under the ``rel="linkset";type="application/linkset+json`` entry in the "Link" header, such as in the example above. +The URL for linkset information (described below) is discoverable under the ``rel="linkset";type="application/linkset+json`` entry in the "Link" header, such as in the example above. + +Signposting Linkset API Endpoint +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The reponse includes a JSON object conforming to the `Signposting `__ specification. As part of this conformance, unlike most Dataverse API responses, the output is not wrapped in a ``{"status":"OK","data":{`` object. Signposting is not supported for draft dataset versions. +Like :ref:`get-export-formats`, this API can be used to get URLs to dataset metadata export formats, but with URLs for the dataset in question. + .. code-block:: bash export SERVER_URL=https://demo.dataverse.org @@ -3250,6 +3514,36 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/datasetTypes/3" +.. _api-link-dataset-type: + +Link Dataset Type with Metadata Blocks +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Linking a dataset type with one or more metadata blocks results in additional fields from those blocks appearing in the output from the :ref:`list-metadata-blocks-for-a-collection` API endpoint. The new frontend for Dataverse (https://github.com/IQSS/dataverse-frontend) uses the JSON output from this API endpoint to construct the page that users see when creating or editing a dataset. Once the frontend has been updated to pass in the dataset type (https://github.com/IQSS/dataverse-client-javascript/issues/210), specifying a dataset type in this way can be an alternative way to display additional metadata fields than the traditional method, which is to enable a metadata block at the collection level (see :ref:`define-metadata-blocks-for-a-dataverse-collection`). + +For example, a superuser could create a type called "software" and link it to the "CodeMeta" metadata block (this example is below). Then, once the new frontend allows it, the user can specify that they want to create a dataset of type software and see the additional metadata fields from the CodeMeta block when creating or editing their dataset. + +This API endpoint is for superusers only. + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export TYPE=software + export JSON='["codeMeta20"]' + + curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-Type: application/json" "$SERVER_URL/api/datasets/datasetTypes/$TYPE" -X PUT -d $JSON + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Content-Type: application/json" "https://demo.dataverse.org/api/datasets/datasetTypes/software" -X PUT -d '["codeMeta20"]' + +To update the blocks that are linked, send an array with those blocks. + +To remove all links to blocks, send an empty array. + Files ----- @@ -3649,7 +3943,7 @@ The fully expanded example above (without environment variables) looks like this Currently the following methods are used to detect file types: - The file type detected by the browser (or sent via API). -- Custom code that reads the first few bytes. As explained at :ref:`s3-direct-upload-features-disabled`, this method of file type detection is not utilized during direct upload to S3, since by nature of direct upload Dataverse never sees the contents of the file. However, this code is utilized when the "redetect" API is used. +- Custom code that reads the first few bytes. As explained at :ref:`s3-direct-upload-features-disabled`, most of these methods are not utilized during direct upload to S3, since by nature of direct upload Dataverse never sees the contents of the file. However, this code is utilized when the "redetect" API is used. - JHOVE: https://jhove.openpreservation.org . Note that the same applies about direct upload to S3 and the "redetect" API. - The file extension (e.g. ".ipybn") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``. - The file name (e.g. "Dockerfile") is used, defined in a file called ``MimeTypeDetectionByFileName.properties``. @@ -4550,12 +4844,12 @@ The JSON representation of a role (``roles.json``) looks like this:: { "alias": "sys1", - "name": โ€œRestricted System Roleโ€, - "description": โ€œA person who may only add datasets.โ€, + "name": "Restricted System Role", + "description": "A person who may only add datasets.", "permissions": [ "AddDataset" ] - } + } .. note:: alias is constrained to a length of 16 characters @@ -4564,17 +4858,49 @@ Create Role Roles can be created globally (:ref:`create-global-role`) or for individual Dataverse collections (:ref:`create-role-in-collection`). +.. _show-role: + Show Role ~~~~~~~~~ -Shows the role with ``id``:: +You must have ``ManageDataversePermissions`` to be able to show a role that was created using :ref:`create-role-in-collection`. Global roles (:ref:`create-global-role`) can only be shown with a superuser API token. - GET http://$SERVER/api/roles/$id +An example using a role alias: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ALIAS=sys1 + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/roles/:alias?alias=$ALIAS" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/roles/:alias?alias=sys1" + +An example using a role id: + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=11 + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/roles/$ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/roles/11" Delete Role ~~~~~~~~~~~ -A curl example using an ``ID`` +An example using a role id: .. code-block:: bash @@ -4590,13 +4916,13 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/24" -A curl example using a Role alias ``ALIAS`` +An example using a role alias: .. code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org - export ALIAS=roleAlias + export ALIAS=sys1 curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/roles/:alias?alias=$ALIAS" @@ -4604,8 +4930,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/:alias?alias=roleAlias" - + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/:alias?alias=sys1" Explicit Groups --------------- @@ -4890,12 +5215,14 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/info/settings/:MaxEmbargoDurationInMonths" -Get Export Formats -~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. _get-export-formats: + +Get Dataset Metadata Export Formats +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Get the available export formats, including custom formats. +Get the available dataset metadata export formats, including formats from external exporters (see :ref:`available-exporters`). -The response contains an object with available format names as keys, and as values an object with the following properties: +The response contains a JSON object with the available format names as keys (these can be passed to :ref:`export-dataset-metadata-api`), and values as objects with the following properties: * ``displayName`` * ``mediaType`` @@ -5002,6 +5329,27 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasetfields/facetables" +.. _setDisplayOnCreate: + +Set displayOnCreate for a Dataset Field +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Set displayOnCreate for a dataset field. See also :doc:`/admin/metadatacustomization` in the Admin Guide. + +.. code-block:: bash + + export SERVER_URL=http://localhost:8080 + export FIELD=subtitle + export BOOLEAN=true + + curl -X POST "$SERVER_URL/api/admin/datasetfield/setDisplayOnCreate?datasetFieldType=$FIELD&setDisplayOnCreate=$BOOLEAN" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -X POST "http://localhost:8080/api/admin/datasetfield/setDisplayOnCreate?datasetFieldType=studyAssayCellType&setDisplayOnCreate=true" + .. _Notifications: Notifications @@ -5713,22 +6061,43 @@ Creates a global role in the Dataverse installation. The data POSTed are assumed .. 
code-block:: bash
 
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
-  export SERVER_URL=https://demo.dataverse.org
-  export ID=root
+  export SERVER_URL=http://localhost:8080
+
+  curl -H "Content-Type: application/json" -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/admin/roles" --upload-file roles.json
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/admin/roles" --upload-file roles.json
+For ``roles.json`` see :ref:`json-representation-of-a-role`
+
+Update Global Role
+~~~~~~~~~~~~~~~~~~
+
+Update a global role in the Dataverse installation. The PUTed data is assumed to be a complete JSON role as it will overwrite the existing role. ::
+
+  PUT http://$SERVER/api/admin/roles/$ID
+
+A curl example using an ``ID``
+
+.. code-block:: bash
+
+  export SERVER_URL=http://localhost:8080
+  export ID=24
+
+  curl -H "Content-Type: application/json" -X PUT "$SERVER_URL/api/admin/roles/$ID" --upload-file roles.json
 
 For ``roles.json`` see :ref:`json-representation-of-a-role`
 
 Delete Global Role
 ~~~~~~~~~~~~~~~~~~
 
+Deletes a ``DataverseRole`` whose ``id`` is passed. ::
+
+  DELETE http://$SERVER/api/admin/roles/$ID
+
 A curl example using an ``ID``
 
 .. code-block:: bash
 
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
-  export SERVER_URL=https://demo.dataverse.org
+  export SERVER_URL=http://localhost:8080
   export ID=24
 
   curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/admin/roles/$ID"
@@ -6558,10 +6927,10 @@ A curl example using allowing access to a dataset's metadata
 
 Please see :ref:`dataverse.api.signature-secret` for the configuration option to add a shared secret, enabling extra security.
 
-.. _send-feedback:
+.. _send-feedback-admin:
 
-Send Feedback To Contact(s)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Send Feedback To Contact(s) Admin API
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 This API call allows sending an email to the contacts for a collection, dataset, or datafile or to the support email address when no object is specified. The call is protected by the normal /admin API protections (limited to localhost or requiring a separate key), but does not otherwise limit the sending of emails.
 
@@ -6584,6 +6953,44 @@ A curl example using an ``ID``
 
 Note that this call could be useful in coordinating with dataset authors (assuming they are also contacts) as an alternative/addition to the functionality provided by :ref:`return-a-dataset`.
 
+.. _send-feedback:
+
+Send Feedback To Contact(s)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This API call allows sending an email to the contacts for a collection, dataset, or datafile or to the support email address when no object is specified.
+The call is protected against embedded HTML in the body, and body size limits and rate limiting can be configured to avoid the potential for spam.
+
+The call is a POST with a JSON object as input with the following keys:
+
+- "targetId" - the id of the collection, dataset, or datafile. Persistent ids and collection aliases are not supported. (Optional)
+- "identifier" - the alias of a collection or the persistent id of a dataset or datafile. (Optional)
+- "subject" - the email subject line. (Required)
+- "body" - the email body to send. (Required)
+- "fromEmail" - the email to list in the reply-to field. (Dataverse always sends mail from the system email, but does it "on behalf of" and with a reply-to for the specified user. Authenticated users will have the 'fromEmail' filled in from their profile if this field is not specified.)
+
+A curl example using an ``ID``
+
+.. 
code-block:: bash
+
+   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+   export SERVER_URL=https://demo.dataverse.org
+   export JSON='{"targetId":24, "subject":"Data Question", "body":"Please help me understand your data. Thank you!"}'
+
+   curl -X POST -H "X-Dataverse-key:$API_TOKEN" -H 'Content-Type:application/json' -d "$JSON" "$SERVER_URL/api/sendfeedback"
+
+
+A curl example using a ``Dataverse Alias or Dataset/DataFile PersistentId``
+
+.. code-block:: bash
+
+   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+   export SERVER_URL=https://demo.dataverse.org
+   export JSON='{"identifier":"root", "subject":"Data Question", "body":"Please help me understand your data. Thank you!"}'
+
+   curl -X POST -H "X-Dataverse-key:$API_TOKEN" -H 'Content-Type:application/json' -d "$JSON" "$SERVER_URL/api/sendfeedback"
+
+Note that this call could be useful in coordinating with dataset authors (assuming they are also contacts) as an alternative/addition to the functionality provided by :ref:`return-a-dataset`.
+
 .. _thumbnail_reset:
 
 Reset Thumbnail Failure Flags
@@ -6617,6 +7024,8 @@ MyData
 
 The MyData API is used to get a list of just the datasets, dataverses or datafiles an authenticated user can edit.
 
+The API excludes dataverses linked to a harvesting client. This results in `a known issue `_ where regular datasets in harvesting dataverses are missing from the results.
+
 A curl example listing objects
 
 .. code-block:: bash
 
diff --git a/doc/sphinx-guides/source/container/configbaker-image.rst b/doc/sphinx-guides/source/container/configbaker-image.rst
index d098bd46436..09e431eb547 100644
--- a/doc/sphinx-guides/source/container/configbaker-image.rst
+++ b/doc/sphinx-guides/source/container/configbaker-image.rst
@@ -54,7 +54,7 @@ Scripts
     - Default script when running container without parameters. Lists available scripts and details about them.
   * - ``update-fields.sh``
     - Update a Solr ``schema.xml`` with a given list of metadata fields. See ``update-fields.sh -h`` for usage details
-      and :ref:`update-solr-schema` for an example use case.
+      and example use cases at :ref:`update-solr-schema` and :ref:`update-solr-schema-dev`.
 
 Solr Template
 ^^^^^^^^^^^^^
diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst
index b1945070714..2483d3217a5 100644
--- a/doc/sphinx-guides/source/container/running/demo.rst
+++ b/doc/sphinx-guides/source/container/running/demo.rst
@@ -160,6 +160,11 @@ Next, set up the UI toggle between English and French, again using the unblock k
 
 Stop and start the Dataverse container in order for the language toggle to work.
 
+PID Providers
++++++++++++++
+
+Dataverse supports multiple Persistent ID (PID) providers. The ``compose.yml`` file uses the Permalink PID provider. Follow :ref:`pids-configuration` to reconfigure as needed.
+
 Next Steps
 ----------
 
diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst
index f3d98fae0bf..75a50e2513d 100644
--- a/doc/sphinx-guides/source/developers/big-data-support.rst
+++ b/doc/sphinx-guides/source/developers/big-data-support.rst
@@ -44,7 +44,7 @@ Features that are Disabled if S3 Direct Upload is Enabled
 
 The following features are disabled when S3 direct upload is enabled.
 
 - Unzipping of zip files. (See :ref:`compressed-files`.)
-- Detection of file type based on JHOVE and custom code that reads the first few bytes. (See :ref:`redetect-file-type`.) 
+- Detection of file type based on JHOVE and custom code that reads the first few bytes except for the refinement of Stata file types to include the version. (See :ref:`redetect-file-type`.) - Extraction of metadata from FITS files. (See :ref:`fits`.) - Creation of NcML auxiliary files (See :ref:`netcdf-and-hdf5`.) - Extraction of a geospatial bounding box from NetCDF and HDF5 files (see :ref:`netcdf-and-hdf5`) unless :ref:`dataverse.netcdf.geo-extract-s3-direct-upload` is set to true. diff --git a/doc/sphinx-guides/source/developers/dataset-migration-api.rst b/doc/sphinx-guides/source/developers/dataset-migration-api.rst index fc86b7ccdcf..941527133ef 100644 --- a/doc/sphinx-guides/source/developers/dataset-migration-api.rst +++ b/doc/sphinx-guides/source/developers/dataset-migration-api.rst @@ -5,10 +5,15 @@ The Dataverse software includes several ways to add Datasets originally created This experimental migration API offers an additional option with some potential advantages: -* metadata can be specified using the json-ld format used in the OAI-ORE metadata export -* existing publication dates and PIDs are maintained (currently limited to the case where the PID can be managed by the Dataverse software, e.g. where the authority and shoulder match those the software is configured for) -* updating the PID at the provider can be done immediately or later (with other existing APIs) -* adding files can be done via the standard APIs, including using direct-upload to S3 +* Metadata can be specified using the json-ld format used in the OAI-ORE metadata export. Please note that the json-ld generated by OAI-ORE metadata export is not directly compatible with the Migration API. OAI-ORE export nests resource metadata under :code:`ore:describes` wrapper and Dataset Migration API requires that metadata is on the root level. Please check example file below for reference. + + * If you need a tool to convert OAI-ORE exported json-ld into a format compatible with the Dataset Migration API, or if you need to generate compatible json-ld from sources other than an existing Dataverse installation, the `BaseX `_ database engine, used together with the XQuery language, provides an efficient solution. Please see example script :download:`transform-oai-ore-jsonld.xq <../_static/api/transform-oai-ore-jsonld.xq>` for a simple conversion from exported OAI-ORE json-ld to a Dataset Migration API -compatible version. + +* Existing publication dates and PIDs are maintained (currently limited to the case where the PID can be managed by the Dataverse software, e.g. where the authority and shoulder match those the software is configured for) + +* Updating the PID at the provider can be done immediately or later (with other existing APIs). + +* Adding files can be done via the standard APIs, including using direct-upload to S3. This API consists of 2 calls: one to create an initial Dataset version, and one to 'republish' the dataset through Dataverse with a specified publication date. Both calls require super-admin privileges. @@ -31,7 +36,13 @@ To import a dataset with an existing persistent identifier (PID), the provided j curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets/:startmigration --upload-file dataset-migrate.jsonld -An example jsonld file is available at :download:`dataset-migrate.jsonld <../_static/api/dataset-migrate.jsonld>` . Note that you would need to replace the PID in the sample file with one supported in your Dataverse instance. 
+An example jsonld file is available at :download:`dataset-migrate.jsonld <../_static/api/dataset-migrate.jsonld>` . Note that you would need to replace the PID in the sample file with one supported in your Dataverse instance.
+
+You also need to replace the :code:`dataverse.siteUrl` in the json-ld :code:`@context` with your current Dataverse site URL. This is necessary to define a local URI for metadata terms originating from community metadata blocks (in the case of the example file, from the Social Sciences and Humanities and Geospatial blocks).
+
+As of Dataverse 6.5 and earlier, community metadata blocks do not assign a default global URI to the terms used in the block, in contrast to the citation metadata block, which has global URIs defined.
+
 
 Publish a Migrated Dataset
 --------------------------
diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst
index aed174f60d4..8f9b43eabcb 100755
--- a/doc/sphinx-guides/source/developers/making-releases.rst
+++ b/doc/sphinx-guides/source/developers/making-releases.rst
@@ -30,7 +30,24 @@ Early on, make sure it's clear what type of release this is. The steps below des
 Ensure Issues Have Been Created
 -------------------------------
 
-In advance of a release, GitHub issues should have been created already that capture certain steps. See https://github.com/IQSS/dataverse-pm/issues/335 for examples.
+Some of the steps in this document are well-served by having their own dedicated GitHub issue. You'll see a label like this on them:
+
+|dedicated|
+
+There are a variety of reasons why a step might deserve its own dedicated issue:
+
+- The step can be done by a team member other than the person doing the release.
+- Stakeholders might be interested in the status of a step (e.g. has the release been deployed to the demo site).
+
+Steps don't get their own dedicated issue if it would be confusing to have multiple people involved. Too many cooks in the kitchen, as they say. Also, some steps are so small the overhead of an issue isn't worth it.
+
+Before the release even begins, you can coordinate with the project manager about the creation of these issues.
+
+.. |dedicated| raw:: html
+
+    
+    Dedicated Issue
+     
 
 Declare a Code Freeze
 ---------------------
@@ -40,18 +57,25 @@ The following steps are made more difficult if code is changing in the "develop"
 Conduct Performance Testing
 ---------------------------
 
+|dedicated|
+
 See :doc:`/qa/performance-tests` for details.
 
-Conduct Smoke Testing
----------------------
+Conduct Regression Testing
+--------------------------
+
+|dedicated|
 
 See :doc:`/qa/testing-approach` for details.
+Refer to the provided regression checklist for the list of items to verify during the testing process: `Regression Checklist `_.
 
 .. _write-release-notes:
 
 Write Release Notes
 -------------------
 
+|dedicated|
+
 Developers express the need for an addition to release notes by creating a "release note snippet" in ``/doc/release-notes`` containing the name of the issue they're working on. The name of the branch could be used for the filename with ".md" appended (release notes are written in Markdown) such as ``5053-apis-custom-homepage.md``. See :ref:`writing-release-note-snippets` for how this is described for contributors.
 
 The task at or near release time is to collect these snippets into a single file. 
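One possible way to start that collection step (a sketch only; the draft file name is hypothetical and the snippets still need to be edited into coherent notes by hand):

.. code-block:: bash

   cd doc/release-notes
   # Concatenate the issue-numbered snippet files (e.g. 5053-apis-custom-homepage.md)
   # into a single draft outside the repo, then edit it by hand for the release.
   cat [0-9][0-9][0-9][0-9]*.md > /tmp/release-notes-draft.md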
@@ -62,17 +86,22 @@ The task at or near release time is to collect these snippets into a single file
 
 - Include instructions describing the steps required to upgrade the application from the previous version. These must be customized for release numbers and special circumstances such as changes to metadata blocks and infrastructure.
 - Take the release notes .md through the regular Code Review and QA process. That is, make a pull request. Here's an example: https://github.com/IQSS/dataverse/pull/10866
 
-Upgrade Instructions for Internal
----------------------------------
+Deploy Release Candidate to Internal
+------------------------------------
+
+|dedicated|
 
 To upgrade internal, go to /doc/release-notes, open the release-notes.md file for the current release and perform all the steps under "Upgrade Instructions".
 
 Deploy Release Candidate to Demo
 --------------------------------
 
+|dedicated|
+
 First, build the release candidate. ssh into the dataverse-internal server and undeploy the current war file.
 
+Go to /doc/release-notes, open the release-notes.md file for the current release, and perform all the steps under "Upgrade Instructions".
+
 Go to https://jenkins.dataverse.org/job/IQSS_Dataverse_Internal/ and make the following adjustments to the config:
 
@@ -91,6 +120,8 @@ ssh into the demo server and follow the upgrade instructions in the release note
 Prepare Release Branch
 ----------------------
 
+|dedicated|
+
 The release branch will have the final changes such as bumping the version number.
 
 Usually we branch from the "develop" branch to create the release branch. If we are creating a hotfix for a particular version (5.11, for example), we branch from the tag (e.g. ``v5.11``).
 
@@ -116,18 +147,20 @@ Return to the parent pom and make the following change, which is necessary for p
 
 (Before you make this change the value should be ``${parsedVersion.majorVersion}.${parsedVersion.nextMinorVersion}``. Later on, after cutting a release, we'll change it back to that value.)
 
-For a regular release, make the changes above in the release branch you created, make a pull request, and merge it into the "develop" branch. Like usual, you can safely delete the branch after the merge is complete.
+For a regular release, make the changes above in the release branch you created, but hold off for a moment on making a pull request, because Jenkins will fail while it is still configured for the previous release.
 
-If you are making a hotfix release, make the pull request against the "master" branch. Do not delete the branch after merging because we will later merge it into the "develop" branch to pick up the hotfix. More on this later.
+In the dataverse-ansible repo, bump the version in `jenkins.yml `_ and make a pull request such as https://github.com/gdcc/dataverse-ansible/pull/386. Wait for it to be merged. Note that bumping the version on the Jenkins side like this means that all pull requests will show failures in Jenkins until they are updated to the version we are releasing.
 
-Either way, as usual, you should ensure that all tests are passing. Please note that you will need to bump the version in `jenkins.yml `_ in dataverse-ansible to get the tests to pass. Consider doing this before making the pull request. Alternatively, you can bump jenkins.yml after making the pull request and re-run the Jenkins job to make sure tests pass.
+Once dataverse-ansible has been merged, return to the branch you created above ("10852-bump-to-6.4" or whatever) and make a pull request. 
Ensure that all tests are passing and then put the PR through the normal review and QA process.
+
+If you are making a hotfix release, make the pull request against the "master" branch. Do not delete the branch after merging because we will later merge it into the "develop" branch to pick up the hotfix. More on this later.
 
 Merge "develop" into "master"
 -----------------------------
 
 If this is a regular (non-hotfix) release, create a pull request to merge the "develop" branch into the "master" branch using this "compare" link: https://github.com/IQSS/dataverse/compare/master...develop
 
-Once important tests have passed (compile, unit tests, etc.), merge the pull request. Don't worry about style tests failing such as for shell scripts.
+Once important tests have passed (compile, unit tests, etc.), merge the pull request (skipping code review is ok). Don't worry about style tests failing such as for shell scripts.
 
 If this is a hotfix release, skip this whole "merge develop to master" step (the "develop" branch is not involved until later).
 
@@ -160,7 +193,7 @@ Go to https://jenkins.dataverse.org/job/guides.dataverse.org/ and make the follo
 
 - Repository URL: ``https://github.com/IQSS/dataverse.git``
 - Branch Specifier (blank for 'any'): ``*/master``
-- ``VERSION`` (under "Build Steps"): ``5.10.1`` (for example)
+- ``VERSION`` (under "Build Steps"): bump to the next release. Don't prepend a "v". Use ``5.10.1`` (for example)
 
 Click "Save" then "Build Now".
 
@@ -265,24 +298,37 @@ Close Milestone on GitHub and Create a New One
 
 You can find our milestones at https://github.com/IQSS/dataverse/milestones
 
-Now that we've published the release, close the milestone and create a new one.
+Now that we've published the release, close the milestone and create a new one for the **next** release (the release **after** the one we're working on).
 
 Note that for milestones we use just the number without the "v" (e.g. "5.10.1").
 
+On the project board at https://github.com/orgs/IQSS/projects/34 edit the tab (view) that shows the milestone to show the next milestone.
+
 Update the Container Base Image Version Property
 ------------------------------------------------
 
+|dedicated|
+
 Create a new branch (any name is fine but ``prepare-next-iteration`` is suggested) and update the following files to prepare for the next development cycle:
 
 - modules/dataverse-parent/pom.xml -> ```` -> profile "ct" -> ```` -> Set ```` to ``${parsedVersion.majorVersion}.${parsedVersion.nextMinorVersion}``
 
-Now create a pull request and merge it.
+Create a pull request and put it through code review, like usual. Give it a milestone of the next release, the one **after** the one we're working on. Once the pull request has been approved, merge it. It should be the first PR merged of the next release.
 
 For more background, see :ref:`base-supported-image-tags`.
 
 For an example, see https://github.com/IQSS/dataverse/pull/10896
 
+Lift the Code Freeze and Encourage Developers to Update Their Branches
+----------------------------------------------------------------------
+
+It's now safe to lift the code freeze. We can start merging pull requests into the "develop" branch for the next release.
+
+Let developers know that they should merge the latest from the "develop" branch into any branches they are working on.
+
 Deploy Final Release on Demo
 ----------------------------
 
+|dedicated|
+
 Above you already did the hard work of deploying a release candidate to https://demo.dataverse.org. 
It should be relatively straightforward to undeploy the release candidate and deploy the final release. Update SchemaSpy @@ -316,6 +362,11 @@ Announce the Release on the Mailing List Post a message at https://groups.google.com/g/dataverse-community +Announce the Release on Zulip +----------------------------- + +Post a message under #community at https://dataverse.zulipchat.com + For Hotfixes, Merge Hotfix Branch into "develop" and Rename SQL Scripts ----------------------------------------------------------------------- diff --git a/doc/sphinx-guides/source/developers/tips.rst b/doc/sphinx-guides/source/developers/tips.rst index f5ffbac0c07..8d715c7d016 100755 --- a/doc/sphinx-guides/source/developers/tips.rst +++ b/doc/sphinx-guides/source/developers/tips.rst @@ -185,7 +185,24 @@ Solr Once some Dataverse collections, datasets, and files have been created and indexed, you can experiment with searches directly from Solr at http://localhost:8983/solr/#/collection1/query and look at the JSON output of searches, such as this wildcard search: http://localhost:8983/solr/collection1/select?q=*%3A*&wt=json&indent=true . You can also get JSON output of static fields Solr knows about: http://localhost:8983/solr/collection1/schema/fields -You can simply double-click "start.jar" rather that running ``java -jar start.jar`` from the command line. Figuring out how to stop Solr after double-clicking it is an exercise for the reader. +You can simply double-click "start.jar" rather than running ``java -jar start.jar`` from the command line. Figuring out how to stop Solr after double-clicking it is an exercise for the reader. + +.. _update-solr-schema-dev: + +Updating the Solr Schema (Developers) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Both developers and sysadmins need to update the Solr schema from time to time. One difference is that developers will be committing changes to ``conf/solr/schema.xml`` in git. To prevent cross-platform differences in the git history, when running the ``update-fields.sh`` script, we ask all developers to run the script from within Docker. (See :doc:`/container/configbaker-image` for more on the image we'll use below.) + +.. code-block:: + + curl http://localhost:8080/api/admin/index/solr/schema | docker run -i --rm -v ./docker-dev-volumes/solr/data:/var/solr gdcc/configbaker:unstable update-fields.sh /var/solr/data/collection1/conf/schema.xml + + cp docker-dev-volumes/solr/data/data/collection1/conf/schema.xml conf/solr/schema.xml + +At this point you can do a ``git diff`` and see if your changes make sense before committing. + +Sysadmins are welcome to run ``update-fields.sh`` however they like. See :ref:`update-solr-schema` in the Admin Guide for details. Git --- diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 6fd40b8015b..05a17992acf 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -307,7 +307,7 @@ to be compatible with the MicroProfile specification which means that Global Settings ^^^^^^^^^^^^^^^ -The following three global settings are required to configure PID Providers in the Dataverse software: +The following two global settings are required to configure PID Providers in the Dataverse software: .. _dataverse.pid.providers: @@ -581,6 +581,7 @@ Note: - If you configure ``base-url``, it should include a "/" after the hostname like this: ``https://demo.dataverse.org/``. 
- When using multiple PermaLink providers, you should avoid ambiguous authority/separator/shoulder combinations that would result in the same overall prefix. +- Configuring PermaLink providers differing only by their separator values is not supported. - In general, PermaLink authority/shoulder values should be alphanumeric. For other cases, admins may need to consider the potential impact of special characters in S3 storage identifiers, resolver URLs, exports, etc. .. _dataverse.pid.*.handlenet: @@ -1093,6 +1094,8 @@ The Dataverse Software S3 driver supports multi-part upload for large files (ove First: Set Up Accounts and Access Credentials ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +**Note:** As of version 5.14, if Dataverse is running on an EC2 instance, it will prefer Role-Based Access Control over the S3 default profile, even if administrators configure Dataverse with programmatic access keys. Named profiles can still be used to override RBAC for specific datastores. RBAC is preferable from a security perspective because there are no keys to rotate or have stolen. If you intend to assign a role to your EC2 instance, you will still need the ``~/.aws/config`` file to specify the region, but you need not generate credentials for the default profile. For more information, please see https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2.html + The Dataverse Software and the AWS SDK make use of the "AWS credentials profile file" and "AWS config profile file" located in ``~/.aws/`` where ``~`` is the home directory of the user you run Payara as. This file can be generated via either of two methods described below: @@ -1116,13 +1119,6 @@ To **create a user** with full S3 access and nothing more for security reasons, for more info on this process. To use programmatic access, **Generate the user keys** needed for a Dataverse installation afterwards by clicking on the created user. -(You can skip this step when running on EC2, see below.) - -.. TIP:: - If you are hosting your Dataverse installation on an AWS EC2 instance alongside storage in S3, it is possible to use IAM Roles instead - of the credentials file (the file at ``~/.aws/credentials`` mentioned below). Please note that you will still need the - ``~/.aws/config`` file to specify the region. For more information on this option, see - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2.html Preparation When Using Custom S3-Compatible Service ################################################### @@ -1854,6 +1850,128 @@ For Google Analytics, the example script at :download:`analytics-code.html + + + +2. Add to ``analytics-code.html``: + +```` + +3. Go to https://playground.cookieconsent.orestbida.com to configure, download, and copy the contents of ``cookieconsent-config.js`` to ``analytics-code.html``. It should look something like this: + +.. code-block:: html + + + +After restarting or reloading Dataverse the cookie consent popup should appear, looking something like this: + +|cookieconsent| + +.. |cookieconsent| image:: ./img/cookie-consent-example.png + :class: img-responsive + +If you change the cookie consent config in ``CookieConsent.run()`` and want to test your changes, you should remove the cookie called ``cc_cookie`` in your browser and reload the Dataverse page to have the popup appear again. To remove cookies, use Application > Cookies in the Chrome/Edge dev tool, and Storage > Cookies in Firefox and Safari. + ..
_license-config: Configuring Licenses @@ -3361,9 +3479,6 @@ please find all known feature flags below. Any of these flags can be activated u * - reduce-solr-deletes - Avoids deleting and recreating solr documents for dataset files when reindexing. - ``Off`` - * - reduce-solr-deletes - - Avoids deleting and recreating solr documents for dataset files when reindexing. - - ``Off`` * - disable-return-to-author-reason - Removes the reason field in the `Publish/Return To Author` dialog that was added as a required field in v6.2 and makes the reason an optional parameter in the :ref:`return-a-dataset` API call. - ``Off`` @@ -4427,7 +4542,12 @@ This is enabled via the new setting `:MDCStartDate` that specifies the cut-over ``curl -X PUT -d '2019-10-01' http://localhost:8080/api/admin/settings/:MDCStartDate`` +:ContactFeedbackMessageSizeLimit +++++++++++++++++++++++++++++++++ + +Maximum length of the text body that can be sent to the contacts of a Collection, Dataset, or DataFile. Setting this limit to Zero will denote unlimited length. +``curl -X PUT -d 1080 http://localhost:8080/api/admin/settings/:ContactFeedbackMessageSizeLimit`` .. _:Languages: @@ -4662,6 +4782,9 @@ The commands below should give you an idea of how to load the configuration, but ``curl -X PUT --upload-file cvoc-conf.json http://localhost:8080/api/admin/settings/:CVocConf`` +Since external vocabulary scripts can change how fields are indexed (storing an identifier and name and/or values in different languages), +updating the Solr schema as described in :ref:`update-solr-schema` should be done after adding new scripts to your configuration. + .. _:ControlledVocabularyCustomJavaScript: :ControlledVocabularyCustomJavaScript diff --git a/doc/sphinx-guides/source/installation/img/cookie-consent-example.png b/doc/sphinx-guides/source/installation/img/cookie-consent-example.png new file mode 100644 index 00000000000..0dfe1fb113b Binary files /dev/null and b/doc/sphinx-guides/source/installation/img/cookie-consent-example.png differ diff --git a/doc/sphinx-guides/source/user/appendix.rst b/doc/sphinx-guides/source/user/appendix.rst index df9b6704209..4521b781475 100755 --- a/doc/sphinx-guides/source/user/appendix.rst +++ b/doc/sphinx-guides/source/user/appendix.rst @@ -30,6 +30,7 @@ Detailed below are what metadata schemas we support for Citation and Domain Spec `Virtual Observatory (VO) Discovery and Provenance Metadata `__. - Life Sciences Metadata (`see .tsv `__): based on `ISA-Tab Specification `__, along with controlled vocabulary from subsets of the `OBI Ontology `__ and the `NCBI Taxonomy for Organisms `__. - Journal Metadata (`see .tsv `__): based on the `Journal Archiving and Interchange Tag Set, version 1.2 `__. +- 3D Objects Metadata (`see .tsv `__). Experimental Metadata ~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index b3a14554b40..1418aa627d5 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -43,6 +43,8 @@ Additional formats can be enabled. See :ref:`inventory-of-external-exporters` in Each of these metadata exports contains the metadata of the most recently published version of the dataset. +For each dataset, links to each enabled metadata format are available programmatically via Signposting. For details, see :ref:`discovery-sign-posting` in the Admin Guide and :ref:`signposting-api` in the API Guide. + .. 
_adding-new-dataset: Adding a New Dataset @@ -50,8 +52,10 @@ Adding a New Dataset #. Navigate to the Dataverse collection in which you want to add a dataset. #. Click on the "Add Data" button and select "New Dataset" in the dropdown menu. **Note:** If you are on the root Dataverse collection, your My Data page or click the "Add Data" link in the navbar, the dataset you create will be hosted in the root Dataverse collection. You can change this by selecting another Dataverse collection you have proper permissions to create datasets in, from the Host Dataverse collection dropdown in the create dataset form. This option to choose will not be available after you create the dataset. -#. To quickly get started, enter at minimum all the required fields with an asterisk (e.g., the Dataset Title, Author Name, - Description Text, Point of Contact Email, and Subject) to get a Data Citation with a DOI. +#. To quickly get started, enter at minimum all the required fields with an asterisk (e.g., the Dataset Title, Author Name, Description Text, Point of Contact Email, and Subject) to get a Data Citation with a DOI. + + #. When entering author identifiers, select the type from the dropdown (e.g. "ORCID") and under "Identifier" enter just the unique identifier (e.g. "0000-0002-1825-0097") rather than the full URL (e.g. "https://orcid.org/0000-0002-1825-0097"). + + #. Scroll down to the "Files" section and click on "Select Files to Add" to add all the relevant files to your Dataset. You can also upload your files directly from your Dropbox. **Tip:** You can drag and drop or select multiple files at a time from your desktop directly into the upload widget. Your files will appear below the "Select Files to Add" button where you can add a @@ -681,17 +685,26 @@ If you have a Contributor role (can edit metadata, upload files, and edit files, Preview URL to Review Unpublished Dataset ========================================= -Creating a Preview URL for your dataset allows you to share your dataset (for viewing and downloading of files) before it is published to a wide group of individuals who may not have a user account on the Dataverse installation. Anyone you send the Preview URL to will not have to log into the Dataverse installation to view the dataset. +Creating a Preview URL for a draft version of your dataset allows you to share your dataset (for viewing and downloading of files) before it is published to a wide group of individuals who may not have a user account on the Dataverse installation. Anyone you send the Preview URL to will not have to log into the Dataverse installation to view the unpublished dataset. Once a dataset has been published you may create new General Preview URLs for subsequent draft versions, but the Anonymous Preview URL will no longer be available. -**Note:** To create a Preview URL, you must have the *ManageDatasetPermissions* permission for your dataset, usually given by the :ref:`roles ` *Curator* or *Administrator*. +**Note:** To create a Preview URL, you must have the *ManageDatasetPermissions* permission for your draft dataset, usually given by the :ref:`roles ` *Curator* or *Administrator*. #. Go to your unpublished dataset #. Select the “Edit” button #. Select “Preview URL” in the dropdown menu -#. In the pop-up select “Create General Preview URL” or “Create URL for Anonymized Access”. The latter supports anonymous review by removing author names and other potentially identifying information from citations, version history tables, and some metadata fields (as configured by the administrator). +#. In the pop-up select “Create General Preview URL” or “Create Anonymous Preview URL”. The latter supports anonymous review by removing author names and other potentially identifying information from citations, version history tables, and some metadata fields (as configured by the administrator). #. Copy the Preview URL which has been created for this dataset and it can now be shared with anyone you wish to have access to view or download files in your unpublished dataset. To disable a Preview URL and to revoke access, follow the same steps as above until step #3 when you return to the popup, click the “Disable Preview URL” button. + +**Note:** Before distributing an anonymized Preview URL, it is recommended that you view the dataset as a potential user to verify that the available metadata does not reveal authorship, etc. + +#. Create an Anonymous Preview URL for your unpublished dataset via the Preview URL popup from the Edit Dataset button +#. Copy the Anonymous Preview URL to your clipboard +#. Log out of the Dataverse application +#. Open the dataset using the Anonymous Preview URL you plan to distribute to view it as a reviewer would. +#. It may be necessary for you to further edit your draft dataset's metadata to remove identifying items before you distribute the Anonymous Preview URL. + Note that only one Preview URL (normal or with anonymized access) can be configured per dataset at a time. Embargoes @@ -790,13 +803,15 @@ If you deaccession the most recently published version of the dataset but not al Dataset Types ============= +.. note:: Development of the dataset types feature is ongoing. Please see https://github.com/IQSS/dataverse-pm/issues/307 for details. + Out of the box, all datasets have a dataset type of "dataset". Superusers can add additional types such as "software" or "workflow" using the :ref:`api-add-dataset-type` API endpoint. Once more than one type appears in search results, a facet called "Dataset Type" will appear allowing you to filter down to a certain type. If your installation is configured to use DataCite as a persistent ID (PID) provider, the appropriate type ("Dataset", "Software", "Workflow") will be sent to DataCite when the dataset is published for those three types. -Currently, the dataset type can only be specified via API and only when the dataset is created. For details, see the following sections of the API guide: +Currently, specifying a type for a dataset can only be done via API and only when the dataset is created. The type can't be changed afterward. For details, see the following sections of the API guide: - :ref:`api-create-dataset-with-type` (Native API) - :ref:`api-semantic-create-dataset-with-type` (Semantic API) @@ -804,7 +819,7 @@ Currently, the dataset type can only be specified via API and only when the data Dataset types can be listed, added, or deleted via API. See :ref:`api-dataset-types` in the API Guide for more. -Development of the dataset types feature is ongoing. Please see https://github.com/IQSS/dataverse/issues/10489 for details. +Dataset types can be linked with metadata blocks to make fields from those blocks available when datasets of that type are created or edited. See :ref:`api-link-dataset-type` and :ref:`list-metadata-blocks-for-a-collection` for details. ..
|image1| image:: ./img/DatasetDiagram.png :class: img-responsive diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index ce181d27887..fdde14cdee5 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -90,6 +90,8 @@ services: - dev networks: - dataverse + volumes: + - ./docker-dev-volumes/solr/data:/var/solr dev_dv_initializer: container_name: "dev_dv_initializer" diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 60ed130612e..bc0fe9825ba 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -20,12 +20,12 @@ services: -Ddataverse.files.file1.type=file -Ddataverse.files.file1.label=Filesystem -Ddataverse.files.file1.directory=${STORAGE_DIR}/store - -Ddataverse.pid.providers=fake - -Ddataverse.pid.default-provider=fake - -Ddataverse.pid.fake.type=FAKE - -Ddataverse.pid.fake.label=FakeDOIProvider - -Ddataverse.pid.fake.authority=10.5072 - -Ddataverse.pid.fake.shoulder=FK2/ + -Ddataverse.pid.providers=perma1 + -Ddataverse.pid.default-provider=perma1 + -Ddataverse.pid.perma1.type=perma + -Ddataverse.pid.perma1.label=Perma1 + -Ddataverse.pid.perma1.authority=DV + -Ddataverse.pid.perma1.permalink.separator=/ #-Ddataverse.lang.directory=/dv/lang ports: - "8080:8080" # HTTP (Dataverse Application) diff --git a/scripts/api/data/dataset-create-new-additional-default-fields.json b/scripts/api/data/dataset-create-new-additional-default-fields.json new file mode 100644 index 00000000000..30d6bde4355 --- /dev/null +++ b/scripts/api/data/dataset-create-new-additional-default-fields.json @@ -0,0 +1,1533 @@ +{ + "datasetVersion": { + "license": { + "name": "CC0 1.0", + "uri": "http://creativecommons.org/publicdomain/zero/1.0" + }, + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "Replication Data for: Title" + }, + { + "typeName": "subtitle", + "multiple": false, + "typeClass": "primitive", + "value": "Subtitle" + }, + { + "typeName": "alternativeTitle", + "multiple": true, + "typeClass": "primitive", + "value": ["Alternative Title"] + }, + { + "typeName": "alternativeURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://AlternativeURL.org" + }, + { + "typeName": "otherId", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "otherIdAgency": { + "typeName": "otherIdAgency", + "multiple": false, + "typeClass": "primitive", + "value": "OtherIDAgency1" + }, + "otherIdValue": { + "typeName": "otherIdValue", + "multiple": false, + "typeClass": "primitive", + "value": "OtherIDIdentifier1" + } + }, + { + "otherIdAgency": { + "typeName": "otherIdAgency", + "multiple": false, + "typeClass": "primitive", + "value": "OtherIDAgency2" + }, + "otherIdValue": { + "typeName": "otherIdValue", + "multiple": false, + "typeClass": "primitive", + "value": "OtherIDIdentifier2" + } + } + ] + }, + { + "typeName": "author", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "LastAuthor1, FirstAuthor1" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "AuthorAffiliation1" + }, + "authorIdentifierScheme": { + "typeName": "authorIdentifierScheme", + "multiple": false, + "typeClass": "controlledVocabulary", + "value": "ORCID" + }, + "authorIdentifier": { + "typeName": "authorIdentifier", + 
"multiple": false, + "typeClass": "primitive", + "value": "AuthorIdentifier1" + } + }, + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "LastAuthor2, FirstAuthor2" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "AuthorAffiliation2" + }, + "authorIdentifierScheme": { + "typeName": "authorIdentifierScheme", + "multiple": false, + "typeClass": "controlledVocabulary", + "value": "ISNI" + }, + "authorIdentifier": { + "typeName": "authorIdentifier", + "multiple": false, + "typeClass": "primitive", + "value": "AuthorIdentifier2" + } + } + ] + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "LastContact1, FirstContact1" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "ContactAffiliation1" + }, + "datasetContactEmail": { + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "ContactEmail1@mailinator.com" + } + }, + { + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "LastContact2, FirstContact2" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "ContactAffiliation2" + }, + "datasetContactEmail": { + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "ContactEmail2@mailinator.com" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "DescriptionText1" + }, + "dsDescriptionDate": { + "typeName": "dsDescriptionDate", + "multiple": false, + "typeClass": "primitive", + "value": "1000-01-01" + } + }, + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "DescriptionText2" + }, + "dsDescriptionDate": { + "typeName": "dsDescriptionDate", + "multiple": false, + "typeClass": "primitive", + "value": "1000-02-02" + } + } + ] + }, + { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Agricultural Sciences", + "Business and Management", + "Engineering", + "Law" + ] + }, + { + "typeName": "keyword", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "keywordValue": { + "typeName": "keywordValue", + "multiple": false, + "typeClass": "primitive", + "value": "KeywordTerm1" + }, + "keywordTermURI": { + "typeName": "keywordTermURI", + "multiple": false, + "typeClass": "primitive", + "value": "http://keywordTermURI1.org" + }, + "keywordVocabulary": { + "typeName": "keywordVocabulary", + "multiple": false, + "typeClass": "primitive", + "value": "KeywordVocabulary1" + }, + "keywordVocabularyURI": { + "typeName": "keywordVocabularyURI", + "multiple": false, + "typeClass": "primitive", + "value": "http://KeywordVocabularyURL1.org" + } + }, + { + "keywordValue": { + "typeName": "keywordValue", + "multiple": false, + "typeClass": "primitive", + "value": "KeywordTerm2" + }, + "keywordTermURI": { + "typeName": "keywordTermURI", + "multiple": false, + "typeClass": 
"primitive", + "value": "http://keywordTermURI2.org" + }, + "keywordVocabulary": { + "typeName": "keywordVocabulary", + "multiple": false, + "typeClass": "primitive", + "value": "KeywordVocabulary2" + }, + "keywordVocabularyURI": { + "typeName": "keywordVocabularyURI", + "multiple": false, + "typeClass": "primitive", + "value": "http://KeywordVocabularyURL2.org" + } + } + ] + }, + { + "typeName": "topicClassification", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "topicClassValue": { + "typeName": "topicClassValue", + "multiple": false, + "typeClass": "primitive", + "value": "Topic Classification Term1" + }, + "topicClassVocab": { + "typeName": "topicClassVocab", + "multiple": false, + "typeClass": "primitive", + "value": "Topic Classification Vocab1" + }, + "topicClassVocabURI": { + "typeName": "topicClassVocabURI", + "multiple": false, + "typeClass": "primitive", + "value": "https://TopicClassificationURL1.com" + } + }, + { + "topicClassValue": { + "typeName": "topicClassValue", + "multiple": false, + "typeClass": "primitive", + "value": "Topic Classification Term2" + }, + "topicClassVocab": { + "typeName": "topicClassVocab", + "multiple": false, + "typeClass": "primitive", + "value": "Topic Classification Vocab2" + }, + "topicClassVocabURI": { + "typeName": "topicClassVocabURI", + "multiple": false, + "typeClass": "primitive", + "value": "https://TopicClassificationURL2.com" + } + } + ] + }, + { + "typeName": "publication", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "publicationRelationType" : { + "typeName" : "publicationRelationType", + "multiple" : false, + "typeClass" : "controlledVocabulary", + "value" : "IsSupplementTo" + }, + "publicationCitation": { + "typeName": "publicationCitation", + "multiple": false, + "typeClass": "primitive", + "value": "RelatedPublicationCitation1" + }, + "publicationIDType": { + "typeName": "publicationIDType", + "multiple": false, + "typeClass": "controlledVocabulary", + "value": "ark" + }, + "publicationIDNumber": { + "typeName": "publicationIDNumber", + "multiple": false, + "typeClass": "primitive", + "value": "RelatedPublicationIDNumber1" + }, + "publicationURL": { + "typeName": "publicationURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://RelatedPublicationURL1.org" + } + }, + { + "publicationCitation": { + "typeName": "publicationCitation", + "multiple": false, + "typeClass": "primitive", + "value": "RelatedPublicationCitation2" + }, + "publicationIDType": { + "typeName": "publicationIDType", + "multiple": false, + "typeClass": "controlledVocabulary", + "value": "arXiv" + }, + "publicationIDNumber": { + "typeName": "publicationIDNumber", + "multiple": false, + "typeClass": "primitive", + "value": "RelatedPublicationIDNumber2" + }, + "publicationURL": { + "typeName": "publicationURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://RelatedPublicationURL2.org" + } + } + ] + }, + { + "typeName": "notesText", + "multiple": false, + "typeClass": "primitive", + "value": "Notes1" + }, + { + "typeName": "language", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Abkhaz", + "Afar" + ] + }, + { + "typeName": "producer", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "producerName": { + "typeName": "producerName", + "multiple": false, + "typeClass": "primitive", + "value": "LastProducer1, FirstProducer1" + }, + "producerAffiliation": { + "typeName": "producerAffiliation", + "multiple": false, + "typeClass": "primitive", + 
"value": "ProducerAffiliation1" + }, + "producerAbbreviation": { + "typeName": "producerAbbreviation", + "multiple": false, + "typeClass": "primitive", + "value": "ProducerAbbreviation1" + }, + "producerURL": { + "typeName": "producerURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://ProducerURL1.org" + }, + "producerLogoURL": { + "typeName": "producerLogoURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://ProducerLogoURL1.org" + } + }, + { + "producerName": { + "typeName": "producerName", + "multiple": false, + "typeClass": "primitive", + "value": "LastProducer2, FirstProducer2" + }, + "producerAffiliation": { + "typeName": "producerAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "ProducerAffiliation2" + }, + "producerAbbreviation": { + "typeName": "producerAbbreviation", + "multiple": false, + "typeClass": "primitive", + "value": "ProducerAbbreviation2" + }, + "producerURL": { + "typeName": "producerURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://ProducerURL2.org" + }, + "producerLogoURL": { + "typeName": "producerLogoURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://ProducerLogoURL2.org" + } + } + ] + }, + { + "typeName": "productionDate", + "multiple": false, + "typeClass": "primitive", + "value": "1003-01-01" + }, + { + "typeName": "productionPlace", + "multiple": true, + "typeClass": "primitive", + "value": ["ProductionPlace","Second ProductionPlace"] + }, + { + "typeName": "contributor", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "contributorType": { + "typeName": "contributorType", + "multiple": false, + "typeClass": "controlledVocabulary", + "value": "Data Collector" + }, + "contributorName": { + "typeName": "contributorName", + "multiple": false, + "typeClass": "primitive", + "value": "LastContributor1, FirstContributor1" + } + }, + { + "contributorType": { + "typeName": "contributorType", + "multiple": false, + "typeClass": "controlledVocabulary", + "value": "Data Curator" + }, + "contributorName": { + "typeName": "contributorName", + "multiple": false, + "typeClass": "primitive", + "value": "LastContributor2, FirstContributor2" + } + } + ] + }, + { + "typeName": "grantNumber", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "grantNumberAgency": { + "typeName": "grantNumberAgency", + "multiple": false, + "typeClass": "primitive", + "value": "GrantInformationGrantAgency1" + }, + "grantNumberValue": { + "typeName": "grantNumberValue", + "multiple": false, + "typeClass": "primitive", + "value": "GrantInformationGrantNumber1" + } + }, + { + "grantNumberAgency": { + "typeName": "grantNumberAgency", + "multiple": false, + "typeClass": "primitive", + "value": "GrantInformationGrantAgency2" + }, + "grantNumberValue": { + "typeName": "grantNumberValue", + "multiple": false, + "typeClass": "primitive", + "value": "GrantInformationGrantNumber2" + } + } + ] + }, + { + "typeName": "distributor", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "distributorName": { + "typeName": "distributorName", + "multiple": false, + "typeClass": "primitive", + "value": "LastDistributor1, FirstDistributor1" + }, + "distributorAffiliation": { + "typeName": "distributorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "DistributorAffiliation1" + }, + "distributorAbbreviation": { + "typeName": "distributorAbbreviation", + "multiple": false, + "typeClass": "primitive", + "value": "DistributorAbbreviation1" + }, 
+ "distributorURL": { + "typeName": "distributorURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://DistributorURL1.org" + }, + "distributorLogoURL": { + "typeName": "distributorLogoURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://DistributorLogoURL1.org" + } + }, + { + "distributorName": { + "typeName": "distributorName", + "multiple": false, + "typeClass": "primitive", + "value": "LastDistributor2, FirstDistributor2" + }, + "distributorAffiliation": { + "typeName": "distributorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "DistributorAffiliation2" + }, + "distributorAbbreviation": { + "typeName": "distributorAbbreviation", + "multiple": false, + "typeClass": "primitive", + "value": "DistributorAbbreviation2" + }, + "distributorURL": { + "typeName": "distributorURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://DistributorURL2.org" + }, + "distributorLogoURL": { + "typeName": "distributorLogoURL", + "multiple": false, + "typeClass": "primitive", + "value": "http://DistributorLogoURL2.org" + } + } + ] + }, + { + "typeName": "distributionDate", + "multiple": false, + "typeClass": "primitive", + "value": "1004-01-01" + }, + { + "typeName": "depositor", + "multiple": false, + "typeClass": "primitive", + "value": "LastDepositor, FirstDepositor" + }, + { + "typeName": "dateOfDeposit", + "multiple": false, + "typeClass": "primitive", + "value": "1002-01-01" + }, + { + "typeName": "timePeriodCovered", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "timePeriodCoveredStart": { + "typeName": "timePeriodCoveredStart", + "multiple": false, + "typeClass": "primitive", + "value": "1005-01-01" + }, + "timePeriodCoveredEnd": { + "typeName": "timePeriodCoveredEnd", + "multiple": false, + "typeClass": "primitive", + "value": "1005-01-02" + } + }, + { + "timePeriodCoveredStart": { + "typeName": "timePeriodCoveredStart", + "multiple": false, + "typeClass": "primitive", + "value": "1005-02-01" + }, + "timePeriodCoveredEnd": { + "typeName": "timePeriodCoveredEnd", + "multiple": false, + "typeClass": "primitive", + "value": "1005-02-02" + } + } + ] + }, + { + "typeName": "dateOfCollection", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dateOfCollectionStart": { + "typeName": "dateOfCollectionStart", + "multiple": false, + "typeClass": "primitive", + "value": "1006-01-01" + }, + "dateOfCollectionEnd": { + "typeName": "dateOfCollectionEnd", + "multiple": false, + "typeClass": "primitive", + "value": "1006-01-01" + } + }, + { + "dateOfCollectionStart": { + "typeName": "dateOfCollectionStart", + "multiple": false, + "typeClass": "primitive", + "value": "1006-02-01" + }, + "dateOfCollectionEnd": { + "typeName": "dateOfCollectionEnd", + "multiple": false, + "typeClass": "primitive", + "value": "1006-02-02" + } + } + ] + }, + { + "typeName": "kindOfData", + "multiple": true, + "typeClass": "primitive", + "value": [ + "KindOfData1", + "KindOfData2" + ] + }, + { + "typeName": "series", + "multiple": true, + "typeClass": "compound", + "value": [{ + "seriesName": { + "typeName": "seriesName", + "multiple": false, + "typeClass": "primitive", + "value": "SeriesName" + }, + "seriesInformation": { + "typeName": "seriesInformation", + "multiple": false, + "typeClass": "primitive", + "value": "SeriesInformation" + } + }] + }, + { + "typeName": "software", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "softwareName": { + "typeName": "softwareName", + "multiple": false, + 
"typeClass": "primitive", + "value": "SoftwareName1" + }, + "softwareVersion": { + "typeName": "softwareVersion", + "multiple": false, + "typeClass": "primitive", + "value": "SoftwareVersion1" + } + }, + { + "softwareName": { + "typeName": "softwareName", + "multiple": false, + "typeClass": "primitive", + "value": "SoftwareName2" + }, + "softwareVersion": { + "typeName": "softwareVersion", + "multiple": false, + "typeClass": "primitive", + "value": "SoftwareVersion2" + } + } + ] + }, + { + "typeName": "relatedMaterial", + "multiple": true, + "typeClass": "primitive", + "value": [ + "RelatedMaterial1", + "RelatedMaterial2" + ] + }, + { + "typeName": "relatedDatasets", + "multiple": true, + "typeClass": "primitive", + "value": [ + "RelatedDatasets1", + "RelatedDatasets2" + ] + }, + { + "typeName": "otherReferences", + "multiple": true, + "typeClass": "primitive", + "value": [ + "OtherReferences1", + "OtherReferences2" + ] + }, + { + "typeName": "dataSources", + "multiple": true, + "typeClass": "primitive", + "value": [ + "DataSources1", + "DataSources2" + ] + }, + { + "typeName": "originOfSources", + "multiple": false, + "typeClass": "primitive", + "value": "OriginOfSources" + }, + { + "typeName": "characteristicOfSources", + "multiple": false, + "typeClass": "primitive", + "value": "CharacteristicOfSourcesNoted" + }, + { + "typeName": "accessToSources", + "multiple": false, + "typeClass": "primitive", + "value": "DocumentationAndAccessToSources" + } + ] + }, + "geospatial": { + "displayName": "Geospatial Metadata", + "fields": [ + { + "typeName": "geographicCoverage", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "country": { + "typeName": "country", + "multiple": false, + "typeClass": "controlledVocabulary", + "value": "Afghanistan" + }, + "state": { + "typeName": "state", + "multiple": false, + "typeClass": "primitive", + "value": "GeographicCoverageStateProvince1" + }, + "city": { + "typeName": "city", + "multiple": false, + "typeClass": "primitive", + "value": "GeographicCoverageCity1" + }, + "otherGeographicCoverage": { + "typeName": "otherGeographicCoverage", + "multiple": false, + "typeClass": "primitive", + "value": "GeographicCoverageOther1" + } + }, + { + "country": { + "typeName": "country", + "multiple": false, + "typeClass": "controlledVocabulary", + "value": "Albania" + }, + "state": { + "typeName": "state", + "multiple": false, + "typeClass": "primitive", + "value": "GeographicCoverageStateProvince2" + }, + "city": { + "typeName": "city", + "multiple": false, + "typeClass": "primitive", + "value": "GeographicCoverageCity2" + }, + "otherGeographicCoverage": { + "typeName": "otherGeographicCoverage", + "multiple": false, + "typeClass": "primitive", + "value": "GeographicCoverageOther2" + } + } + ] + }, + { + "typeName": "geographicUnit", + "multiple": true, + "typeClass": "primitive", + "value": [ + "GeographicUnit1", + "GeographicUnit2" + ] + }, + { + "typeName": "geographicBoundingBox", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "westLongitude": { + "typeName": "westLongitude", + "multiple": false, + "typeClass": "primitive", + "value": "-72" + }, + "eastLongitude": { + "typeName": "eastLongitude", + "multiple": false, + "typeClass": "primitive", + "value": "-70" + }, + "northLatitude": { + "typeName": "northLatitude", + "multiple": false, + "typeClass": "primitive", + "value": "43" + }, + "southLatitude": { + "typeName": "southLatitude", + "multiple": false, + "typeClass": "primitive", + "value": "42" + } + }, + { + "westLongitude": 
{ + "typeName": "westLongitude", + "multiple": false, + "typeClass": "primitive", + "value": "-18" + }, + "eastLongitude": { + "typeName": "eastLongitude", + "multiple": false, + "typeClass": "primitive", + "value": "-13" + }, + "northLatitude": { + "typeName": "northLatitude", + "multiple": false, + "typeClass": "primitive", + "value": "29" + }, + "southLatitude": { + "typeName": "southLatitude", + "multiple": false, + "typeClass": "primitive", + "value": "28" + } + } + ] + } + ] + }, + "socialscience": { + "displayName": "Social Science and Humanities Metadata", + "fields": [ + { + "typeName": "unitOfAnalysis", + "multiple": true, + "typeClass": "primitive", + "value": [ + "UnitOfAnalysis1", + "UnitOfAnalysis2" + ] + }, + { + "typeName": "universe", + "multiple": true, + "typeClass": "primitive", + "value": [ + "Universe1", + "Universe2" + ] + }, + { + "typeName": "timeMethod", + "multiple": false, + "typeClass": "primitive", + "value": "TimeMethod" + }, + { + "typeName": "dataCollector", + "multiple": false, + "typeClass": "primitive", + "value": "LastDataCollector1, FirstDataCollector1" + }, + { + "typeName": "collectorTraining", + "multiple": false, + "typeClass": "primitive", + "value": "CollectorTraining" + }, + { + "typeName": "frequencyOfDataCollection", + "multiple": false, + "typeClass": "primitive", + "value": "Frequency" + }, + { + "typeName": "samplingProcedure", + "multiple": false, + "typeClass": "primitive", + "value": "SamplingProcedure" + }, + { + "typeName": "targetSampleSize", + "multiple": false, + "typeClass": "compound", + "value": { + "targetSampleActualSize": { + "typeName": "targetSampleActualSize", + "multiple": false, + "typeClass": "primitive", + "value": "100" + }, + "targetSampleSizeFormula": { + "typeName": "targetSampleSizeFormula", + "multiple": false, + "typeClass": "primitive", + "value": "TargetSampleSizeFormula" + } + } + }, + { + "typeName": "deviationsFromSampleDesign", + "multiple": false, + "typeClass": "primitive", + "value": "MajorDeviationsForSampleDesign" + }, + { + "typeName": "collectionMode", + "multiple": true, + "typeClass": "primitive", + "value": ["CollectionMode"] + }, + { + "typeName": "researchInstrument", + "multiple": false, + "typeClass": "primitive", + "value": "TypeOfResearchInstrument" + }, + { + "typeName": "dataCollectionSituation", + "multiple": false, + "typeClass": "primitive", + "value": "CharacteristicsOfDataCollectionSituation" + }, + { + "typeName": "actionsToMinimizeLoss", + "multiple": false, + "typeClass": "primitive", + "value": "ActionsToMinimizeLosses" + }, + { + "typeName": "controlOperations", + "multiple": false, + "typeClass": "primitive", + "value": "ControlOperations" + }, + { + "typeName": "weighting", + "multiple": false, + "typeClass": "primitive", + "value": "Weighting" + }, + { + "typeName": "cleaningOperations", + "multiple": false, + "typeClass": "primitive", + "value": "CleaningOperations" + }, + { + "typeName": "datasetLevelErrorNotes", + "multiple": false, + "typeClass": "primitive", + "value": "StudyLevelErrorNotes" + }, + { + "typeName": "responseRate", + "multiple": false, + "typeClass": "primitive", + "value": "ResponseRate" + }, + { + "typeName": "samplingErrorEstimates", + "multiple": false, + "typeClass": "primitive", + "value": "EstimatesOfSamplingError" + }, + { + "typeName": "otherDataAppraisal", + "multiple": false, + "typeClass": "primitive", + "value": "OtherFormsOfDataAppraisal" + }, + { + "typeName": "socialScienceNotes", + "multiple": false, + "typeClass": "compound", + "value": { 
+ "socialScienceNotesType": { + "typeName": "socialScienceNotesType", + "multiple": false, + "typeClass": "primitive", + "value": "NotesType" + }, + "socialScienceNotesSubject": { + "typeName": "socialScienceNotesSubject", + "multiple": false, + "typeClass": "primitive", + "value": "NotesSubject" + }, + "socialScienceNotesText": { + "typeName": "socialScienceNotesText", + "multiple": false, + "typeClass": "primitive", + "value": "NotesText" + } + } + } + ] + }, + "astrophysics": { + "displayName": "Astronomy and Astrophysics Metadata", + "fields": [ + { + "typeName": "astroType", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Image", + "Mosaic", + "EventList", + "Cube" + ] + }, + { + "typeName": "astroFacility", + "multiple": true, + "typeClass": "primitive", + "value": [ + "Facility1", + "Facility2" + ] + }, + { + "typeName": "astroInstrument", + "multiple": true, + "typeClass": "primitive", + "value": [ + "Instrument1", + "Instrument2" + ] + }, + { + "typeName": "astroObject", + "multiple": true, + "typeClass": "primitive", + "value": [ + "Object1", + "Object2" + ] + }, + { + "typeName": "resolution.Spatial", + "multiple": false, + "typeClass": "primitive", + "value": "SpatialResolution" + }, + { + "typeName": "resolution.Spectral", + "multiple": false, + "typeClass": "primitive", + "value": "SpectralResolution" + }, + { + "typeName": "resolution.Temporal", + "multiple": false, + "typeClass": "primitive", + "value": "TimeResolution" + }, + { + "typeName": "coverage.Spectral.Bandpass", + "multiple": true, + "typeClass": "primitive", + "value": [ + "Bandpass1", + "Bandpass2" + ] + }, + { + "typeName": "coverage.Spectral.CentralWavelength", + "multiple": true, + "typeClass": "primitive", + "value": [ + "3001", + "3002" + ] + }, + { + "typeName": "coverage.Spectral.Wavelength", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "coverage.Spectral.MinimumWavelength": { + "typeName": "coverage.Spectral.MinimumWavelength", + "multiple": false, + "typeClass": "primitive", + "value": "4001" + }, + "coverage.Spectral.MaximumWavelength": { + "typeName": "coverage.Spectral.MaximumWavelength", + "multiple": false, + "typeClass": "primitive", + "value": "4002" + } + }, + { + "coverage.Spectral.MinimumWavelength": { + "typeName": "coverage.Spectral.MinimumWavelength", + "multiple": false, + "typeClass": "primitive", + "value": "4003" + }, + "coverage.Spectral.MaximumWavelength": { + "typeName": "coverage.Spectral.MaximumWavelength", + "multiple": false, + "typeClass": "primitive", + "value": "4004" + } + } + ] + }, + { + "typeName": "coverage.Temporal", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "coverage.Temporal.StartTime": { + "typeName": "coverage.Temporal.StartTime", + "multiple": false, + "typeClass": "primitive", + "value": "1007-01-01" + }, + "coverage.Temporal.StopTime": { + "typeName": "coverage.Temporal.StopTime", + "multiple": false, + "typeClass": "primitive", + "value": "1007-01-02" + } + }, + { + "coverage.Temporal.StartTime": { + "typeName": "coverage.Temporal.StartTime", + "multiple": false, + "typeClass": "primitive", + "value": "1007-02-01" + }, + "coverage.Temporal.StopTime": { + "typeName": "coverage.Temporal.StopTime", + "multiple": false, + "typeClass": "primitive", + "value": "1007-02-02" + } + } + ] + }, + { + "typeName": "coverage.Spatial", + "multiple": true, + "typeClass": "primitive", + "value": [ + "SkyCoverage1", + "SkyCoverage2" + ] + }, + { + "typeName": "coverage.Depth", + "multiple": false, + 
"typeClass": "primitive", + "value": "200" + }, + { + "typeName": "coverage.ObjectDensity", + "multiple": false, + "typeClass": "primitive", + "value": "300" + }, + { + "typeName": "coverage.ObjectCount", + "multiple": false, + "typeClass": "primitive", + "value": "400" + }, + { + "typeName": "coverage.SkyFraction", + "multiple": false, + "typeClass": "primitive", + "value": "500" + }, + { + "typeName": "coverage.Polarization", + "multiple": false, + "typeClass": "primitive", + "value": "Polarization" + }, + { + "typeName": "redshiftType", + "multiple": false, + "typeClass": "primitive", + "value": "RedshiftType" + }, + { + "typeName": "resolution.Redshift", + "multiple": false, + "typeClass": "primitive", + "value": "600" + }, + { + "typeName": "coverage.RedshiftValue", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "coverage.Redshift.MinimumValue": { + "typeName": "coverage.Redshift.MinimumValue", + "multiple": false, + "typeClass": "primitive", + "value": "701" + }, + "coverage.Redshift.MaximumValue": { + "typeName": "coverage.Redshift.MaximumValue", + "multiple": false, + "typeClass": "primitive", + "value": "702" + } + }, + { + "coverage.Redshift.MinimumValue": { + "typeName": "coverage.Redshift.MinimumValue", + "multiple": false, + "typeClass": "primitive", + "value": "703" + }, + "coverage.Redshift.MaximumValue": { + "typeName": "coverage.Redshift.MaximumValue", + "multiple": false, + "typeClass": "primitive", + "value": "704" + } + } + ] + } + ] + }, + "biomedical": { + "displayName": "Life Sciences Metadata", + "fields": [ + { + "typeName": "studyDesignType", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Case Control", + "Cross Sectional", + "Cohort Study", + "Not Specified" + ] + }, + { + "typeName": "studyFactorType", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Age", + "Biomarkers", + "Cell Surface Markers", + "Developmental Stage" + ] + }, + { + "typeName": "studyAssayOrganism", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Arabidopsis thaliana", + "Bos taurus", + "Caenorhabditis elegans", + "Danio rerio (zebrafish)" + ] + }, + { + "typeName": "studyAssayOtherOrganism", + "multiple": true, + "typeClass": "primitive", + "value": [ + "OtherOrganism1", + "OtherOrganism2" + ] + }, + { + "typeName": "studyAssayMeasurementType", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "genome sequencing", + "cell sorting", + "clinical chemistry analysis", + "DNA methylation profiling" + ] + }, + { + "typeName": "studyAssayOtherMeasurmentType", + "multiple": true, + "typeClass": "primitive", + "value": [ + "OtherMeasurementType1", + "OtherMeasurementType2" + ] + }, + { + "typeName": "studyAssayTechnologyType", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "culture based drug susceptibility testing, single concentration", + "culture based drug susceptibility testing, two concentrations", + "culture based drug susceptibility testing, three or more concentrations (minimium inhibitory concentration measurement)", + "flow cytometry" + ] + }, + { + "typeName": "studyAssayPlatform", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "210-MS GC Ion Trap (Varian)", + "220-MS GC Ion Trap (Varian)", + "225-MS GC Ion Trap (Varian)", + "300-MS quadrupole GC/MS (Varian)" + ] + }, + { + "typeName": "studyAssayCellType", + "multiple": true, + "typeClass": "primitive", + "value": [ + "CellType1", + "CellType2" + ] + } + ] + }, + 
"journal": { + "displayName": "Journal Metadata", + "fields": [ + { + "typeName": "journalVolumeIssue", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "journalVolume": { + "typeName": "journalVolume", + "multiple": false, + "typeClass": "primitive", + "value": "JournalVolume1" + }, + "journalIssue": { + "typeName": "journalIssue", + "multiple": false, + "typeClass": "primitive", + "value": "JournalIssue1" + }, + "journalPubDate": { + "typeName": "journalPubDate", + "multiple": false, + "typeClass": "primitive", + "value": "1008-01-01" + } + }, + { + "journalVolume": { + "typeName": "journalVolume", + "multiple": false, + "typeClass": "primitive", + "value": "JournalVolume2" + }, + "journalIssue": { + "typeName": "journalIssue", + "multiple": false, + "typeClass": "primitive", + "value": "JournalIssue2" + }, + "journalPubDate": { + "typeName": "journalPubDate", + "multiple": false, + "typeClass": "primitive", + "value": "1008-02-01" + } + } + ] + }, + { + "typeName": "journalArticleType", + "multiple": false, + "typeClass": "controlledVocabulary", + "value": "abstract" + } + ] + } + } + } +} diff --git a/scripts/api/data/metadatablocks/3d_objects.tsv b/scripts/api/data/metadatablocks/3d_objects.tsv new file mode 100644 index 00000000000..e753e4dfbed --- /dev/null +++ b/scripts/api/data/metadatablocks/3d_objects.tsv @@ -0,0 +1,45 @@ +#metadataBlock name dataverseAlias displayName + 3dobjects 3D Objects Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI + 3d3DTechnique 3D Technique The technique used for capturing the 3D data text 0 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE 3dobjects + 3dEquipment Equipment The equipment used for capturing the 3D data text 1 #VALUE TRUE FALSE FALSE FALSE FALSE FALSE 3dobjects + 3dLightingSetup Lighting Setup The lighting used while capturing the 3D data text 2 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE 3dobjects + 3dMasterFilePolygonCount Master File Polygon Count The high-resolution polygon count text 3 #VALUE TRUE FALSE FALSE FALSE FALSE FALSE 3dobjects + 3dExportedFilePolygonCount Exported File Polygon Count The exported mesh polygon count text 4 #VALUE TRUE FALSE TRUE FALSE FALSE FALSE 3dobjects + 3dExportedFileFormat Exported File Format The format of the exported mesh text 5 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE 3dobjects + 3dAltText Alt-Text A physical description of the object modeled textbox 6 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE 3dobjects + 3dMaterialComposition Material Composition The material used to create the object, e.g. 
stone text 7 #VALUE TRUE FALSE TRUE TRUE FALSE FALSE 3dobjects + 3dObjectDimensions Object Dimensions The general measurements of the physical object none 8 ; FALSE FALSE FALSE FALSE FALSE FALSE 3dobjects + 3dLength Length The rough length of the object text 9 Length: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE 3dObjectDimensions 3dobjects + 3dWidth Width The rough width of the object text 10 Width: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE 3dObjectDimensions 3dobjects + 3dHeight Height The rough height of the object text 11 Height: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE 3dObjectDimensions 3dobjects + 3dWeight Weight The rough weight of the object text 12 Weight:#VALUE FALSE FALSE FALSE FALSE FALSE FALSE 3dObjectDimensions 3dobjects + 3dUnit Unit The unit of measurement used for the object dimensions text 13 Unit: #VALUE FALSE TRUE FALSE TRUE FALSE FALSE 3dObjectDimensions 3dobjects + 3dHandling Instructions Safety and special handling instructions for the object textbox 14 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE 3dobjects +#controlledVocabulary DatasetField Value identifier displayOrder + 3d3DTechnique IR Scanner 0 + 3d3DTechnique Laser 1 + 3d3DTechnique Modelled 2 + 3d3DTechnique Photogrammetry 3 + 3d3DTechnique RTI 4 + 3d3DTechnique Structured Light 5 + 3d3DTechnique Tomographic 6 + 3d3DTechnique Other 7 + 3dLightingSetup Natural Light 8 + 3dLightingSetup Lightbox 9 + 3dLightingSetup LED 10 + 3dLightingSetup Fluorescent 11 + 3dLightingSetup Other 12 + 3dUnit cm 13 + 3dUnit m 14 + 3dUnit in 15 + 3dUnit ft 16 + 3dUnit lbs 17 + 3dExportedFileFormat .fbx 18 + 3dExportedFileFormat .glb 19 + 3dExportedFileFormat .gltf 20 + 3dExportedFileFormat .obj 21 + 3dExportedFileFormat .stl 22 + 3dExportedFileFormat .usdz 23 + 3dExportedFileFormat .x3d 24 + 3dExportedFileFormat other 25 diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv index abc09465603..dea23aa9a73 100644 --- a/scripts/api/data/metadatablocks/citation.tsv +++ b/scripts/api/data/metadatablocks/citation.tsv @@ -133,13 +133,14 @@ contributorType Work Package Leader 15 contributorType Other 16 authorIdentifierScheme ORCID 0 - authorIdentifierScheme ISNI 1 - authorIdentifierScheme LCNA 2 - authorIdentifierScheme VIAF 3 - authorIdentifierScheme GND 4 - authorIdentifierScheme DAI 5 - authorIdentifierScheme ResearcherID 6 - authorIdentifierScheme ScopusID 7 + authorIdentifierScheme ROR 1 + authorIdentifierScheme ISNI 2 + authorIdentifierScheme LCNA 3 + authorIdentifierScheme VIAF 4 + authorIdentifierScheme GND 5 + authorIdentifierScheme DAI 6 + authorIdentifierScheme ResearcherID 7 + authorIdentifierScheme ScopusID 8 language 'Are'are alu 0 alu language 'Auhelawa kud 1 kud language A'ou aou 2 aou @@ -8061,4 +8062,4 @@ publicationRelationType IsSupplementTo RT3 3 publicationRelationType IsSupplementedBy RT4 4 publicationRelationType IsReferencedBy RT5 5 - publicationRelationType References RT6 6 \ No newline at end of file + publicationRelationType References RT6 6 diff --git a/scripts/api/setup-datasetfields.sh b/scripts/api/setup-datasetfields.sh index 51da677ceb8..908988f8acb 100755 --- a/scripts/api/setup-datasetfields.sh +++ b/scripts/api/setup-datasetfields.sh @@ -11,3 +11,4 @@ curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCR curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/astrophysics.tsv -H "Content-type: text/tab-separated-values" curl "${DATAVERSE_URL}/api/admin/datasetfield/load" 
-X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/biomedical.tsv -H "Content-type: text/tab-separated-values" curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/journals.tsv -H "Content-type: text/tab-separated-values" +curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/3d_objects.tsv -H "Content-type: text/tab-separated-values" diff --git a/scripts/api/update-datasetfields.sh b/scripts/api/update-datasetfields.sh deleted file mode 100644 index ae099f8dcfd..00000000000 --- a/scripts/api/update-datasetfields.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/sh -curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/citation.tsv -H "Content-type: text/tab-separated-values" -curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/geospatial.tsv -H "Content-type: text/tab-separated-values" -curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/social_science.tsv -H "Content-type: text/tab-separated-values" -curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/astrophysics.tsv -H "Content-type: text/tab-separated-values" -curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/biomedical.tsv -H "Content-type: text/tab-separated-values" -curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/journals.tsv -H "Content-type: text/tab-separated-values" \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java index 02fb59751fb..8d46e956655 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java @@ -38,6 +38,10 @@ import org.apache.commons.text.StringEscapeUtils; import org.apache.commons.lang3.StringUtils; +import static edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider.DOI_PROTOCOL; +import static edu.harvard.iq.dataverse.pidproviders.handle.HandlePidProvider.HDL_PROTOCOL; +import static edu.harvard.iq.dataverse.pidproviders.perma.PermaLinkPidProvider.PERMA_PROTOCOL; + /** * * @author gdurand, qqmyers @@ -293,11 +297,13 @@ public void writeAsBibtexCitation(OutputStream os) throws IOException { out.write("version = {"); out.write(version); out.write("},\r\n"); - out.write("doi = {"); - out.write(persistentId.getAuthority()); - out.write("/"); - out.write(persistentId.getIdentifier()); - out.write("},\r\n"); + if("doi".equals(persistentId.getProtocol())) { + out.write("doi = {"); + out.write(persistentId.getAuthority()); + out.write("/"); + out.write(persistentId.getIdentifier()); + out.write("},\r\n"); + } out.write("url = {"); out.write(persistentId.asURL()); out.write("}\r\n"); @@ -595,11 +601,21 @@ private void createEndNoteXML(XMLStreamWriter xmlw) throws XMLStreamException { } xmlw.writeStartElement("urls"); - xmlw.writeStartElement("related-urls"); - xmlw.writeStartElement("url"); - xmlw.writeCharacters(getPersistentId().asURL()); - xmlw.writeEndElement(); // url - xmlw.writeEndElement(); // related-urls + if (persistentId != null) { + if (PERMA_PROTOCOL.equals(persistentId.getProtocol()) || HDL_PROTOCOL.equals(persistentId.getProtocol())) { + xmlw.writeStartElement("web-urls"); + xmlw.writeStartElement("url"); + 
xmlw.writeCharacters(getPersistentId().asURL()); + xmlw.writeEndElement(); // url + xmlw.writeEndElement(); // web-urls + } else if (DOI_PROTOCOL.equals(persistentId.getProtocol())) { + xmlw.writeStartElement("related-urls"); + xmlw.writeStartElement("url"); + xmlw.writeCharacters(getPersistentId().asURL()); + xmlw.writeEndElement(); // url + xmlw.writeEndElement(); // related-urls + } + } xmlw.writeEndElement(); // urls // a DataFile citation also includes the filename and (for Tabular @@ -617,10 +633,9 @@ private void createEndNoteXML(XMLStreamWriter xmlw) throws XMLStreamException { xmlw.writeEndElement(); // custom2 } } - if (persistentId != null) { + if (persistentId != null && "doi".equals(persistentId.getProtocol())) { xmlw.writeStartElement("electronic-resource-num"); - String electResourceNum = persistentId.getProtocol() + "/" + persistentId.getAuthority() + "/" - + persistentId.getIdentifier(); + String electResourceNum = persistentId.asRawIdentifier(); xmlw.writeCharacters(electResourceNum); xmlw.writeEndElement(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 1a610d9ea6e..01c1a48e117 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -1142,4 +1142,12 @@ public boolean isDeaccessioned() { } return inDeaccessionedVersions; // since any published version would have already returned } + public boolean isInDatasetVersion(DatasetVersion version) { + for (FileMetadata fmd : getFileMetadatas()) { + if (fmd.getDatasetVersion().equals(version)) { + return true; + } + } + return false; + } } // end of class diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 78579b1de21..79c64d03d60 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -68,6 +68,8 @@ query = "SELECT o FROM Dataset o WHERE o.creator.id=:creatorId"), @NamedQuery(name = "Dataset.findByReleaseUserId", query = "SELECT o FROM Dataset o WHERE o.releaseUser.id=:releaseUserId"), + @NamedQuery(name = "Dataset.countAll", + query = "SELECT COUNT(ds) FROM Dataset ds") }) /* diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index ded7c83de62..210cf383378 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.dataset.DatasetType; import java.io.IOException; import java.io.StringReader; import java.net.URI; @@ -871,7 +872,7 @@ public List findAllDisplayedOnCreateInMetadataBlock(MetadataBl Root metadataBlockRoot = criteriaQuery.from(MetadataBlock.class); Root datasetFieldTypeRoot = criteriaQuery.from(DatasetFieldType.class); - Predicate requiredInDataversePredicate = buildRequiredInDataversePredicate(criteriaBuilder, datasetFieldTypeRoot); + Predicate fieldRequiredInTheInstallation = buildFieldRequiredInTheInstallationPredicate(criteriaBuilder, datasetFieldTypeRoot); criteriaQuery.where( criteriaBuilder.and( @@ -879,7 +880,7 @@ public List findAllDisplayedOnCreateInMetadataBlock(MetadataBl datasetFieldTypeRoot.in(metadataBlockRoot.get("datasetFieldTypes")), criteriaBuilder.or( criteriaBuilder.isTrue(datasetFieldTypeRoot.get("displayOnCreate")), - 
requiredInDataversePredicate + fieldRequiredInTheInstallation ) ) ); @@ -890,9 +891,9 @@ public List findAllDisplayedOnCreateInMetadataBlock(MetadataBl return typedQuery.getResultList(); } - public List findAllInMetadataBlockAndDataverse(MetadataBlock metadataBlock, Dataverse dataverse, boolean onlyDisplayedOnCreate) { + public List findAllInMetadataBlockAndDataverse(MetadataBlock metadataBlock, Dataverse dataverse, boolean onlyDisplayedOnCreate, DatasetType datasetType) { if (!dataverse.isMetadataBlockRoot() && dataverse.getOwner() != null) { - return findAllInMetadataBlockAndDataverse(metadataBlock, dataverse.getOwner(), onlyDisplayedOnCreate); + return findAllInMetadataBlockAndDataverse(metadataBlock, dataverse.getOwner(), onlyDisplayedOnCreate, datasetType); } CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); @@ -900,6 +901,29 @@ public List findAllInMetadataBlockAndDataverse(MetadataBlock m Root metadataBlockRoot = criteriaQuery.from(MetadataBlock.class); Root datasetFieldTypeRoot = criteriaQuery.from(DatasetFieldType.class); + + // Build the main predicate to include fields that belong to the specified dataverse and metadataBlock and match the onlyDisplayedOnCreate value. + Predicate fieldPresentInDataverse = buildFieldPresentInDataversePredicate(dataverse, onlyDisplayedOnCreate, criteriaQuery, criteriaBuilder, datasetFieldTypeRoot, metadataBlockRoot); + + // Build an additional predicate to include fields from the datasetType, if the datasetType is specified and contains the given metadataBlock. + Predicate fieldPresentInDatasetType = buildFieldPresentInDatasetTypePredicate(datasetType, criteriaQuery, criteriaBuilder, datasetFieldTypeRoot, metadataBlockRoot, onlyDisplayedOnCreate); + + // Build the final WHERE clause by combining all the predicates. + criteriaQuery.where( + criteriaBuilder.equal(metadataBlockRoot.get("id"), metadataBlock.getId()), // Match the MetadataBlock ID. + datasetFieldTypeRoot.in(metadataBlockRoot.get("datasetFieldTypes")), // Ensure the DatasetFieldType is part of the MetadataBlock. + criteriaBuilder.or( + fieldPresentInDataverse, + fieldPresentInDatasetType + ) + ); + + criteriaQuery.select(datasetFieldTypeRoot); + + return em.createQuery(criteriaQuery).getResultList(); + } + + private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boolean onlyDisplayedOnCreate, CriteriaQuery criteriaQuery, CriteriaBuilder criteriaBuilder, Root datasetFieldTypeRoot, Root metadataBlockRoot) { Root dataverseRoot = criteriaQuery.from(Dataverse.class); // Join Dataverse with DataverseFieldTypeInputLevel on the "dataverseFieldTypeInputLevels" attribute, using a LEFT JOIN. @@ -930,7 +954,7 @@ public List findAllInMetadataBlockAndDataverse(MetadataBlock m Predicate hasNoInputLevelPredicate = criteriaBuilder.not(criteriaBuilder.exists(subquery)); // Define a predicate to include the required fields in Dataverse. - Predicate requiredInDataversePredicate = buildRequiredInDataversePredicate(criteriaBuilder, datasetFieldTypeRoot); + Predicate fieldRequiredInTheInstallation = buildFieldRequiredInTheInstallationPredicate(criteriaBuilder, datasetFieldTypeRoot); // Define a predicate for displaying DatasetFieldTypes on create. // If onlyDisplayedOnCreate is true, include fields that: @@ -941,28 +965,57 @@ public List findAllInMetadataBlockAndDataverse(MetadataBlock m ? 
criteriaBuilder.or( criteriaBuilder.or( criteriaBuilder.isTrue(datasetFieldTypeRoot.get("displayOnCreate")), - requiredInDataversePredicate + fieldRequiredInTheInstallation ), requiredAsInputLevelPredicate ) : criteriaBuilder.conjunction(); - // Build the final WHERE clause by combining all the predicates. - criteriaQuery.where( + // Combine all the predicates. + return criteriaBuilder.and( criteriaBuilder.equal(dataverseRoot.get("id"), dataverse.getId()), // Match the Dataverse ID. - criteriaBuilder.equal(metadataBlockRoot.get("id"), metadataBlock.getId()), // Match the MetadataBlock ID. metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), // Ensure the MetadataBlock is part of the Dataverse. - datasetFieldTypeRoot.in(metadataBlockRoot.get("datasetFieldTypes")), // Ensure the DatasetFieldType is part of the MetadataBlock. criteriaBuilder.or(includedAsInputLevelPredicate, hasNoInputLevelPredicate), // Include DatasetFieldTypes based on the input level predicates. displayedOnCreatePredicate // Apply the display-on-create filter if necessary. ); + } - criteriaQuery.select(datasetFieldTypeRoot).distinct(true); - - return em.createQuery(criteriaQuery).getResultList(); + private Predicate buildFieldPresentInDatasetTypePredicate(DatasetType datasetType, + CriteriaQuery criteriaQuery, + CriteriaBuilder criteriaBuilder, + Root datasetFieldTypeRoot, + Root metadataBlockRoot, + boolean onlyDisplayedOnCreate) { + Predicate datasetTypePredicate = criteriaBuilder.isFalse(criteriaBuilder.literal(true)); // Initialize datasetTypePredicate to always false by default + if (datasetType != null) { + // Create a subquery to check for the presence of the specified metadataBlock within the datasetType + Subquery datasetTypeSubquery = criteriaQuery.subquery(Long.class); + Root datasetTypeRoot = criteriaQuery.from(DatasetType.class); + + // Define a predicate for displaying DatasetFieldTypes on create. + // If onlyDisplayedOnCreate is true, include fields that are either marked as displayed on create OR marked as required. + // Otherwise, use an always-true predicate (conjunction). + Predicate displayedOnCreatePredicate = onlyDisplayedOnCreate ? + criteriaBuilder.or( + criteriaBuilder.isTrue(datasetFieldTypeRoot.get("displayOnCreate")), + buildFieldRequiredInTheInstallationPredicate(criteriaBuilder, datasetFieldTypeRoot) + ) + : criteriaBuilder.conjunction(); + + datasetTypeSubquery.select(criteriaBuilder.literal(1L)) + .where( + criteriaBuilder.equal(datasetTypeRoot.get("id"), datasetType.getId()), // Match the DatasetType ID. + metadataBlockRoot.in(datasetTypeRoot.get("metadataBlocks")), // Ensure the metadataBlock is included in the datasetType's list of metadata blocks. + displayedOnCreatePredicate + ); + + // Now set the datasetTypePredicate to true if the subquery finds a matching metadataBlock + datasetTypePredicate = criteriaBuilder.exists(datasetTypeSubquery); + } + return datasetTypePredicate; } - private Predicate buildRequiredInDataversePredicate(CriteriaBuilder criteriaBuilder, Root datasetFieldTypeRoot) { + private Predicate buildFieldRequiredInTheInstallationPredicate(CriteriaBuilder criteriaBuilder, Root datasetFieldTypeRoot) { // Predicate to check if the current DatasetFieldType is required. 
Predicate isRequired = criteriaBuilder.isTrue(datasetFieldTypeRoot.get("required")); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java index 01785359e0e..16adf8e36bc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java @@ -531,28 +531,36 @@ public String getDisplayName() { public SolrField getSolrField() { SolrField.SolrType solrType = SolrField.SolrType.TEXT_EN; if (fieldType != null) { - - /** - * @todo made more decisions based on fieldType: index as dates, - * integers, and floats so we can do range queries etc. - */ if (fieldType.equals(FieldType.DATE)) { solrType = SolrField.SolrType.DATE; } else if (fieldType.equals(FieldType.EMAIL)) { solrType = SolrField.SolrType.EMAIL; + } else if (fieldType.equals(FieldType.INT)) { + solrType = SolrField.SolrType.INTEGER; + } else if (fieldType.equals(FieldType.FLOAT)) { + solrType = SolrField.SolrType.FLOAT; } - Boolean parentAllowsMultiplesBoolean = false; - if (isHasParent()) { - if (getParentDatasetFieldType() != null) { - DatasetFieldType parent = getParentDatasetFieldType(); - parentAllowsMultiplesBoolean = parent.isAllowMultiples(); + Boolean anyParentAllowsMultiplesBoolean = false; + DatasetFieldType currentDatasetFieldType = this; + // Traverse up through all parents of dataset field type + // If any one of them allows multiples, this child's Solr field must be multi-valued + while (currentDatasetFieldType.isHasParent()) { + if (currentDatasetFieldType.getParentDatasetFieldType() != null) { + DatasetFieldType parent = currentDatasetFieldType.getParentDatasetFieldType(); + if (parent.isAllowMultiples()) { + anyParentAllowsMultiplesBoolean = true; + break; // no need to keep traversing + } + currentDatasetFieldType = parent; + } else { + break; } } boolean makeSolrFieldMultivalued; // http://stackoverflow.com/questions/5800762/what-is-the-use-of-multivalued-field-type-in-solr - if (allowMultiples || parentAllowsMultiplesBoolean || isControlledVocabulary()) { + if (allowMultiples || anyParentAllowsMultiplesBoolean || isControlledVocabulary()) { makeSolrFieldMultivalued = true; } else { makeSolrFieldMultivalued = false; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 62fee549796..e024a80673d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -163,7 +163,7 @@ import edu.harvard.iq.dataverse.util.FileMetadataUtil; import java.util.Comparator; import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException; import org.apache.solr.client.solrj.response.FacetField; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; @@ -1041,7 +1041,7 @@ public Set getFileIdsInVersionFromSolr(Long datasetVersionId, String patte try { queryResponse = solrClientService.getSolrClient().query(solrQuery); - } catch (HttpSolrClient.RemoteSolrException ex) { + } catch (RemoteSolrException ex) { logger.fine("Remote Solr Exception: " + ex.getLocalizedMessage()); String msg = ex.getLocalizedMessage(); if (msg.contains(SearchFields.FILE_DELETED)) { @@ -1985,6 +1985,7 @@ private String init(boolean initFull) { setDataverseSiteUrl(systemConfig.getDataverseSiteUrl()); 
guestbookResponse = new GuestbookResponse(); + anonymizedAccess = null; String sortOrder = getSortOrder(); if(sortOrder != null) { @@ -5695,7 +5696,7 @@ public String getPrivateUrlLink(PrivateUrl privateUrl) { public boolean isAnonymizedAccess() { if (anonymizedAccess == null) { - if (session.getUser() instanceof PrivateUrlUser) { + if (session.getUser() instanceof PrivateUrlUser && workingVersion.isDraft()) { anonymizedAccess = ((PrivateUrlUser) session.getUser()).hasAnonymizedAccess(); } else { anonymizedAccess = false; @@ -5719,6 +5720,22 @@ public boolean isAnonymizedAccessEnabled() { return false; } } + + String anonymizedFieldTypeNames = null; + + public String getAnonymizedFieldTypeNames() { + if (anonymizedFieldTypeNames != null) { + return anonymizedFieldTypeNames; + } + if (settingsWrapper.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames) != null) { + anonymizedFieldTypeNames = settingsWrapper.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames); + + } else { + anonymizedFieldTypeNames = ""; + + } + return anonymizedFieldTypeNames; + } // todo: we should be able to remove - this is passed in the html pages to other fragments, but they could just access this service bean directly. public FileDownloadServiceBean getFileDownloadService() { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index e519614ba55..9a8c43668cb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -1092,4 +1092,12 @@ public List getVersionStates(long id) { } } + /** + * Returns the total number of Datasets. + * @return the number of datasets in the database + */ + public long getDatasetCount() { + return em.createNamedQuery("Dataset.countAll", Long.class).getSingleResult(); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java index c5d6c31386c..741b6ff15a1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java @@ -373,9 +373,9 @@ public static Map> compareFileMetadatas(FileMetadata fmdo, F List.of(fmdo.getLabel(), fmdn.getLabel())); } - if (!StringUtils.equals(fmdo.getProvFreeForm(), fmdn.getProvFreeForm())) { + if (!StringUtils.equals(StringUtil.nullToEmpty(fmdo.getProvFreeForm()), StringUtil.nullToEmpty(fmdn.getProvFreeForm()))) { fileMetadataChanged.put("ProvFreeForm", - List.of(fmdo.getProvFreeForm(), fmdn.getProvFreeForm())); + List.of(StringUtil.nullToEmpty(fmdo.getProvFreeForm()), StringUtil.nullToEmpty(fmdn.getProvFreeForm()))); } if (fmdo.isRestricted() != fmdn.isRestricted()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 829d3922c80..9bb8992e789 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch; @@ -54,7 +55,8 @@ @NamedQuery(name = "Dataverse.findByReleaseUserId", query="select object(o) from Dataverse as o where 
o.releaseUser.id =:releaseUserId order by o.name"), @NamedQuery(name = "Dataverse.filterByAlias", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.alias) LIKE :alias order by dv.alias"), @NamedQuery(name = "Dataverse.filterByAliasNameAffiliation", query="SELECT dv FROM Dataverse dv WHERE (LOWER(dv.alias) LIKE :alias) OR (LOWER(dv.name) LIKE :name) OR (LOWER(dv.affiliation) LIKE :affiliation) order by dv.alias"), - @NamedQuery(name = "Dataverse.filterByName", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.name) LIKE :name order by dv.alias") + @NamedQuery(name = "Dataverse.filterByName", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.name) LIKE :name order by dv.alias"), + @NamedQuery(name = "Dataverse.countAll", query = "SELECT COUNT(dv) FROM Dataverse dv") }) @Entity @Table(indexes = {@Index(columnList="defaultcontributorrole_id") @@ -351,6 +353,17 @@ public void setMetadataBlockFacets(List metadataBlo this.metadataBlockFacets = metadataBlockFacets; } + @OneToMany(mappedBy = "dataverse") + private List dataverseFeaturedItems = new ArrayList<>(); + + public List getDataverseFeaturedItems() { + return this.dataverseFeaturedItems; + } + + public void setDataverseFeaturedItems(List dataverseFeaturedItems) { + this.dataverseFeaturedItems = dataverseFeaturedItems; + } + public List getParentGuestbooks() { List retList = new ArrayList<>(); Dataverse testDV = this; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index 78d5eaf3414..b751841da74 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -23,7 +23,6 @@ import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; import jakarta.persistence.TypedQuery; -//import jakarta.validation.constraints.NotNull; /** * @@ -40,6 +39,9 @@ public class DataverseRoleServiceBean implements java.io.Serializable { @EJB RoleAssigneeServiceBean roleAssigneeService; + + @EJB + DataverseServiceBean dataverseService; @EJB IndexServiceBean indexService; @EJB @@ -48,22 +50,23 @@ public class DataverseRoleServiceBean implements java.io.Serializable { IndexAsync indexAsync; public DataverseRole save(DataverseRole aRole) { - if (aRole.getId() == null) { + if (aRole.getId() == null) { // persist a new Role em.persist(aRole); - /** - * @todo Why would getId be null? Should we call - * indexDefinitionPoint here too? A: it's null for new roles. - */ - return aRole; - } else { - DataverseRole merged = em.merge(aRole); - /** - * @todo update permissionModificationTime here. - */ - IndexResponse indexDefinitionPountResult = indexDefinitionPoint(merged.getOwner()); - logger.info("aRole getId was not null. 
Indexing result: " + indexDefinitionPountResult); - return merged; + } else { // update an existing Role + aRole = em.merge(aRole); + } + + DvObject owner = aRole.getOwner(); + if(owner == null) { // Builtin Role + owner = dataverseService.findByAlias("root"); + } + + if(owner != null) { // owner may be null if a role is created before the root collection as in setup-all.sh + IndexResponse indexDefinitionPointResult = indexDefinitionPoint(owner); + logger.info("Indexing result: " + indexDefinitionPointResult); } + + return aRole; } public RoleAssignment save(RoleAssignment assignment) { diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 91b15f77111..c407aeb294f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -1219,4 +1219,12 @@ public void disableStorageQuota(StorageQuota storageQuota) { em.flush(); } } + + /** + * Returns the total number of Dataverses + * @return the number of dataverse in the database + */ + public long getDataverseCount() { + return em.createNamedQuery("Dataverse.countAll", Long.class).getSingleResult(); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java index 5dab43fbdbd..7bb93ea6dde 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java +++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java @@ -143,12 +143,14 @@ public String visit(DataFile df) { @Column(insertable = false, updatable = false) private String dtype; /* - * Add DOI related fields + * Add PID related fields */ private String protocol; private String authority; + private String separator; + @Temporal(value = TemporalType.TIMESTAMP) private Date globalIdCreateTime; @@ -323,6 +325,16 @@ public void setAuthority(String authority) { globalId=null; } + public String getSeparator() { + return separator; + } + + public void setSeparator(String separator) { + this.separator = separator; + //Remove cached value + globalId=null; + } + public Date getGlobalIdCreateTime() { return globalIdCreateTime; } @@ -353,11 +365,13 @@ public void setGlobalId( GlobalId pid ) { if ( pid == null ) { setProtocol(null); setAuthority(null); + setSeparator(null); setIdentifier(null); } else { //These reset globalId=null setProtocol(pid.getProtocol()); setAuthority(pid.getAuthority()); + setSeparator(pid.getSeparator()); setIdentifier(pid.getIdentifier()); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java index 0561fed8a97..0f211dc6713 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItemServiceBean; import edu.harvard.iq.dataverse.util.cache.CacheFactoryBean; import edu.harvard.iq.dataverse.engine.DataverseEngine; import edu.harvard.iq.dataverse.authorization.Permission; @@ -184,7 +185,10 @@ public class EjbDataverseEngine { ConfirmEmailServiceBean confirmEmailService; @EJB - StorageUseServiceBean storageUseService; + StorageUseServiceBean 
storageUseService; + + @EJB + DataverseFeaturedItemServiceBean dataverseFeaturedItemServiceBean; @EJB EjbDataverseEngineInner innerEngine; @@ -522,6 +526,11 @@ public DatasetFieldServiceBean dsField() { return dsField; } + @Override + public DataverseFeaturedItemServiceBean dataverseFeaturedItems() { + return dataverseFeaturedItemServiceBean; + } + @Override public StorageUseServiceBean storageUse() { return storageUseService; diff --git a/src/main/java/edu/harvard/iq/dataverse/ExternalIdentifier.java b/src/main/java/edu/harvard/iq/dataverse/ExternalIdentifier.java index 8c4fb6b1325..cf1dcfb3c15 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ExternalIdentifier.java +++ b/src/main/java/edu/harvard/iq/dataverse/ExternalIdentifier.java @@ -13,8 +13,10 @@ public enum ExternalIdentifier { // note: DAI is missing from this list, because it doesn't have resolvable URL ResearcherID("ResearcherID", "https://publons.com/researcher/%s/", "^[A-Z\\d][A-Z\\d-]+[A-Z\\d]$"), ScopusID("ScopusID", "https://www.scopus.com/authid/detail.uri?authorId=%s", "^\\d*$"), - //Requiring ROR to be URL form as we use it where there is no id type field and matching any 9 digit number starting with 0 seems a bit aggressive - ROR("ROR", "https://ror.org/%s", "^(https:\\/\\/ror.org\\/)0[a-hj-km-np-tv-z|0-9]{6}[0-9]{2}$"); + // ROR regex from https://ror.readme.io/docs/identifier + ROR("ROR", "https://ror.org/%s", "^0[a-hj-km-np-tv-z|0-9]{6}[0-9]{2}$"), + // In some contexts, we check for the full ROR URL. + ROR_FULL_URL("ROR", "https://ror.org/%s", "^(https:\\/\\/ror.org\\/)0[a-hj-km-np-tv-z|0-9]{6}[0-9]{2}$"); private String name; private String template; diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java index 5717da38f29..3df3b095472 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java @@ -708,7 +708,7 @@ private List loadFileMetadataTabList() { FileMetadata fmd = datafileService.findFileMetadataByDatasetVersionIdAndDataFileId(versionLoop.getId(), df.getId()); if (fmd != null) { fmd.setContributorNames(datasetVersionService.getContributorsNames(versionLoop)); - FileVersionDifference fvd = new FileVersionDifference(fmd, getPreviousFileMetadata(fmd)); + FileVersionDifference fvd = new FileVersionDifference(fmd, getPreviousFileMetadata(fmd), true); fmd.setFileVersionDifference(fvd); retList.add(fmd); foundFmd = true; @@ -720,7 +720,7 @@ private List loadFileMetadataTabList() { FileMetadata dummy = new FileMetadata(); dummy.setDatasetVersion(versionLoop); dummy.setDataFile(null); - FileVersionDifference fvd = new FileVersionDifference(dummy, getPreviousFileMetadata(versionLoop)); + FileVersionDifference fvd = new FileVersionDifference(dummy, getPreviousFileMetadata(versionLoop), true); dummy.setFileVersionDifference(fvd); retList.add(dummy); } diff --git a/src/main/java/edu/harvard/iq/dataverse/GlobalId.java b/src/main/java/edu/harvard/iq/dataverse/GlobalId.java index 1c8783c5bd5..058a6269b57 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GlobalId.java +++ b/src/main/java/edu/harvard/iq/dataverse/GlobalId.java @@ -63,6 +63,10 @@ public String getAuthority() { return authority; } + public String getSeparator() { + return separator; + } + public String getIdentifier() { return identifier; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 3c1074b75bb..6d54f3b6971 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -645,6 +645,8 @@ protected T execCommand( Command cmd ) throws WrappedResponse { } } catch (InvalidFieldsCommandException ex) { throw new WrappedResponse(ex, badRequest(ex.getMessage(), ex.getFieldErrors())); + } catch (InvalidCommandArgumentsException ex) { + throw new WrappedResponse(ex, error(Status.BAD_REQUEST, ex.getMessage())); } catch (CommandException ex) { Logger.getLogger(AbstractApiBean.class.getName()).log(Level.SEVERE, "Error while executing command " + cmd, ex); throw new WrappedResponse(ex, error(Status.INTERNAL_SERVER_ERROR, ex.getMessage())); @@ -831,6 +833,18 @@ protected Response badRequest(String msg, Map fieldErrors) { .build(); } + /** + * In short, your password is fine but you don't have permission. + * + * "The 403 (Forbidden) status code indicates that the server understood the + * request but refuses to authorize it. A server that wishes to make public + * why the request has been forbidden can describe that reason in the + * response payload (if any). + * + * If authentication credentials were provided in the request, the server + * considers them insufficient to grant access." -- + * https://datatracker.ietf.org/doc/html/rfc7231#section-6.5.3 + */ protected Response forbidden( String msg ) { return error( Status.FORBIDDEN, msg ); } @@ -852,9 +866,17 @@ protected Response permissionError( PermissionException pe ) { } protected Response permissionError( String message ) { - return unauthorized( message ); + return forbidden( message ); } + /** + * In short, bad password. + * + * "The 401 (Unauthorized) status code indicates that the request has not + * been applied because it lacks valid authentication credentials for the + * target resource." 
-- + * https://datatracker.ietf.org/doc/html/rfc7235#section-3.1 + */ protected Response unauthorized( String message ) { return error( Status.UNAUTHORIZED, message ); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 16ac884180b..2a27c89eaaa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -6,32 +6,7 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.AuxiliaryFile; -import edu.harvard.iq.dataverse.AuxiliaryFileServiceBean; -import edu.harvard.iq.dataverse.DataCitation; -import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.FileAccessRequest; -import edu.harvard.iq.dataverse.FileMetadata; -import edu.harvard.iq.dataverse.DataFileServiceBean; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.DatasetVersionServiceBean; -import edu.harvard.iq.dataverse.DatasetServiceBean; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseRequestServiceBean; -import edu.harvard.iq.dataverse.DataverseRoleServiceBean; -import edu.harvard.iq.dataverse.DataverseServiceBean; -import edu.harvard.iq.dataverse.DataverseSession; -import edu.harvard.iq.dataverse.DataverseTheme; -import edu.harvard.iq.dataverse.FileDownloadServiceBean; -import edu.harvard.iq.dataverse.GuestbookResponse; -import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; -import edu.harvard.iq.dataverse.PermissionServiceBean; -import edu.harvard.iq.dataverse.PermissionsWrapper; -import edu.harvard.iq.dataverse.RoleAssignment; -import edu.harvard.iq.dataverse.UserNotification; -import edu.harvard.iq.dataverse.UserNotificationServiceBean; -import edu.harvard.iq.dataverse.ThemeWidgetFragment; +import edu.harvard.iq.dataverse.*; import static edu.harvard.iq.dataverse.api.Datasets.handleVersion; @@ -52,18 +27,12 @@ import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.datavariable.DataVariable; import edu.harvard.iq.dataverse.datavariable.VariableServiceBean; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItemServiceBean; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.RequestAccessCommand; -import edu.harvard.iq.dataverse.engine.command.impl.RevokeRoleCommand; -import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.*; import edu.harvard.iq.dataverse.export.DDIExportServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean; import 
edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry; @@ -88,7 +57,6 @@ import java.util.Arrays; import java.util.Date; import java.util.List; -import java.util.Properties; import java.util.logging.Level; import jakarta.inject.Inject; import jakarta.json.Json; @@ -133,7 +101,6 @@ import org.eclipse.microprofile.openapi.annotations.Operation; import org.eclipse.microprofile.openapi.annotations.media.Content; -import org.eclipse.microprofile.openapi.annotations.media.Schema; import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody; import org.eclipse.microprofile.openapi.annotations.responses.APIResponse; import org.eclipse.microprofile.openapi.annotations.responses.APIResponses; @@ -199,6 +166,8 @@ public class Access extends AbstractApiBean { PermissionsWrapper permissionsWrapper; @Inject MakeDataCountLoggingServiceBean mdcLogService; + @Inject + DataverseFeaturedItemServiceBean dataverseFeaturedItemServiceBean; //@EJB @@ -2015,4 +1984,24 @@ private URI handleCustomZipDownload(User user, String customZipServiceUrl, Strin } return redirectUri; } + + @GET + @AuthRequired + @Produces({"image/png"}) + @Path("dataverseFeaturedItemImage/{itemId}") + public InputStream getDataverseFeatureItemImage(@Context ContainerRequestContext crc, @PathParam("itemId") Long itemId) { + DataverseFeaturedItem dataverseFeaturedItem; + try { + dataverseFeaturedItem = execCommand(new GetDataverseFeaturedItemCommand(createDataverseRequest(getRequestUser(crc)), dataverseFeaturedItemServiceBean.findById(itemId))); + } catch (WrappedResponse wr) { + logger.warning("Cannot locate a dataverse featured item with id " + itemId); + return null; + } + try { + return dataverseFeaturedItemServiceBean.getImageFileAsInputStream(dataverseFeaturedItem); + } catch (IOException e) { + logger.warning("Error while obtaining the input stream for the image file associated with the dataverse featured item with id " + itemId); + return null; + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 152bcf5066e..2d850cc092f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1,11 +1,30 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.BannerMessage; +import edu.harvard.iq.dataverse.BannerMessageServiceBean; +import edu.harvard.iq.dataverse.BannerMessageText; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileServiceBean; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.DatasetVersionServiceBean; +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseRequestServiceBean; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.DataverseSession; +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.DvObjectServiceBean; +import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.validation.EMailValidator; +import edu.harvard.iq.dataverse.EjbDataverseEngine; +import 
edu.harvard.iq.dataverse.Template; +import edu.harvard.iq.dataverse.TemplateServiceBean; +import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.dto.RoleDTO; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; @@ -49,7 +68,8 @@ import java.io.InputStream; import java.io.StringReader; import java.nio.charset.StandardCharsets; -import java.util.*; +import java.util.Collections; +import java.util.Map; import java.util.Map.Entry; import java.util.function.Predicate; import java.util.logging.Level; @@ -65,6 +85,7 @@ import org.apache.commons.io.IOUtils; +import java.util.List; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationProvidersRegistrationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; @@ -101,7 +122,9 @@ import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.rolesToJson; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; - +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; import jakarta.inject.Inject; import jakarta.json.JsonArray; import jakarta.persistence.Query; @@ -109,6 +132,7 @@ import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.StreamingOutput; import java.nio.file.Paths; +import java.util.TreeMap; /** * Where the secure, setup API calls live. @@ -990,6 +1014,22 @@ public Response createNewBuiltinRole(RoleDTO roleDto) { actionLogSvc.log(alr); } } + @Path("roles/{id}") + @PUT + public Response updateBuiltinRole(RoleDTO roleDto, @PathParam("id") long roleId) { + ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "updateBuiltInRole") + .setInfo(roleDto.getAlias() + ":" + roleDto.getDescription()); + try { + DataverseRole role = roleDto.updateRoleFromDTO(rolesSvc.find(roleId)); + return ok(json(rolesSvc.save(role))); + } catch (Exception e) { + alr.setActionResult(ActionLogRecord.Result.InternalError); + alr.setInfo(alr.getInfo() + "// " + e.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } finally { + actionLogSvc.log(alr); + } + } @Path("roles") @GET diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java index 907295ad848..cbb0f4ffcfd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java @@ -42,6 +42,7 @@ import java.util.logging.Logger; import jakarta.persistence.NoResultException; import jakarta.persistence.TypedQuery; +import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.core.Response.Status; import java.io.BufferedInputStream; @@ -545,4 +546,19 @@ public static String getDataverseLangDirectory() { return dataverseLangDirectory; } + /** + * Set setDisplayOnCreate for a DatasetFieldType. 
+ */ + @POST + @Path("/setDisplayOnCreate") + public Response setDisplayOnCreate(@QueryParam("datasetFieldType") String datasetFieldTypeIn, @QueryParam("setDisplayOnCreate") boolean setDisplayOnCreateIn) { + DatasetFieldType dft = datasetFieldService.findByName(datasetFieldTypeIn); + if (dft == null) { + return error(Status.NOT_FOUND, "Could not find a DatasetFieldType by looking up " + datasetFieldTypeIn); + } + dft.setDisplayOnCreate(setDisplayOnCreateIn); + DatasetFieldType saved = datasetFieldService.save(dft); + return ok("DisplayOnCreate for DatasetFieldType " + saved.getName() + " is now " + saved.isDisplayOnCreate()); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 2ec10816acc..1a7c6dc5de3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -421,15 +421,16 @@ public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @Pa @GET @AuthRequired @Path("{id}/versions") - public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("excludeFiles") Boolean excludeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) { + public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("excludeFiles") Boolean excludeFiles,@QueryParam("excludeMetadataBlocks") Boolean excludeMetadataBlocks, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) { return response( req -> { Dataset dataset = findDatasetOrDie(id); Boolean deepLookup = excludeFiles == null ? true : !excludeFiles; + Boolean includeMetadataBlocks = excludeMetadataBlocks == null ? true : !excludeMetadataBlocks; return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit, deepLookup) ) .stream() - .map( d -> json(d, deepLookup) ) + .map( d -> json(d, deepLookup, includeMetadataBlocks) ) .collect(toJsonArray())); }, getRequestUser(crc)); } @@ -441,6 +442,7 @@ public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("excludeFiles") Boolean excludeFiles, + @QueryParam("excludeMetadataBlocks") Boolean excludeMetadataBlocks, @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, @QueryParam("returnOwners") boolean returnOwners, @Context UriInfo uriInfo, @@ -466,11 +468,12 @@ public Response getVersion(@Context ContainerRequestContext crc, if (excludeFiles == null ? true : !excludeFiles) { requestedDatasetVersion = datasetversionService.findDeep(requestedDatasetVersion.getId()); } + Boolean includeMetadataBlocks = excludeMetadataBlocks == null ? true : !excludeMetadataBlocks; JsonObjectBuilder jsonBuilder = json(requestedDatasetVersion, null, - excludeFiles == null ? true : !excludeFiles, - returnOwners); + excludeFiles == null ? 
true : !excludeFiles, + returnOwners, includeMetadataBlocks); return ok(jsonBuilder); }, getRequestUser(crc)); @@ -1184,8 +1187,14 @@ private String validateDatasetFieldValues(List fields) { if (dsf.getDatasetFieldType().isAllowMultiples() && dsf.getControlledVocabularyValues().isEmpty() && dsf.getDatasetFieldCompoundValues().isEmpty() && dsf.getDatasetFieldValues().isEmpty()) { error.append("Empty multiple value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" "); - } else if (!dsf.getDatasetFieldType().isAllowMultiples() && dsf.getSingleValue().getValue().isEmpty()) { - error.append("Empty value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" "); + } else if (!dsf.getDatasetFieldType().isAllowMultiples()) { + if (dsf.getDatasetFieldType().isControlledVocabulary() && dsf.getSingleControlledVocabularyValue().getStrValue().isEmpty()) { + error.append("Empty cvoc value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" "); + } else if (dsf.getDatasetFieldType().isCompound() && dsf.getDatasetFieldCompoundValues().isEmpty()) { + error.append("Empty compound value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" "); + } else if (!dsf.getDatasetFieldType().isControlledVocabulary() && !dsf.getDatasetFieldType().isCompound() && dsf.getSingleValue().getValue().isEmpty()) { + error.append("Empty value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" "); + } } } @@ -5193,14 +5202,10 @@ public Response resetPidGenerator(@Context ContainerRequestContext crc, @PathPar @Path("datasetTypes") public Response getDatasetTypes() { JsonArrayBuilder jab = Json.createArrayBuilder(); - List datasetTypes = datasetTypeSvc.listAll(); - for (DatasetType datasetType : datasetTypes) { - JsonObjectBuilder job = Json.createObjectBuilder(); - job.add("id", datasetType.getId()); - job.add("name", datasetType.getName()); - jab.add(job); + for (DatasetType datasetType : datasetTypeSvc.listAll()) { + jab.add(datasetType.toJson()); } - return ok(jab.build()); + return ok(jab); } @GET @@ -5315,4 +5320,52 @@ public Response deleteDatasetType(@Context ContainerRequestContext crc, @PathPar } } + @AuthRequired + @PUT + @Path("datasetTypes/{idOrName}") + public Response updateDatasetTypeLinksWithMetadataBlocks(@Context ContainerRequestContext crc, @PathParam("idOrName") String idOrName, String jsonBody) { + DatasetType datasetType = null; + if (StringUtils.isNumeric(idOrName)) { + try { + long id = Long.parseLong(idOrName); + datasetType = datasetTypeSvc.getById(id); + } catch (NumberFormatException ex) { + return error(NOT_FOUND, "Could not find a dataset type with id " + idOrName); + } + } else { + datasetType = datasetTypeSvc.getByName(idOrName); + } + JsonArrayBuilder datasetTypesBefore = Json.createArrayBuilder(); + for (MetadataBlock metadataBlock : datasetType.getMetadataBlocks()) { + datasetTypesBefore.add(metadataBlock.getName()); + } + JsonArrayBuilder datasetTypesAfter = Json.createArrayBuilder(); + List metadataBlocksToSave = new ArrayList<>(); + if (jsonBody != null && !jsonBody.isEmpty()) { + JsonArray json = JsonUtil.getJsonArray(jsonBody); + for (JsonString jsonValue : json.getValuesAs(JsonString.class)) { + String name = jsonValue.getString(); + MetadataBlock metadataBlock = metadataBlockSvc.findByName(name); + if (metadataBlock != null) { + metadataBlocksToSave.add(metadataBlock); + datasetTypesAfter.add(name); + } else { + String availableBlocks = 
metadataBlockSvc.listMetadataBlocks().stream().map(MetadataBlock::getName).collect(Collectors.joining(", ")); + return badRequest("Metadata block not found: " + name + ". Available metadata blocks: " + availableBlocks); + } + } + } + try { + execCommand(new UpdateDatasetTypeLinksToMetadataBlocksCommand(createDataverseRequest(getRequestUser(crc)), datasetType, metadataBlocksToSave)); + return ok(Json.createObjectBuilder() + .add("linkedMetadataBlocks", Json.createObjectBuilder() + .add("before", datasetTypesBefore) + .add("after", datasetTypesAfter)) + ); + + } catch (WrappedResponse ex) { + return ex.getResponse(); + } + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItems.java b/src/main/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItems.java new file mode 100644 index 00000000000..a77ea000415 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItems.java @@ -0,0 +1,69 @@ +package edu.harvard.iq.dataverse.api; + +import edu.harvard.iq.dataverse.api.auth.AuthRequired; +import edu.harvard.iq.dataverse.api.dto.UpdatedDataverseFeaturedItemDTO; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItemServiceBean; +import edu.harvard.iq.dataverse.engine.command.impl.*; +import edu.harvard.iq.dataverse.util.BundleUtil; +import jakarta.ejb.Stateless; +import jakarta.inject.Inject; +import jakarta.ws.rs.*; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; +import org.glassfish.jersey.media.multipart.FormDataParam; + +import java.io.InputStream; +import java.text.MessageFormat; + +import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; + +@Stateless +@Path("dataverseFeaturedItems") +public class DataverseFeaturedItems extends AbstractApiBean { + + @Inject + DataverseFeaturedItemServiceBean dataverseFeaturedItemServiceBean; + + @DELETE + @AuthRequired + @Path("{id}") + public Response deleteFeaturedItem(@Context ContainerRequestContext crc, @PathParam("id") Long id) { + try { + DataverseFeaturedItem dataverseFeaturedItem = dataverseFeaturedItemServiceBean.findById(id); + if (dataverseFeaturedItem == null) { + throw new WrappedResponse(error(Response.Status.NOT_FOUND, MessageFormat.format(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notFound"), id))); + } + execCommand(new DeleteDataverseFeaturedItemCommand(createDataverseRequest(getRequestUser(crc)), dataverseFeaturedItem)); + return ok(MessageFormat.format(BundleUtil.getStringFromBundle("dataverseFeaturedItems.delete.successful"), id)); + } catch (WrappedResponse e) { + return e.getResponse(); + } + } + + @PUT + @AuthRequired + @Consumes(MediaType.MULTIPART_FORM_DATA) + @Path("{id}") + public Response updateFeaturedItem(@Context ContainerRequestContext crc, + @PathParam("id") Long id, + @FormDataParam("content") String content, + @FormDataParam("displayOrder") int displayOrder, + @FormDataParam("keepFile") boolean keepFile, + @FormDataParam("file") InputStream imageFileInputStream, + @FormDataParam("file") FormDataContentDisposition contentDispositionHeader) { + try { + DataverseFeaturedItem dataverseFeaturedItem = dataverseFeaturedItemServiceBean.findById(id); + if (dataverseFeaturedItem == null) { + throw new WrappedResponse(error(Response.Status.NOT_FOUND, 
MessageFormat.format(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notFound"), id))); + } + UpdatedDataverseFeaturedItemDTO updatedDataverseFeaturedItemDTO = UpdatedDataverseFeaturedItemDTO.fromFormData(content, displayOrder, keepFile, imageFileInputStream, contentDispositionHeader); + return ok(json(execCommand(new UpdateDataverseFeaturedItemCommand(createDataverseRequest(getRequestUser(crc)), dataverseFeaturedItem, updatedDataverseFeaturedItemDTO)))); + } catch (WrappedResponse e) { + return e.getResponse(); + } + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index c494a5c9ccd..81f84ec2fbf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1,5 +1,7 @@ package edu.harvard.iq.dataverse.api; +import com.google.common.collect.Lists; +import com.google.api.client.util.ArrayMap; import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.api.datadeposit.SwordServiceBean; @@ -15,7 +17,10 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.dataset.DatasetType; import edu.harvard.iq.dataverse.dataverse.DataverseUtil; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItemServiceBean; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.impl.*; import edu.harvard.iq.dataverse.pidproviders.PidProvider; @@ -33,7 +38,7 @@ import edu.harvard.iq.dataverse.util.json.JsonPrinter; import edu.harvard.iq.dataverse.util.json.JsonUtil; -import java.io.StringReader; +import java.io.*; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; @@ -59,8 +64,6 @@ import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.Response.Status; -import java.io.IOException; -import java.io.OutputStream; import java.text.MessageFormat; import java.text.SimpleDateFormat; import java.util.stream.Collectors; @@ -68,6 +71,10 @@ import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.StreamingOutput; +import org.glassfish.jersey.media.multipart.FormDataBodyPart; +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; +import org.glassfish.jersey.media.multipart.FormDataParam; + import javax.xml.stream.XMLStreamException; /** @@ -111,7 +118,10 @@ public class Dataverses extends AbstractApiBean { @EJB PermissionServiceBean permissionService; - + + @EJB + DataverseFeaturedItemServiceBean dataverseFeaturedItemServiceBean; + @POST @AuthRequired public Response addRoot(@Context ContainerRequestContext crc, String body) { @@ -195,7 +205,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod List facets = parseFacets(body); AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO, true)); + dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO)); 
return ok(json(dataverse)); } catch (WrappedResponse ww) { @@ -221,31 +231,60 @@ private DataverseDTO parseAndValidateUpdateDataverseRequestBody(String body) thr } } + /* + return null - ignore + return empty list - delete and inherit from parent + return non-empty list - update + */ private List parseInputLevels(String body, Dataverse dataverse) throws WrappedResponse { JsonObject metadataBlocksJson = getMetadataBlocksJson(body); - if (metadataBlocksJson == null) { - return null; + JsonArray inputLevelsArray = metadataBlocksJson != null ? metadataBlocksJson.getJsonArray("inputLevels") : null; + + if (metadataBlocksJson != null && metadataBlocksJson.containsKey("inheritMetadataBlocksFromParent") && metadataBlocksJson.getBoolean("inheritMetadataBlocksFromParent")) { + return Lists.newArrayList(); // delete } - JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels"); - return inputLevelsArray != null ? parseInputLevels(inputLevelsArray, dataverse) : null; + return parseInputLevels(inputLevelsArray, dataverse); } + /* + return null - ignore + return empty list - delete and inherit from parent + return non-empty list - update + */ private List parseMetadataBlocks(String body) throws WrappedResponse { JsonObject metadataBlocksJson = getMetadataBlocksJson(body); - if (metadataBlocksJson == null) { - return null; + JsonArray metadataBlocksArray = metadataBlocksJson != null ? metadataBlocksJson.getJsonArray("metadataBlockNames") : null; + + if (metadataBlocksArray != null && metadataBlocksJson.containsKey("inheritMetadataBlocksFromParent") && metadataBlocksJson.getBoolean("inheritMetadataBlocksFromParent")) { + String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.containslistandinheritflag"), "metadataBlockNames", "inheritMetadataBlocksFromParent"); + throw new WrappedResponse(badRequest(errorMessage)); + } + if (metadataBlocksJson != null && metadataBlocksJson.containsKey("inheritMetadataBlocksFromParent") && metadataBlocksJson.getBoolean("inheritMetadataBlocksFromParent")) { + return Lists.newArrayList(); // delete and inherit from parent } - JsonArray metadataBlocksArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); - return metadataBlocksArray != null ? parseNewDataverseMetadataBlocks(metadataBlocksArray) : null; + + return parseNewDataverseMetadataBlocks(metadataBlocksArray); } + /* + return null - ignore + return empty list - delete and inherit from parent + return non-empty list - update + */ private List parseFacets(String body) throws WrappedResponse { JsonObject metadataBlocksJson = getMetadataBlocksJson(body); - if (metadataBlocksJson == null) { - return null; + JsonArray facetsArray = metadataBlocksJson != null ? metadataBlocksJson.getJsonArray("facetIds") : null; + + if (facetsArray != null && metadataBlocksJson.containsKey("inheritFacetsFromParent") && metadataBlocksJson.getBoolean("inheritFacetsFromParent")) { + String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.containslistandinheritflag"), "facetIds", "inheritFacetsFromParent"); + throw new WrappedResponse(badRequest(errorMessage)); } - JsonArray facetsArray = metadataBlocksJson.getJsonArray("facetIds"); - return facetsArray != null ? 
parseFacets(facetsArray) : null; + + if (metadataBlocksJson != null && metadataBlocksJson.containsKey("inheritFacetsFromParent") && metadataBlocksJson.getBoolean("inheritFacetsFromParent")) { + return Lists.newArrayList(); // delete and inherit from parent + } + + return parseFacets(facetsArray); } private JsonObject getMetadataBlocksJson(String body) { @@ -277,6 +316,9 @@ private Response handleEJBException(EJBException ex, String action) { } private List parseNewDataverseMetadataBlocks(JsonArray metadataBlockNamesArray) throws WrappedResponse { + if (metadataBlockNamesArray == null) { + return null; + } List selectedMetadataBlocks = new ArrayList<>(); for (JsonString metadataBlockName : metadataBlockNamesArray.getValuesAs(JsonString.class)) { MetadataBlock metadataBlock = metadataBlockSvc.findByName(metadataBlockName.getString()); @@ -745,6 +787,9 @@ public Response updateInputLevels(@Context ContainerRequestContext crc, @PathPar } private List parseInputLevels(JsonArray inputLevelsArray, Dataverse dataverse) throws WrappedResponse { + if (inputLevelsArray == null) { + return null; + } List newInputLevels = new ArrayList<>(); for (JsonValue value : inputLevelsArray) { JsonObject inputLevel = (JsonObject) value; @@ -771,6 +816,9 @@ private List parseInputLevels(JsonArray inputLevel } private List parseFacets(JsonArray facetsArray) throws WrappedResponse { + if (facetsArray == null) { + return null; + } List facets = new LinkedList<>(); for (JsonString facetId : facetsArray.getValuesAs(JsonString.class)) { DatasetFieldType dsfType = findDatasetFieldType(facetId.getString()); @@ -801,17 +849,20 @@ public Response deleteDataverseLinkingDataverse(@Context ContainerRequestContext public Response listMetadataBlocks(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("onlyDisplayedOnCreate") boolean onlyDisplayedOnCreate, - @QueryParam("returnDatasetFieldTypes") boolean returnDatasetFieldTypes) { + @QueryParam("returnDatasetFieldTypes") boolean returnDatasetFieldTypes, + @QueryParam("datasetType") String datasetTypeIn) { try { Dataverse dataverse = findDataverseOrDie(dvIdtf); + DatasetType datasetType = datasetTypeSvc.getByName(datasetTypeIn); final List metadataBlocks = execCommand( new ListMetadataBlocksCommand( createDataverseRequest(getRequestUser(crc)), dataverse, - onlyDisplayedOnCreate + onlyDisplayedOnCreate, + datasetType ) ); - return ok(json(metadataBlocks, returnDatasetFieldTypes, onlyDisplayedOnCreate, dataverse)); + return ok(json(metadataBlocks, returnDatasetFieldTypes, onlyDisplayedOnCreate, dataverse, datasetType)); } catch (WrappedResponse we) { return we.getResponse(); } @@ -1729,4 +1780,131 @@ public Response getUserPermissionsOnDataverse(@Context ContainerRequestContext c jsonObjectBuilder.add("canDeleteDataverse", permissionService.userOn(requestUser, dataverse).has(Permission.DeleteDataverse)); return ok(jsonObjectBuilder); } + + @POST + @AuthRequired + @Consumes(MediaType.MULTIPART_FORM_DATA) + @Path("{identifier}/featuredItems") + public Response createFeaturedItem(@Context ContainerRequestContext crc, + @PathParam("identifier") String dvIdtf, + @FormDataParam("content") String content, + @FormDataParam("displayOrder") int displayOrder, + @FormDataParam("file") InputStream imageFileInputStream, + @FormDataParam("file") FormDataContentDisposition contentDispositionHeader) { + Dataverse dataverse; + try { + dataverse = findDataverseOrDie(dvIdtf); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + 
NewDataverseFeaturedItemDTO newDataverseFeaturedItemDTO = NewDataverseFeaturedItemDTO.fromFormData(content, displayOrder, imageFileInputStream, contentDispositionHeader); + try { + DataverseFeaturedItem dataverseFeaturedItem = execCommand(new CreateDataverseFeaturedItemCommand( + createDataverseRequest(getRequestUser(crc)), + dataverse, + newDataverseFeaturedItemDTO + )); + return ok(json(dataverseFeaturedItem)); + } catch (WrappedResponse e) { + return e.getResponse(); + } + } + + @GET + @AuthRequired + @Path("{identifier}/featuredItems") + public Response listFeaturedItems(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) { + try { + Dataverse dataverse = findDataverseOrDie(dvIdtf); + List featuredItems = execCommand(new ListDataverseFeaturedItemsCommand(createDataverseRequest(getRequestUser(crc)), dataverse)); + return ok(jsonDataverseFeaturedItems(featuredItems)); + } catch (WrappedResponse e) { + return e.getResponse(); + } + } + + @PUT + @AuthRequired + @Consumes(MediaType.MULTIPART_FORM_DATA) + @Path("{dataverseId}/featuredItems") + public Response updateFeaturedItems( + @Context ContainerRequestContext crc, + @PathParam("dataverseId") String dvIdtf, + @FormDataParam("id") List ids, + @FormDataParam("content") List contents, + @FormDataParam("displayOrder") List displayOrders, + @FormDataParam("keepFile") List keepFiles, + @FormDataParam("fileName") List fileNames, + @FormDataParam("file") List files) { + try { + if (ids == null || contents == null || displayOrders == null || keepFiles == null || fileNames == null) { + throw new WrappedResponse(error(Response.Status.BAD_REQUEST, + BundleUtil.getStringFromBundle("dataverse.update.featuredItems.error.missingInputParams"))); + } + + int size = ids.size(); + if (contents.size() != size || displayOrders.size() != size || keepFiles.size() != size || fileNames.size() != size) { + throw new WrappedResponse(error(Response.Status.BAD_REQUEST, + BundleUtil.getStringFromBundle("dataverse.update.featuredItems.error.inputListsSizeMismatch"))); + } + + Dataverse dataverse = findDataverseOrDie(dvIdtf); + List newItems = new ArrayList<>(); + Map itemsToUpdate = new HashMap<>(); + + for (int i = 0; i < contents.size(); i++) { + String fileName = fileNames.get(i); + InputStream fileInputStream = null; + FormDataContentDisposition contentDisposition = null; + + if (files != null) { + Optional matchingFile = files.stream() + .filter(file -> file.getFormDataContentDisposition().getFileName().equals(fileName)) + .findFirst(); + + if (matchingFile.isPresent()) { + fileInputStream = matchingFile.get().getValueAs(InputStream.class); + contentDisposition = matchingFile.get().getFormDataContentDisposition(); + } + } + + if (ids.get(i) == 0) { + newItems.add(NewDataverseFeaturedItemDTO.fromFormData( + contents.get(i), displayOrders.get(i), fileInputStream, contentDisposition)); + } else { + DataverseFeaturedItem existingItem = dataverseFeaturedItemServiceBean.findById(ids.get(i)); + if (existingItem == null) { + throw new WrappedResponse(error(Response.Status.NOT_FOUND, + MessageFormat.format(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notFound"), ids.get(i)))); + } + itemsToUpdate.put(existingItem, UpdatedDataverseFeaturedItemDTO.fromFormData( + contents.get(i), displayOrders.get(i), keepFiles.get(i), fileInputStream, contentDisposition)); + } + } + + List featuredItems = execCommand(new UpdateDataverseFeaturedItemsCommand( + createDataverseRequest(getRequestUser(crc)), + dataverse, + newItems, + itemsToUpdate + 
)); + + return ok(jsonDataverseFeaturedItems(featuredItems)); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + } + + @DELETE + @AuthRequired + @Path("{identifier}/featuredItems") + public Response deleteFeaturedItems(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) { + try { + Dataverse dataverse = findDataverseOrDie(dvIdtf); + execCommand(new UpdateDataverseFeaturedItemsCommand(createDataverseRequest(getRequestUser(crc)), dataverse, new ArrayList<>(), new ArrayMap<>())); + return ok(BundleUtil.getStringFromBundle("dataverse.delete.featuredItems.success")); + } catch (WrappedResponse e) { + return e.getResponse(); + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ExternalToolsApi.java b/src/main/java/edu/harvard/iq/dataverse/api/ExternalToolsApi.java new file mode 100644 index 00000000000..92139d86caf --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/ExternalToolsApi.java @@ -0,0 +1,58 @@ +package edu.harvard.iq.dataverse.api; + +import edu.harvard.iq.dataverse.api.auth.AuthRequired; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import jakarta.inject.Inject; +import jakarta.ws.rs.DELETE; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; + +@Path("externalTools") +public class ExternalToolsApi extends AbstractApiBean { + + @Inject + ExternalTools externalTools; + + @GET + public Response getExternalTools() { + return externalTools.getExternalTools(); + } + + @GET + @Path("{id}") + public Response getExternalTool(@PathParam("id") long externalToolIdFromUser) { + return externalTools.getExternalTool(externalToolIdFromUser); + } + + @POST + @AuthRequired + public Response addExternalTool(@Context ContainerRequestContext crc, String manifest) { + Response notAuthorized = authorize(crc); + return notAuthorized == null ? externalTools.addExternalTool(manifest) : notAuthorized; + } + + @DELETE + @AuthRequired + @Path("{id}") + public Response deleteExternalTool(@Context ContainerRequestContext crc, @PathParam("id") long externalToolIdFromUser) { + Response notAuthorized = authorize(crc); + return notAuthorized == null ? 
externalTools.deleteExternalTool(externalToolIdFromUser) : notAuthorized; + } + + private Response authorize(ContainerRequestContext crc) { + try { + AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + if (!user.isSuperuser()) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, "Superusers only."); + } + return null; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Index.java b/src/main/java/edu/harvard/iq/dataverse/api/Index.java index c30a77acb58..bc9a8ae692b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Index.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Index.java @@ -44,6 +44,7 @@ import java.lang.reflect.Field; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; @@ -451,11 +452,11 @@ public Response clearOrphans(@QueryParam("sync") String sync) { public String getSolrSchema() { StringBuilder sb = new StringBuilder(); - - for (DatasetFieldType datasetField : datasetFieldService.findAllOrderedByName()) { + Map cvocTermUriMap = datasetFieldSvc.getCVocConf(true); + for (DatasetFieldType datasetFieldType : datasetFieldService.findAllOrderedByName()) { //ToDo - getSolrField() creates/returns a new object - just get it once and re-use - String nameSearchable = datasetField.getSolrField().getNameSearchable(); - SolrField.SolrType solrType = datasetField.getSolrField().getSolrType(); + String nameSearchable = datasetFieldType.getSolrField().getNameSearchable(); + SolrField.SolrType solrType = datasetFieldType.getSolrField().getSolrType(); String type = solrType.getType(); if (solrType.equals(SolrField.SolrType.EMAIL)) { /** @@ -474,7 +475,7 @@ public String getSolrSchema() { */ logger.info("email type detected (" + nameSearchable + ") See also https://github.com/IQSS/dataverse/issues/759"); } - String multivalued = datasetField.getSolrField().isAllowedToBeMultivalued().toString(); + String multivalued = Boolean.toString(datasetFieldType.getSolrField().isAllowedToBeMultivalued() || cvocTermUriMap.containsKey(datasetFieldType.getId())); // sb.append(" \n"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Search.java b/src/main/java/edu/harvard/iq/dataverse/api/Search.java index ba82f8f758b..bfae753d591 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Search.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Search.java @@ -211,13 +211,50 @@ public Response search( } value.add("count_in_response", solrSearchResults.size()); - if (showTypeCounts && !solrQueryResponse.getTypeFacetCategories().isEmpty()) { + + // we want to show the missing dvobject types with count = 0 + // per https://github.com/IQSS/dataverse/issues/11127 + + if (showTypeCounts) { JsonObjectBuilder objectTypeCounts = Json.createObjectBuilder(); - for (FacetCategory facetCategory : solrQueryResponse.getTypeFacetCategories()) { - for (FacetLabel facetLabel : facetCategory.getFacetLabel()) { - objectTypeCounts.add(facetLabel.getName(), facetLabel.getCount()); + if (!solrQueryResponse.getTypeFacetCategories().isEmpty()) { + boolean filesMissing = true; + boolean datasetsMissing = true; + boolean dataversesMissing = true; + for (FacetCategory facetCategory : solrQueryResponse.getTypeFacetCategories()) { + for (FacetLabel facetLabel : facetCategory.getFacetLabel()) { + objectTypeCounts.add(facetLabel.getName(), facetLabel.getCount()); + if 
(facetLabel.getName().equals((SearchConstants.UI_DATAVERSES))) { + dataversesMissing = false; + } + if (facetLabel.getName().equals((SearchConstants.UI_DATASETS))) { + datasetsMissing = false; + } + if (facetLabel.getName().equals((SearchConstants.UI_FILES))) { + filesMissing = false; + } + } + } + + if (solrQueryResponse.getTypeFacetCategories().size() < 3) { + if (dataversesMissing) { + objectTypeCounts.add(SearchConstants.UI_DATAVERSES, 0); + } + if (datasetsMissing) { + objectTypeCounts.add(SearchConstants.UI_DATASETS, 0); + } + if (filesMissing) { + objectTypeCounts.add(SearchConstants.UI_FILES, 0); + } } + } + if (showTypeCounts && solrQueryResponse.getTypeFacetCategories().isEmpty()) { + objectTypeCounts.add(SearchConstants.UI_DATAVERSES, 0); + objectTypeCounts.add(SearchConstants.UI_DATASETS, 0); + objectTypeCounts.add(SearchConstants.UI_FILES, 0); + } + value.add("total_count_per_object_type", objectTypeCounts); } /** diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SendFeedbackAPI.java b/src/main/java/edu/harvard/iq/dataverse/api/SendFeedbackAPI.java new file mode 100644 index 00000000000..3bffcd042a3 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/SendFeedbackAPI.java @@ -0,0 +1,126 @@ +package edu.harvard.iq.dataverse.api; + +import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.api.auth.AuthRequired; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.branding.BrandingUtil; +import edu.harvard.iq.dataverse.engine.command.impl.CheckRateLimitForDatasetFeedbackCommand; +import edu.harvard.iq.dataverse.feedback.Feedback; +import edu.harvard.iq.dataverse.feedback.FeedbackUtil; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.cache.CacheFactoryBean; +import edu.harvard.iq.dataverse.util.json.JsonUtil; +import edu.harvard.iq.dataverse.validation.EMailValidator; +import jakarta.ejb.EJB; +import jakarta.json.*; +import jakarta.mail.internet.InternetAddress; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.Response; + +import java.text.MessageFormat; +import java.util.logging.Logger; + +@Path("sendfeedback") +public class SendFeedbackAPI extends AbstractApiBean { + private static final Logger logger = Logger.getLogger(SendFeedbackAPI.class.getCanonicalName()); + @EJB + MailServiceBean mailService; + @EJB + CacheFactoryBean cacheFactory; + /** + * This method mimics the contact form and sends an email to the contacts of the + * specified Collection/Dataset/DataFile, optionally ccing the support email + * address, or to the support email address when there is no target object. + **/ + @POST + @AuthRequired + public Response submitFeedback(@Context ContainerRequestContext crc, String jsonString) { + try { + JsonObject jsonObject = JsonUtil.getJsonObject(jsonString); + if (!jsonObject.containsKey("subject") || !jsonObject.containsKey("body")) { + return badRequest(BundleUtil.getStringFromBundle("sendfeedback.body.error.missingRequiredFields")); + } + + JsonNumber jsonNumber = jsonObject.containsKey("targetId") ? jsonObject.getJsonNumber("targetId") : null; + // idtf will hold the "targetId" or the "identifier". If neither is set then this is a general feedback to support + String idtf = jsonNumber != null ? 
jsonNumber.toString() : jsonObject.containsKey("identifier") ? jsonObject.getString("identifier") : null; + DvObject feedbackTarget = null; + + if (jsonNumber != null) { + feedbackTarget = dvObjSvc.findDvObject(jsonNumber.longValue()); + } else if (idtf != null) { + if (feedbackTarget == null) { + feedbackTarget = dataverseSvc.findByAlias(idtf); + } + if (feedbackTarget == null) { + feedbackTarget = dvObjSvc.findByGlobalId(idtf, DvObject.DType.Dataset); + } + if (feedbackTarget == null) { + feedbackTarget = dvObjSvc.findByGlobalId(idtf, DvObject.DType.DataFile); + } + } + + // feedbackTarget and idtf are both null this is a support feedback and is ok + if (feedbackTarget == null && idtf != null) { + return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("sendfeedback.request.error.targetNotFound")); + } + // Check for rate limit exceeded. + if (!cacheFactory.checkRate(getRequestUser(crc), new CheckRateLimitForDatasetFeedbackCommand(null, feedbackTarget))) { + return error(Response.Status.TOO_MANY_REQUESTS, BundleUtil.getStringFromBundle("sendfeedback.request.rateLimited")); + } + + DataverseSession dataverseSession = null; + String userMessage = sanitizeBody(jsonObject.getString("body")); + InternetAddress systemAddress = mailService.getSupportAddress().orElse(null); + String userEmail = getEmail(jsonObject, crc); + String messageSubject = jsonObject.getString("subject"); + String baseUrl = systemConfig.getDataverseSiteUrl(); + String installationBrandName = BrandingUtil.getInstallationBrandName(); + String supportTeamName = BrandingUtil.getSupportTeamName(systemAddress); + JsonArrayBuilder jab = Json.createArrayBuilder(); + Feedback feedback = FeedbackUtil.gatherFeedback(feedbackTarget, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName, SendFeedbackDialog.ccSupport(feedbackTarget)); + jab.add(feedback.toLimitedJsonObjectBuilder()); + mailService.sendMail(feedback.getFromEmail(), feedback.getToEmail(), feedback.getCcEmail(), feedback.getSubject(), feedback.getBody()); + return ok(jab); + } catch (WrappedResponse resp) { + return resp.getResponse(); + } catch (JsonException je) { + return error(Response.Status.BAD_REQUEST, "Invalid JSON; error message: " + je.getMessage()); + } + } + + private String getEmail(JsonObject jsonObject, ContainerRequestContext crc) throws WrappedResponse { + String fromEmail = jsonObject.containsKey("fromEmail") ? jsonObject.getString("fromEmail") : ""; + if (fromEmail.isBlank() && crc != null) { + User user = getRequestUser(crc); + if (user instanceof AuthenticatedUser) { + fromEmail = ((AuthenticatedUser) user).getEmail(); + } + } + if (fromEmail == null || fromEmail.isBlank()) { + throw new WrappedResponse(badRequest(BundleUtil.getStringFromBundle("sendfeedback.fromEmail.error.missing"))); + } + if (!EMailValidator.isEmailValid(fromEmail)) { + throw new WrappedResponse(badRequest(MessageFormat.format(BundleUtil.getStringFromBundle("sendfeedback.fromEmail.error.invalid"), fromEmail))); + } + return fromEmail; + } + private String sanitizeBody (String body) throws WrappedResponse { + // remove malicious html + String sanitizedBody = body == null ? 
"" : body.replaceAll("\\<.*?>", ""); + + long limit = systemConfig.getContactFeedbackMessageSizeLimit(); + if (limit > 0 && sanitizedBody.length() > limit) { + throw new WrappedResponse(badRequest(MessageFormat.format(BundleUtil.getStringFromBundle("sendfeedback.body.error.exceedsLength"), sanitizedBody.length(), limit))); + } else if (sanitizedBody.length() == 0) { + throw new WrappedResponse(badRequest(BundleUtil.getStringFromBundle("sendfeedback.body.error.isEmpty"))); + } + + return sanitizedBody; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetDTO.java index 3fc31730ba2..ec8adfb4eef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetDTO.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetDTO.java @@ -12,6 +12,7 @@ public class DatasetDTO implements java.io.Serializable { private String identifier; private String protocol; private String authority; + private String separator; private String globalIdCreateTime; private String publisher; private String publicationDate; @@ -51,6 +52,14 @@ public void setAuthority(String authority) { this.authority = authority; } + public String getSeparator() { + return separator; + } + + public void setSeparator(String separator) { + this.separator = separator; + } + public String getGlobalIdCreateTime() { return globalIdCreateTime; } @@ -94,7 +103,7 @@ public void setPublicationDate(String publicationDate) { @Override public String toString() { - return "DatasetDTO{" + "id=" + id + ", identifier=" + identifier + ", protocol=" + protocol + ", authority=" + authority + ", globalIdCreateTime=" + globalIdCreateTime + ", datasetVersion=" + datasetVersion + ", dataFiles=" + dataFiles + '}'; + return "DatasetDTO{" + "id=" + id + ", identifier=" + identifier + ", protocol=" + protocol + ", authority=" + authority + ", separator=" + separator + ", globalIdCreateTime=" + globalIdCreateTime + ", datasetVersion=" + datasetVersion + ", dataFiles=" + dataFiles + '}'; } public void setMetadataLanguage(String metadataLanguage) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/NewDataverseFeaturedItemDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/NewDataverseFeaturedItemDTO.java new file mode 100644 index 00000000000..47003761abc --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/NewDataverseFeaturedItemDTO.java @@ -0,0 +1,61 @@ +package edu.harvard.iq.dataverse.api.dto; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; + +import java.io.InputStream; + +public class NewDataverseFeaturedItemDTO { + private String content; + private int displayOrder; + private InputStream imageFileInputStream; + private String imageFileName; + + public static NewDataverseFeaturedItemDTO fromFormData(String content, + int displayOrder, + InputStream imageFileInputStream, + FormDataContentDisposition contentDispositionHeader) { + NewDataverseFeaturedItemDTO newDataverseFeaturedItemDTO = new NewDataverseFeaturedItemDTO(); + + newDataverseFeaturedItemDTO.content = content; + newDataverseFeaturedItemDTO.displayOrder = displayOrder; + + if (imageFileInputStream != null) { + newDataverseFeaturedItemDTO.imageFileInputStream = imageFileInputStream; + newDataverseFeaturedItemDTO.imageFileName = contentDispositionHeader.getFileName(); + } + + return newDataverseFeaturedItemDTO; + } + + public void setContent(String content) { + this.content = content; + } + + public String getContent() { + return content; + } + + 
public void setDisplayOrder(int displayOrder) { + this.displayOrder = displayOrder; + } + + public int getDisplayOrder() { + return displayOrder; + } + + public void setImageFileInputStream(InputStream imageFileInputStream) { + this.imageFileInputStream = imageFileInputStream; + } + + public InputStream getImageFileInputStream() { + return imageFileInputStream; + } + + public void setImageFileName(String imageFileName) { + this.imageFileName = imageFileName; + } + + public String getImageFileName() { + return imageFileName; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/RoleDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/RoleDTO.java index 58e30ade584..5769ab430ad 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/dto/RoleDTO.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/RoleDTO.java @@ -47,11 +47,11 @@ public void setPermissions(String[] permissions) { this.permissions = permissions; } - public DataverseRole asRole() { - DataverseRole r = new DataverseRole(); + public DataverseRole updateRoleFromDTO(DataverseRole r) { r.setAlias(alias); r.setDescription(description); r.setName(name); + r.clearPermissions(); if (permissions != null) { if (permissions.length > 0) { if (permissions[0].trim().toLowerCase().equals("all")) { @@ -65,5 +65,9 @@ public DataverseRole asRole() { } return r; } + + public DataverseRole asRole() { + return updateRoleFromDTO(new DataverseRole()); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/UpdatedDataverseFeaturedItemDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/UpdatedDataverseFeaturedItemDTO.java new file mode 100644 index 00000000000..43d1afc31e2 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/UpdatedDataverseFeaturedItemDTO.java @@ -0,0 +1,72 @@ +package edu.harvard.iq.dataverse.api.dto; + +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; + +import java.io.InputStream; + +public class UpdatedDataverseFeaturedItemDTO { + private String content; + private int displayOrder; + private boolean keepFile; + private InputStream imageFileInputStream; + private String imageFileName; + + public static UpdatedDataverseFeaturedItemDTO fromFormData(String content, + int displayOrder, + boolean keepFile, + InputStream imageFileInputStream, + FormDataContentDisposition contentDispositionHeader) { + UpdatedDataverseFeaturedItemDTO updatedDataverseFeaturedItemDTO = new UpdatedDataverseFeaturedItemDTO(); + + updatedDataverseFeaturedItemDTO.content = content; + updatedDataverseFeaturedItemDTO.displayOrder = displayOrder; + updatedDataverseFeaturedItemDTO.keepFile = keepFile; + + if (imageFileInputStream != null) { + updatedDataverseFeaturedItemDTO.imageFileInputStream = imageFileInputStream; + updatedDataverseFeaturedItemDTO.imageFileName = contentDispositionHeader.getFileName(); + } + + return updatedDataverseFeaturedItemDTO; + } + + public void setContent(String content) { + this.content = content; + } + + public String getContent() { + return content; + } + + public void setDisplayOrder(int displayOrder) { + this.displayOrder = displayOrder; + } + + public int getDisplayOrder() { + return displayOrder; + } + + public void setKeepFile(boolean keepFile) { + this.keepFile = keepFile; + } + + public boolean isKeepFile() { + return keepFile; + } + + public void setImageFileInputStream(InputStream imageFileInputStream) { + this.imageFileInputStream = imageFileInputStream; + } + + public InputStream getImageFileInputStream() { + return imageFileInputStream; + } + + 
public void setImageFileName(String imageFileName) { + this.imageFileName = imageFileName; + } + + public String getImageFileName() { + return imageFileName; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardMoveDatasetPage.java similarity index 79% rename from src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java rename to src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardMoveDatasetPage.java index 6fc80312bf5..b1333b02a46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardMoveDatasetPage.java @@ -28,13 +28,11 @@ import jakarta.faces.view.ViewScoped; import jakarta.inject.Inject; import jakarta.inject.Named; -import jakarta.persistence.EntityManager; -import jakarta.persistence.PersistenceContext; import jakarta.servlet.http.HttpServletRequest; @ViewScoped -@Named("DashboardDatamovePage") -public class DashboardDatamovePage implements java.io.Serializable { +@Named("DashboardMoveDatasetPage") +public class DashboardMoveDatasetPage implements java.io.Serializable { @Inject DataverseSession session; @@ -49,11 +47,8 @@ public class DashboardDatamovePage implements java.io.Serializable { DataverseServiceBean dataverseService; @Inject SettingsWrapper settingsWrapper; - - @PersistenceContext(unitName = "VDCNet-ejbPU") - private EntityManager em; - private static final Logger logger = Logger.getLogger(DashboardDatamovePage.class.getCanonicalName()); + private static final Logger logger = Logger.getLogger(DashboardMoveDatasetPage.class.getCanonicalName()); private AuthenticatedUser authUser = null; @@ -122,18 +117,18 @@ public String init() { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, - BundleUtil.getStringFromBundle("dashboard.card.datamove.manage"), - BundleUtil.getStringFromBundle("dashboard.card.datamove.message", Arrays.asList(settingsWrapper.getGuidesBaseUrl(), settingsWrapper.getGuidesVersion())))); + BundleUtil.getStringFromBundle("dashboard.card.move.dataset.manage"), + BundleUtil.getStringFromBundle("dashboard.move.dataset.message", Arrays.asList(settingsWrapper.getGuidesBaseUrl(), settingsWrapper.getGuidesVersion())))); return null; } public void move(){ Dataset ds = selectedSourceDataset; - String dsPersistentId = ds!=null?ds.getGlobalId().asString():null; - String srcAlias = ds!=null?ds.getOwner().getAlias():null; + String dsPersistentId = ds != null ? ds.getGlobalId().asString() : null; + String srcAlias = ds != null ? ds.getOwner().getAlias() : null; Dataverse target = selectedDestinationDataverse; - String dstAlias = target!=null?target.getAlias():null; + String dstAlias = target != null ? target.getAlias() : null; if (ds == null || target == null) { // Move only works if both inputs are correct @@ -148,9 +143,9 @@ public void move(){ // construct arguments for message List arguments = new ArrayList<>(); - arguments.add(ds!=null?ds.getDisplayName():"-"); - arguments.add(dsPersistentId!=null?dsPersistentId:"-"); - arguments.add(target!=null?target.getName():"-"); + arguments.add(ds != null ? ds.getDisplayName() : "-"); + arguments.add(dsPersistentId != null ? dsPersistentId : "-"); + arguments.add(target != null ? 
target.getName() : "-"); // copied logic from Datasets API move //Command requires Super user - it will be tested by the command @@ -163,7 +158,7 @@ public void move(){ logger.info("Moved " + dsPersistentId + " from " + srcAlias + " to " + dstAlias); - JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dashboard.card.datamove.message.success", arguments)); + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dashboard.move.dataset.message.success", arguments)); } catch (CommandException e) { logger.log(Level.SEVERE,"Unable to move "+ dsPersistentId + " from " + srcAlias + " to " + dstAlias, e); @@ -172,25 +167,20 @@ public void move(){ String guidesBaseUrl = settingsWrapper.getGuidesBaseUrl(); String version = settingsWrapper.getGuidesVersion(); // Suggest using the API to force the move. - arguments.add(BundleUtil.getStringFromBundle("dashboard.card.datamove.dataset.command.error.unforced.suggestForce", Arrays.asList(guidesBaseUrl, version))); + arguments.add(BundleUtil.getStringFromBundle("dashboard.move.dataset.command.error.unforced.suggestForce", Arrays.asList(guidesBaseUrl, version))); } else { String emptyStringNoDetails = ""; arguments.add(emptyStringNoDetails); } FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, - BundleUtil.getStringFromBundle("dashboard.card.datamove.message.failure.summary"), - BundleUtil.getStringFromBundle("dashboard.card.datamove.message.failure.details", arguments))); + BundleUtil.getStringFromBundle("dashboard.move.dataset.message.failure.summary"), + BundleUtil.getStringFromBundle("dashboard.move.dataset.message.failure.details", arguments))); } } - public String getDataverseCount() { - long count = em.createQuery("SELECT count(dv) FROM Dataverse dv", Long.class).getSingleResult(); - return NumberFormat.getInstance().format(count); - } - public String getDatasetCount() { - long count = em.createQuery("SELECT count(ds) FROM Dataset ds", Long.class).getSingleResult(); + long count = datasetService.getDatasetCount(); return NumberFormat.getInstance().format(count); } diff --git a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardMoveDataversePage.java b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardMoveDataversePage.java new file mode 100644 index 00000000000..be3d05a823e --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardMoveDataversePage.java @@ -0,0 +1,163 @@ +package edu.harvard.iq.dataverse.dashboard; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.DataverseSession; +import edu.harvard.iq.dataverse.EjbDataverseEngine; +import edu.harvard.iq.dataverse.PermissionsWrapper; +import edu.harvard.iq.dataverse.SettingsWrapper; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.impl.MoveDataverseCommand; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.JsfHelper; +import java.text.NumberFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; +import jakarta.ejb.EJB; +import jakarta.faces.application.FacesMessage; +import jakarta.faces.component.UIInput; +import jakarta.faces.context.FacesContext; +import 
jakarta.faces.view.ViewScoped; +import jakarta.inject.Inject; +import jakarta.inject.Named; +import jakarta.servlet.http.HttpServletRequest; + +@ViewScoped +@Named("DashboardMoveDataversePage") +public class DashboardMoveDataversePage implements java.io.Serializable { + + @Inject + DataverseSession session; + @Inject + PermissionsWrapper permissionsWrapper; + @EJB + EjbDataverseEngine commandEngine; + @EJB + DataverseServiceBean dataverseService; + @Inject + SettingsWrapper settingsWrapper; + + private static final Logger logger = Logger.getLogger(DashboardMoveDataversePage.class.getCanonicalName()); + + private AuthenticatedUser authUser = null; + + // source dataverse + + public UIInput getSelectedSourceDataverseMenu() { + return selectedSourceDataverseMenu; + } + + public void setSelectedSourceDataverseMenu(UIInput selectedSourceDataverseMenu) { + this.selectedSourceDataverseMenu = selectedSourceDataverseMenu; + } + + UIInput selectedSourceDataverseMenu; + + public Dataverse getSelectedSourceDataverse() { + return selectedSourceDataverse; + } + + public void setSelectedSourceDataverse(Dataverse selectedSourceDataverse) { + this.selectedSourceDataverse = selectedSourceDataverse; + } + + private Dataverse selectedSourceDataverse; + + // destination dataverse + + public UIInput getSelectedDataverseMenu() { + return selectedDataverseMenu; + } + + public void setSelectedDataverseMenu(UIInput selectedDataverseMenu) { + this.selectedDataverseMenu = selectedDataverseMenu; + } + + UIInput selectedDataverseMenu; + + public Dataverse getSelectedDestinationDataverse() { + return selectedDestinationDataverse; + } + + public void setSelectedDestinationDataverse(Dataverse selectedDestinationDataverse) { + this.selectedDestinationDataverse = selectedDestinationDataverse; + } + + private Dataverse selectedDestinationDataverse; + + public List completeSelectedDataverse(String query) { + return dataverseService.filterByAliasQuery(query); + } + + public String init() { + + if ((session.getUser() != null) && (session.getUser().isAuthenticated()) && (session.getUser().isSuperuser())) { + authUser = (AuthenticatedUser) session.getUser(); + // initialize components, if any need it + } else { + return permissionsWrapper.notAuthorized(); + // redirect to login OR give some type of โ€˜you must be logged in' message + } + + FacesContext.getCurrentInstance().addMessage(null, + new FacesMessage(FacesMessage.SEVERITY_INFO, + BundleUtil.getStringFromBundle("dashboard.move.dataverse.message.summary"), + BundleUtil.getStringFromBundle("dashboard.move.dataverse.message.detail", Arrays.asList(settingsWrapper.getGuidesBaseUrl(), settingsWrapper.getGuidesVersion())))); + return null; + } + + public void move(){ + Dataverse dvSource = selectedSourceDataverse; + String srcAlias = dvSource != null ? dvSource.getAlias() : null; + + Dataverse target = selectedDestinationDataverse; + String dstAlias = target != null ? target.getAlias() : null; + + if (dvSource == null || target == null) { + // Move only works if both inputs are correct + // But if these inputs are required, we should never get here + // Since we never get here, we aren't bothering to move this English to the bundle. + FacesContext.getCurrentInstance().addMessage(null, + new FacesMessage("Please specify all fields")); + return; + } + + // construct arguments for message + List arguments = new ArrayList<>(); + arguments.add(dvSource != null ? dvSource.getName() : "-"); + arguments.add(target != null ? 
target.getName() : "-"); + + // copied logic from Dataverse API move + //Command requires Super user - it will be tested by the command + try { + HttpServletRequest httpServletRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest(); + DataverseRequest dataverseRequest = new DataverseRequest(authUser, httpServletRequest); + commandEngine.submit(new MoveDataverseCommand( + dataverseRequest, dvSource, target, false + )); + + logger.info("Moved " + srcAlias + " to " + dstAlias); + + JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dashboard.move.dataverse.message.success", arguments)); + } + catch (CommandException e) { + logger.log(Level.SEVERE,"Unable to move "+ srcAlias + " to " + dstAlias, e); + arguments.add(e.getLocalizedMessage()); + FacesContext.getCurrentInstance().addMessage(null, + new FacesMessage(FacesMessage.SEVERITY_ERROR, + BundleUtil.getStringFromBundle("dashboard.move.dataverse.message.failure.summary"), + BundleUtil.getStringFromBundle("dashboard.move.dataverse.message.failure.details", arguments))); + } + } + + public String getDataverseCount() { + long count = dataverseService.getDataverseCount(); + return NumberFormat.getInstance().format(count); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java index 78bf232e1a6..727703852eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetType.java @@ -1,17 +1,23 @@ package edu.harvard.iq.dataverse.dataset; +import edu.harvard.iq.dataverse.MetadataBlock; import jakarta.json.Json; +import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObjectBuilder; +import jakarta.persistence.CascadeType; import jakarta.persistence.Column; import jakarta.persistence.Entity; import jakarta.persistence.GeneratedValue; import jakarta.persistence.GenerationType; import jakarta.persistence.Id; +import jakarta.persistence.ManyToMany; import jakarta.persistence.NamedQueries; import jakarta.persistence.NamedQuery; import jakarta.persistence.Table; import jakarta.persistence.UniqueConstraint; import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; @NamedQueries({ @NamedQuery(name = "DatasetType.findAll", @@ -42,6 +48,12 @@ public class DatasetType implements Serializable { @Column(nullable = false) private String name; + /** + * The metadata blocks this dataset type is linked to. 
+ */ + @ManyToMany(cascade = {CascadeType.MERGE}) + private List metadataBlocks = new ArrayList<>(); + public DatasetType() { } @@ -61,10 +73,23 @@ public void setName(String name) { this.name = name; } + public List getMetadataBlocks() { + return metadataBlocks; + } + + public void setMetadataBlocks(List metadataBlocks) { + this.metadataBlocks = metadataBlocks; + } + public JsonObjectBuilder toJson() { + JsonArrayBuilder linkedMetadataBlocks = Json.createArrayBuilder(); + for (MetadataBlock metadataBlock : this.getMetadataBlocks()) { + linkedMetadataBlocks.add(metadataBlock.getName()); + } return Json.createObjectBuilder() .add("id", getId()) - .add("name", getName()); + .add("name", getName()) + .add("linkedMetadataBlocks", linkedMetadataBlocks); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataverse/featured/DataverseFeaturedItem.java b/src/main/java/edu/harvard/iq/dataverse/dataverse/featured/DataverseFeaturedItem.java new file mode 100644 index 00000000000..53d09516789 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/dataverse/featured/DataverseFeaturedItem.java @@ -0,0 +1,88 @@ +package edu.harvard.iq.dataverse.dataverse.featured; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.util.SystemConfig; +import jakarta.persistence.*; +import jakarta.validation.constraints.NotBlank; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.Size; + +@NamedQueries({ + @NamedQuery(name = "DataverseFeaturedItem.deleteById", + query = "DELETE FROM DataverseFeaturedItem item WHERE item.id=:id"), + @NamedQuery(name = "DataverseFeaturedItem.findByDataverseOrderedByDisplayOrder", + query = "SELECT item FROM DataverseFeaturedItem item WHERE item.dataverse = :dataverse ORDER BY item.displayOrder ASC") +}) +@Entity +@Table(indexes = @Index(columnList = "displayOrder")) +public class DataverseFeaturedItem { + + public static final int MAX_FEATURED_ITEM_CONTENT_SIZE = 15000; + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @ManyToOne + @JoinColumn(nullable = false) + private Dataverse dataverse; + + @NotBlank + @Size(max = MAX_FEATURED_ITEM_CONTENT_SIZE) + @Lob + @Column(columnDefinition = "TEXT", nullable = false) + private String content; + + @Min(0) + @Column(nullable = false) + private int displayOrder; + + private String imageFileName; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Dataverse getDataverse() { + return dataverse; + } + + public void setDataverse(Dataverse dataverse) { + this.dataverse = dataverse; + } + + public String getContent() { + return content; + } + + public void setContent(String content) { + this.content = content; + } + + public int getDisplayOrder() { + return displayOrder; + } + + public void setDisplayOrder(int displayOrder) { + this.displayOrder = displayOrder; + } + + public String getImageFileName() { + return imageFileName; + } + + public void setImageFileName(String imageFileName) { + this.imageFileName = imageFileName; + } + + public String getImageFileUrl() { + if (id != null && imageFileName != null) { + return SystemConfig.getDataverseSiteUrlStatic() + "/api/access/dataverseFeaturedItemImage/" + id; + } + return null; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/dataverse/featured/DataverseFeaturedItemServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/dataverse/featured/DataverseFeaturedItemServiceBean.java new file mode 100644 index 00000000000..56cdaf5692e --- /dev/null 
+++ b/src/main/java/edu/harvard/iq/dataverse/dataverse/featured/DataverseFeaturedItemServiceBean.java @@ -0,0 +1,100 @@ +package edu.harvard.iq.dataverse.dataverse.featured; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.FileUtil; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.Serializable; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.util.List; + +@Stateless +@Named +public class DataverseFeaturedItemServiceBean implements Serializable { + + public static class InvalidImageFileException extends Exception { + public InvalidImageFileException(String message) { + super(message); + } + } + + @PersistenceContext(unitName = "VDCNet-ejbPU") + private EntityManager em; + + public DataverseFeaturedItem findById(Long id) { + return em.find(DataverseFeaturedItem.class, id); + } + + public DataverseFeaturedItem save(DataverseFeaturedItem dataverseFeaturedItem) { + if (dataverseFeaturedItem.getId() == null) { + em.persist(dataverseFeaturedItem); + em.flush(); + } else { + dataverseFeaturedItem = em.merge(dataverseFeaturedItem); + } + return dataverseFeaturedItem; + } + + public void delete(Long id) { + em.createNamedQuery("DataverseFeaturedItem.deleteById", DataverseFeaturedItem.class) + .setParameter("id", id) + .executeUpdate(); + } + + public List findAllByDataverseOrdered(Dataverse dataverse) { + return em + .createNamedQuery("DataverseFeaturedItem.findByDataverseOrderedByDisplayOrder", DataverseFeaturedItem.class) + .setParameter("dataverse", dataverse) + .getResultList(); + } + + public InputStream getImageFileAsInputStream(DataverseFeaturedItem dataverseFeaturedItem) throws IOException { + Path imagePath = Path.of(JvmSettings.DOCROOT_DIRECTORY.lookup(), + JvmSettings.FEATURED_ITEMS_IMAGE_UPLOADS_DIRECTORY.lookup(), + dataverseFeaturedItem.getDataverse().getId().toString(), + dataverseFeaturedItem.getImageFileName()); + return Files.newInputStream(imagePath); + } + + public void saveDataverseFeaturedItemImageFile(InputStream inputStream, String imageFileName, Long dataverseId) throws IOException, InvalidImageFileException { + File tempFile = FileUtil.inputStreamToFile(inputStream); + validateImageFile(tempFile); + + Path imageDir = FileUtil.createDirStructure( + JvmSettings.DOCROOT_DIRECTORY.lookup(), + JvmSettings.FEATURED_ITEMS_IMAGE_UPLOADS_DIRECTORY.lookup(), + dataverseId.toString() + ); + File uploadedFile = new File(imageDir.toFile(), imageFileName); + + if (!uploadedFile.exists()) { + uploadedFile.createNewFile(); + } + + Files.copy(tempFile.toPath(), uploadedFile.toPath(), StandardCopyOption.REPLACE_EXISTING); + } + + private void validateImageFile(File file) throws IOException, InvalidImageFileException { + if (!FileUtil.isFileOfImageType(file)) { + throw new InvalidImageFileException( + BundleUtil.getStringFromBundle("dataverse.create.featuredItem.error.invalidFileType") + ); + } + Integer maxAllowedSize = JvmSettings.FEATURED_ITEMS_IMAGE_MAXSIZE.lookup(Integer.class); + if (file.length() > maxAllowedSize) { + throw new InvalidImageFileException( + BundleUtil.getStringFromBundle("dataverse.create.featuredItem.error.fileSizeExceedsLimit", List.of(maxAllowedSize.toString())) + ); + 
} + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java index 282cbb88988..42f2616cd80 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java @@ -1,28 +1,10 @@ package edu.harvard.iq.dataverse.engine.command; -import edu.harvard.iq.dataverse.DataFileServiceBean; -import edu.harvard.iq.dataverse.DatasetFieldServiceBean; -import edu.harvard.iq.dataverse.DatasetLinkingServiceBean; -import edu.harvard.iq.dataverse.DatasetServiceBean; -import edu.harvard.iq.dataverse.DatasetVersionServiceBean; -import edu.harvard.iq.dataverse.DataverseFacetServiceBean; -import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevelServiceBean; -import edu.harvard.iq.dataverse.DataverseLinkingServiceBean; -import edu.harvard.iq.dataverse.DataverseRoleServiceBean; -import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; -import edu.harvard.iq.dataverse.DvObjectServiceBean; -import edu.harvard.iq.dataverse.FeaturedDataverseServiceBean; -import edu.harvard.iq.dataverse.FileDownloadServiceBean; -import edu.harvard.iq.dataverse.GuestbookResponseServiceBean; -import edu.harvard.iq.dataverse.GuestbookServiceBean; -import edu.harvard.iq.dataverse.MetadataBlockServiceBean; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItemServiceBean; import edu.harvard.iq.dataverse.search.IndexServiceBean; -import edu.harvard.iq.dataverse.PermissionServiceBean; -import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; import edu.harvard.iq.dataverse.search.SearchServiceBean; -import edu.harvard.iq.dataverse.TemplateServiceBean; -import edu.harvard.iq.dataverse.UserNotificationServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; @@ -152,4 +134,6 @@ public interface CommandContext { public void addCommand(Command command); public DatasetFieldServiceBean dsField(); + + public DataverseFeaturedItemServiceBean dataverseFeaturedItems(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/InvalidCommandArgumentsException.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/InvalidCommandArgumentsException.java new file mode 100644 index 00000000000..95c6f52b880 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/exception/InvalidCommandArgumentsException.java @@ -0,0 +1,25 @@ +package edu.harvard.iq.dataverse.engine.command.exception; + +import edu.harvard.iq.dataverse.engine.command.Command; + +/** + * Exception thrown when a {@link Command} is executed with invalid or malformed arguments. + *

+ * This exception typically indicates that the input parameters provided to the command + * do not meet the required criteria (e.g., missing fields, invalid formats, or other + * constraints). + * + * Example scenarios: + * <ul> + *     <li>A required argument is null or missing.</li> + *     <li>An argument is in an invalid format (e.g., a malformed email address).</li> + *     <li>Arguments violate business rules or constraints.</li> + * </ul>
+ */ +public class InvalidCommandArgumentsException extends CommandException { + + public InvalidCommandArgumentsException(String message, Command aCommand) { + super(message, aCommand); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java index 91f3a5b823c..2a72485d821 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -19,15 +19,13 @@ abstract class AbstractWriteDataverseCommand extends AbstractCommand private final List inputLevels; private final List facets; protected final List metadataBlocks; - private final boolean resetRelationsOnNullValues; public AbstractWriteDataverseCommand(Dataverse dataverse, Dataverse affectedDataverse, DataverseRequest request, List facets, List inputLevels, - List metadataBlocks, - boolean resetRelationsOnNullValues) { + List metadataBlocks) { super(request, affectedDataverse); this.dataverse = dataverse; if (facets != null) { @@ -45,7 +43,6 @@ public AbstractWriteDataverseCommand(Dataverse dataverse, } else { this.metadataBlocks = null; } - this.resetRelationsOnNullValues = resetRelationsOnNullValues; } @Override @@ -59,46 +56,61 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { return ctxt.dataverses().save(dataverse); } + /* + metadataBlocks = null - ignore + metadataBlocks is empty - delete and inherit from parent + metadataBlocks is not empty - set with new updated values + */ private void processMetadataBlocks() { - if (metadataBlocks != null && !metadataBlocks.isEmpty()) { - dataverse.setMetadataBlockRoot(true); - dataverse.setMetadataBlocks(metadataBlocks); - } else if (resetRelationsOnNullValues) { - dataverse.setMetadataBlockRoot(false); - dataverse.clearMetadataBlocks(); + if (metadataBlocks != null) { + if (metadataBlocks.isEmpty()) { + dataverse.setMetadataBlockRoot(false); + dataverse.clearMetadataBlocks(); + } else { + dataverse.setMetadataBlockRoot(true); + dataverse.setMetadataBlocks(metadataBlocks); + } } } + /* + facets = null - ignore + facets is empty - delete and inherit from parent + facets is not empty - set with new updated values + */ private void processFacets(CommandContext ctxt) { if (facets != null) { - ctxt.facets().deleteFacetsFor(dataverse); - dataverse.setDataverseFacets(new ArrayList<>()); - - if (!facets.isEmpty()) { + if (facets.isEmpty()) { + ctxt.facets().deleteFacetsFor(dataverse); + dataverse.setFacetRoot(false); + } else { + ctxt.facets().deleteFacetsFor(dataverse); + dataverse.setDataverseFacets(new ArrayList<>()); dataverse.setFacetRoot(true); + for (int i = 0; i < facets.size(); i++) { + ctxt.facets().create(i, facets.get(i), dataverse); + } } - - for (int i = 0; i < facets.size(); i++) { - ctxt.facets().create(i, facets.get(i), dataverse); - } - } else if (resetRelationsOnNullValues) { - ctxt.facets().deleteFacetsFor(dataverse); - dataverse.setFacetRoot(false); } } + /* + inputLevels = null - ignore + inputLevels is empty - delete + inputLevels is not empty - set with new updated values + */ private void processInputLevels(CommandContext ctxt) { if (inputLevels != null) { - if (!inputLevels.isEmpty()) { + if (inputLevels.isEmpty()) { + ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); + } else { dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); + 
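(Illustrative aside, not part of this changeset: the null / empty / non-empty rules documented above translate into request payloads roughly like this jakarta.json sketch; the surrounding payload shape is an assumption, only the facets key matters here.)

import jakarta.json.Json;
import jakarta.json.JsonObject;

public class FacetsPayloadSketch {
    public static void main(String[] args) {
        // "facets" omitted entirely -> parsed as null -> existing facet configuration is left untouched.
        JsonObject leaveFacetsUnchanged = Json.createObjectBuilder()
                .add("name", "My Collection")
                .build();

        // "facets" present but empty -> parsed as an empty list -> facets are deleted and inherited from the parent.
        JsonObject inheritFacetsFromParent = Json.createObjectBuilder()
                .add("name", "My Collection")
                .add("facets", Json.createArrayBuilder())
                .build();

        System.out.println(leaveFacetsUnchanged);
        System.out.println(inheritFacetsFromParent);
    }
}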
ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); + inputLevels.forEach(inputLevel -> { + inputLevel.setDataverse(dataverse); + ctxt.fieldTypeInputLevels().create(inputLevel); + }); } - ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); - inputLevels.forEach(inputLevel -> { - inputLevel.setDataverse(dataverse); - ctxt.fieldTypeInputLevels().create(inputLevel); - }); - } else if (resetRelationsOnNullValues) { - ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseFeaturedItemCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseFeaturedItemCommand.java new file mode 100644 index 00000000000..8c4a8281345 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseFeaturedItemCommand.java @@ -0,0 +1,77 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItemServiceBean; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.InvalidCommandArgumentsException; +import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.MarkupChecker; + +import java.io.IOException; +import java.io.InputStream; +import java.text.MessageFormat; +import java.util.List; + +/** + * An abstract base class for commands that perform write operations on {@link DataverseFeaturedItem}s. 
+ */ +@RequiredPermissions({Permission.EditDataverse}) +abstract class AbstractWriteDataverseFeaturedItemCommand extends AbstractCommand { + + protected final Dataverse dataverse; + + public AbstractWriteDataverseFeaturedItemCommand(DataverseRequest request, Dataverse affectedDataverse) { + super(request, affectedDataverse); + this.dataverse = affectedDataverse; + } + + protected void validateAndSetContent(DataverseFeaturedItem featuredItem, String content) throws InvalidCommandArgumentsException { + if (content == null || content.trim().isEmpty()) { + throw new InvalidCommandArgumentsException( + BundleUtil.getStringFromBundle("dataverse.create.featuredItem.error.contentShouldBeProvided"), + this + ); + } + content = MarkupChecker.sanitizeAdvancedHTML(content); + if (content.length() > DataverseFeaturedItem.MAX_FEATURED_ITEM_CONTENT_SIZE) { + throw new InvalidCommandArgumentsException( + MessageFormat.format( + BundleUtil.getStringFromBundle("dataverse.create.featuredItem.error.contentExceedsLengthLimit"), + List.of(DataverseFeaturedItem.MAX_FEATURED_ITEM_CONTENT_SIZE) + ), + this + ); + } + featuredItem.setContent(content); + } + + protected void setFileImageIfAvailableOrNull(DataverseFeaturedItem featuredItem, String imageFileName, InputStream imageFileInputStream, CommandContext ctxt) throws CommandException { + if (imageFileName != null && imageFileInputStream != null) { + try { + ctxt.dataverseFeaturedItems().saveDataverseFeaturedItemImageFile(imageFileInputStream, imageFileName, dataverse.getId()); + } catch (DataverseFeaturedItemServiceBean.InvalidImageFileException e) { + throw new InvalidCommandArgumentsException( + e.getMessage(), + this + ); + } catch (IOException e) { + throw new CommandException( + BundleUtil.getStringFromBundle( + "dataverse.create.featuredItem.error.imageFileProcessing", + List.of(e.getMessage()) + ), + this + ); + } + featuredItem.setImageFileName(imageFileName); + } else { + featuredItem.setImageFileName(null); + } + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetFeedbackCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetFeedbackCommand.java new file mode 100644 index 00000000000..d25dbd974c2 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CheckRateLimitForDatasetFeedbackCommand.java @@ -0,0 +1,18 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +public class CheckRateLimitForDatasetFeedbackCommand extends AbstractVoidCommand { + + public CheckRateLimitForDatasetFeedbackCommand(DataverseRequest aRequest, DvObject dvObject) { + super(aRequest, dvObject); + } + + @Override + protected void executeImpl(CommandContext ctxt) throws CommandException { } +} + diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 3728f3ee6ce..145cfb6199c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -39,7 +39,7 @@ public 
CreateDataverseCommand(Dataverse created, List facets, List inputLevels, List metadataBlocks) { - super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks, false); + super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseFeaturedItemCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseFeaturedItemCommand.java new file mode 100644 index 00000000000..24732d05c8b --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseFeaturedItemCommand.java @@ -0,0 +1,42 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; +import edu.harvard.iq.dataverse.api.dto.NewDataverseFeaturedItemDTO; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +/** + * Creates a featured item {@link DataverseFeaturedItem} for a {@link Dataverse}. + */ +public class CreateDataverseFeaturedItemCommand extends AbstractWriteDataverseFeaturedItemCommand { + + private final NewDataverseFeaturedItemDTO newDataverseFeaturedItemDTO; + + public CreateDataverseFeaturedItemCommand(DataverseRequest request, + Dataverse dataverse, + NewDataverseFeaturedItemDTO newDataverseFeaturedItemDTO) { + super(request, dataverse); + this.newDataverseFeaturedItemDTO = newDataverseFeaturedItemDTO; + } + + @Override + public DataverseFeaturedItem execute(CommandContext ctxt) throws CommandException { + DataverseFeaturedItem dataverseFeaturedItem = new DataverseFeaturedItem(); + + validateAndSetContent(dataverseFeaturedItem, newDataverseFeaturedItemDTO.getContent()); + dataverseFeaturedItem.setDisplayOrder(newDataverseFeaturedItemDTO.getDisplayOrder()); + + setFileImageIfAvailableOrNull( + dataverseFeaturedItem, + newDataverseFeaturedItemDTO.getImageFileName(), + newDataverseFeaturedItemDTO.getImageFileInputStream(), + ctxt + ); + + dataverseFeaturedItem.setDataverse(dataverse); + + return ctxt.dataverseFeaturedItems().save(dataverseFeaturedItem); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java index e9a2025b112..e4130b534b3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java @@ -53,7 +53,7 @@ import static edu.harvard.iq.dataverse.util.FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT; import static edu.harvard.iq.dataverse.util.FileUtil.createIngestFailureReport; import static edu.harvard.iq.dataverse.util.FileUtil.determineFileType; -import static edu.harvard.iq.dataverse.util.FileUtil.determineFileTypeByNameAndExtension; +import static edu.harvard.iq.dataverse.util.FileUtil.determineRemoteFileType; import static edu.harvard.iq.dataverse.util.FileUtil.getFilesTempDirectory; import static edu.harvard.iq.dataverse.util.FileUtil.saveInputStreamInTempFile; import static edu.harvard.iq.dataverse.util.FileUtil.useRecognizedType; @@ -574,6 +574,8 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } else { // Direct upload. 
+ finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; + // Since this is a direct upload, and therefore no temp file associated // with it, we may, OR MAY NOT know the size of the file. If this is // a direct upload via the UI, the page must have already looked up @@ -593,18 +595,6 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException } } - // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied - finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType; - String type = determineFileTypeByNameAndExtension(fileName); - if (!StringUtils.isBlank(type)) { - //Use rules for deciding when to trust browser supplied type - if (useRecognizedType(finalType, type)) { - finalType = type; - } - logger.fine("Supplied type: " + suppliedContentType + ", finalType: " + finalType); - } - - } // Finally, if none of the special cases above were applicable (or @@ -635,6 +625,30 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException DataFile datafile = FileUtil.createSingleDataFile(version, newFile, newStorageIdentifier, fileName, finalType, newCheckSumType, newCheckSum); if (datafile != null) { + if (newStorageIdentifier != null) { + // Direct upload case + // Improve the MIMEType + // Need the owner for the StorageIO class to get the file/S3 path from the + // storageIdentifier + // Currently owner is null, but using this flag will avoid making changes here + // if that isn't true in the future + boolean ownerSet = datafile.getOwner() != null; + if (!ownerSet) { + datafile.setOwner(version.getDataset()); + } + String type = determineRemoteFileType(datafile, fileName); + if (!StringUtils.isBlank(type)) { + // Use rules for deciding when to trust browser supplied type + if (useRecognizedType(finalType, type)) { + datafile.setContentType(type); + } + logger.fine("Supplied type: " + suppliedContentType + ", finalType: " + finalType); + } + // Avoid changing + if (!ownerSet) { + datafile.setOwner(null); + } + } if (warningMessage != null) { createIngestFailureReport(datafile, warningMessage); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java index 8cffcd3d821..4a897adefa2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java @@ -22,12 +22,12 @@ @RequiredPermissions(Permission.ManageDataversePermissions) public class CreateRoleCommand extends AbstractCommand { - private final DataverseRole created; + private final DataverseRole role; private final Dataverse dv; public CreateRoleCommand(DataverseRole aRole, DataverseRequest aRequest, Dataverse anAffectedDataverse) { super(aRequest, anAffectedDataverse); - created = aRole; + role = aRole; dv = anAffectedDataverse; } @@ -41,16 +41,16 @@ public DataverseRole execute(CommandContext ctxt) throws CommandException { //Test to see if the role already exists in DB try { DataverseRole testRole = ctxt.em().createNamedQuery("DataverseRole.findDataverseRoleByAlias", DataverseRole.class) - .setParameter("alias", created.getAlias()) + .setParameter("alias", role.getAlias()) .getSingleResult(); - if (!(testRole == null)) { + if (testRole != null && !testRole.getId().equals(role.getId())) { throw 
new IllegalCommandException(BundleUtil.getStringFromBundle("permission.role.not.created.alias.already.exists"), this); } } catch (NoResultException nre) { - // we want no results because that meand we can create a role + // we want no results because that meant we can create a role } - dv.addRole(created); - return ctxt.roles().save(created); + dv.addRole(role); + return ctxt.roles().save(role); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataverseCommand.java index c7c592f9458..84a0ab0f3f2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataverseCommand.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.RoleAssignment; @@ -78,6 +79,14 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { ctxt.em().remove(merged); } doomed.setDataverseFieldTypeInputLevels(new ArrayList<>()); + + // Featured Items + for (DataverseFeaturedItem featuredItem : doomed.getDataverseFeaturedItems()) { + DataverseFeaturedItem merged = ctxt.em().merge(featuredItem); + ctxt.em().remove(merged); + } + doomed.setDataverseFeaturedItems(new ArrayList<>()); + // DATAVERSE Dataverse doomedAndMerged = ctxt.em().merge(doomed); ctxt.em().remove(doomedAndMerged); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataverseFeaturedItemCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataverseFeaturedItemCommand.java new file mode 100644 index 00000000000..215863a44da --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataverseFeaturedItemCommand.java @@ -0,0 +1,26 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.*; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +/** + * Deletes a particular featured item {@link DataverseFeaturedItem} of a {@link Dataverse}. 
+ */ +@RequiredPermissions({Permission.EditDataverse}) +public class DeleteDataverseFeaturedItemCommand extends AbstractVoidCommand { + + private final DataverseFeaturedItem doomed; + + public DeleteDataverseFeaturedItemCommand(DataverseRequest request, DataverseFeaturedItem doomed) { + super(request, doomed.getDataverse()); + this.doomed = doomed; + } + + @Override + protected void executeImpl(CommandContext ctxt) throws CommandException { + ctxt.dataverseFeaturedItems().delete(doomed.getId()); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseFeaturedItemCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseFeaturedItemCommand.java new file mode 100644 index 00000000000..c594887b6ed --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDataverseFeaturedItemCommand.java @@ -0,0 +1,37 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +import java.util.Collections; +import java.util.Map; +import java.util.Set; + +/** + * Retrieves a particular featured item {@link DataverseFeaturedItem}. + */ +public class GetDataverseFeaturedItemCommand extends AbstractCommand { + + private final DataverseFeaturedItem dataverseFeaturedItem; + + public GetDataverseFeaturedItemCommand(DataverseRequest request, DataverseFeaturedItem dataverseFeaturedItem) { + super(request, dataverseFeaturedItem.getDataverse()); + this.dataverseFeaturedItem = dataverseFeaturedItem; + } + + @Override + public DataverseFeaturedItem execute(CommandContext ctxt) throws CommandException { + return dataverseFeaturedItem; + } + + @Override + public Map> getRequiredPermissions() { + return Collections.singletonMap("", + dataverseFeaturedItem.getDataverse().isReleased() ? Collections.emptySet() + : Collections.singleton(Permission.ViewUnpublishedDataverse)); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseFeaturedItemsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseFeaturedItemsCommand.java new file mode 100644 index 00000000000..0d4051fc7d5 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseFeaturedItemsCommand.java @@ -0,0 +1,36 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +import java.util.*; + +/** + * Lists the featured items {@link DataverseFeaturedItem} of a {@link Dataverse}. 
+ */ +public class ListDataverseFeaturedItemsCommand extends AbstractCommand> { + + private final Dataverse dataverse; + + public ListDataverseFeaturedItemsCommand(DataverseRequest request, Dataverse dataverse) { + super(request, dataverse); + this.dataverse = dataverse; + } + + @Override + public List execute(CommandContext ctxt) throws CommandException { + return ctxt.dataverseFeaturedItems().findAllByDataverseOrdered(dataverse); + } + + @Override + public Map> getRequiredPermissions() { + return Collections.singletonMap("", + dataverse.isReleased() ? Collections.emptySet() + : Collections.singleton(Permission.ViewUnpublishedDataverse)); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java index 8275533ced2..e79d36de07d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommand.java @@ -3,15 +3,18 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.MetadataBlock; import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.dataset.DatasetType; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Stream; /** * Lists the metadata blocks of a {@link Dataverse}. 
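A note on the two read-only featured-item commands above (GetDataverseFeaturedItemCommand and ListDataverseFeaturedItemsCommand): both skip a static @RequiredPermissions annotation and compute their permission map per instance, so anyone can read the featured items of a released dataverse while ViewUnpublishedDataverse is required otherwise. Because the flattened text above dropped the generic type parameters, here is a minimal restatement of that pattern with the generics written out; the class, methods, and imports are taken from the new files in this patch, and nothing beyond that restatement is implied.

```java
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Illustrative restatement of ListDataverseFeaturedItemsCommand with generics restored.
public class ListDataverseFeaturedItemsCommand extends AbstractCommand<List<DataverseFeaturedItem>> {

    private final Dataverse dataverse;

    public ListDataverseFeaturedItemsCommand(DataverseRequest request, Dataverse dataverse) {
        super(request, dataverse);
        this.dataverse = dataverse;
    }

    @Override
    public List<DataverseFeaturedItem> execute(CommandContext ctxt) throws CommandException {
        return ctxt.dataverseFeaturedItems().findAllByDataverseOrdered(dataverse);
    }

    @Override
    public Map<String, Set<Permission>> getRequiredPermissions() {
        // The empty-string key refers to the command's primary DvObject: no permission
        // is needed when the dataverse is released, ViewUnpublishedDataverse otherwise.
        return Collections.singletonMap("",
                dataverse.isReleased() ? Collections.emptySet()
                        : Collections.singleton(Permission.ViewUnpublishedDataverse));
    }
}
```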
@@ -23,11 +26,13 @@ public class ListMetadataBlocksCommand extends AbstractCommand execute(CommandContext ctxt) throws CommandException if (onlyDisplayedOnCreate) { return listMetadataBlocksDisplayedOnCreate(ctxt, dataverse); } - return dataverse.getMetadataBlocks(); + List orig = dataverse.getMetadataBlocks(); + List extraFromDatasetTypes = new ArrayList<>(); + if (datasetType != null) { + extraFromDatasetTypes = datasetType.getMetadataBlocks(); + } + return Stream.concat(orig.stream(), extraFromDatasetTypes.stream()).toList(); } private List listMetadataBlocksDisplayedOnCreate(CommandContext ctxt, Dataverse dataverse) { if (dataverse.isMetadataBlockRoot() || dataverse.getOwner() == null) { - return ctxt.metadataBlocks().listMetadataBlocksDisplayedOnCreate(dataverse); + List orig = ctxt.metadataBlocks().listMetadataBlocksDisplayedOnCreate(dataverse); + List extraFromDatasetTypes = new ArrayList<>(); + if (datasetType != null) { + extraFromDatasetTypes = datasetType.getMetadataBlocks(); + } + return Stream.concat(orig.stream(), extraFromDatasetTypes.stream()).toList(); } return listMetadataBlocksDisplayedOnCreate(ctxt, dataverse.getOwner()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommand.java index bee5dc648b9..1c3a62ec6de 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommand.java @@ -10,7 +10,6 @@ import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.Guestbook; -import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; @@ -27,7 +26,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.logging.Level; import java.util.logging.Logger; /** @@ -72,13 +70,13 @@ public void executeImpl(CommandContext ctxt) throws CommandException { // validate the move makes sense if (moved.getOwner().equals(destination)) { - throw new IllegalCommandException(BundleUtil.getStringFromBundle("dashboard.card.datamove.dataset.command.error.targetDataverseSameAsOriginalDataverse"), this); + throw new IllegalCommandException(BundleUtil.getStringFromBundle("dashboard.move.dataset.command.error.targetDataverseSameAsOriginalDataverse"), this); } // if dataset is published make sure that its target is published if (moved.isReleased() && !destination.isReleased()){ - throw new IllegalCommandException(BundleUtil.getStringFromBundle("dashboard.card.datamove.dataset.command.error.targetDataverseUnpublishedDatasetPublished", Arrays.asList(destination.getDisplayName())), this); + throw new IllegalCommandException(BundleUtil.getStringFromBundle("dashboard.move.dataset.command.error.targetDataverseUnpublishedDatasetPublished", Arrays.asList(destination.getDisplayName())), this); } //if the datasets guestbook is not contained in the new dataverse then remove it @@ -130,10 +128,10 @@ public void executeImpl(CommandContext ctxt) throws CommandException { if (removeGuestbook || removeLinkDs) { StringBuilder errorString = new StringBuilder(); if (removeGuestbook) { - 
errorString.append(BundleUtil.getStringFromBundle("dashboard.card.datamove.dataset.command.error.unforced.datasetGuestbookNotInTargetDataverse")); + errorString.append(BundleUtil.getStringFromBundle("dashboard.move.dataset.command.error.unforced.datasetGuestbookNotInTargetDataverse")); } if (removeLinkDs) { - errorString.append(BundleUtil.getStringFromBundle("dashboard.card.datamove.dataset.command.error.unforced.linkedToTargetDataverseOrOneOfItsParents")); + errorString.append(BundleUtil.getStringFromBundle("dashboard.move.dataset.command.error.unforced.linkedToTargetDataverseOrOneOfItsParents")); } throw new UnforcedCommandException(errorString.toString(), this); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java index ea38f5a7af7..c8b59b1818a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java @@ -10,7 +10,6 @@ import edu.harvard.iq.dataverse.Template; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -20,14 +19,12 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.util.BundleUtil; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.logging.Logger; -import org.apache.solr.client.solrj.SolrServerException; /** * A command to move a {@link Dataverse} between two {@link Dataverse}s. 
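The ListMetadataBlocksCommand change above merges the dataverse's own metadata blocks with any blocks linked to an optional DatasetType. Since the flattened diff dropped the generic parameters, a minimal restatement of the added merge logic (with types written out) is shown below; the java.util.ArrayList and java.util.stream.Stream imports it relies on are the ones added in that hunk, and the surrounding method is unchanged from the patch.

```java
// Inside ListMetadataBlocksCommand.execute(...): combine the dataverse's blocks with
// any blocks linked to the (optional) dataset type. datasetType may be null when no
// dataset type was supplied, in which case only the dataverse's own blocks are returned.
List<MetadataBlock> orig = dataverse.getMetadataBlocks();
List<MetadataBlock> extraFromDatasetTypes = new ArrayList<>();
if (datasetType != null) {
    extraFromDatasetTypes = datasetType.getMetadataBlocks();
}
return Stream.concat(orig.stream(), extraFromDatasetTypes.stream()).toList();
```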
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetTypeLinksToMetadataBlocksCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetTypeLinksToMetadataBlocksCommand.java new file mode 100644 index 00000000000..57b6da3f90c --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetTypeLinksToMetadataBlocksCommand.java @@ -0,0 +1,37 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.MetadataBlock; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.dataset.DatasetType; +import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import java.util.List; + +@RequiredPermissions({}) +public class UpdateDatasetTypeLinksToMetadataBlocksCommand extends AbstractVoidCommand { + + final DatasetType datasetType; + List metadataBlocks; + + public UpdateDatasetTypeLinksToMetadataBlocksCommand(DataverseRequest dataverseRequest, DatasetType datasetType, List metadataBlocks) { + super(dataverseRequest, (DvObject) null); + this.datasetType = datasetType; + this.metadataBlocks = metadataBlocks; + } + + @Override + protected void executeImpl(CommandContext ctxt) throws CommandException { + if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) { + throw new PermissionException("Update dataset type links to metadata block command can only be called by superusers.", + this, null, null); + } + datasetType.setMetadataBlocks(metadataBlocks); + ctxt.em().merge(datasetType); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index dc8884405ef..209791faafb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -230,7 +230,8 @@ public Dataset execute(CommandContext ctxt) throws CommandException { if (!theDataset.getOrCreateEditVersion().equals(fmd.getDatasetVersion())) { fmd = FileMetadataUtil.getFmdForFileInEditVersion(fmd, theDataset.getOrCreateEditVersion()); } - } + } + fmd.setDataFile(ctxt.em().merge(fmd.getDataFile())); fmd = ctxt.em().merge(fmd); // There are two datafile cases as well - the file has been released, so we're @@ -241,13 +242,15 @@ public Dataset execute(CommandContext ctxt) throws CommandException { ctxt.engine().submit(new DeleteDataFileCommand(fmd.getDataFile(), getRequest())); // and remove the file from the dataset's list theDataset.getFiles().remove(fmd.getDataFile()); + ctxt.em().remove(fmd.getDataFile()); + ctxt.em().remove(fmd); } else { - // if we aren't removing the file, we need to explicitly remove the fmd from the - // context and then remove it from the datafile's list ctxt.em().remove(fmd); + // if we aren't removing the file, we need to remove it from the datafile's list FileMetadataUtil.removeFileMetadataFromList(fmd.getDataFile().getFileMetadatas(), fmd); } - // In 
either case, to fully remove the fmd, we have to remove any other possible + // In either case, we've removed from the context + // And, to fully remove the fmd, we have to remove any other possible // references // From the datasetversion FileMetadataUtil.removeFileMetadataFromList(theDataset.getOrCreateEditVersion().getFileMetadatas(), fmd); @@ -255,6 +258,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { for (DataFileCategory cat : theDataset.getCategories()) { FileMetadataUtil.removeFileMetadataFromList(cat.getFileMetadatas(), fmd); } + } for(FileMetadata fmd: theDataset.getOrCreateEditVersion().getFileMetadatas()) { logger.fine("FMD: " + fmd.getId() + " for file: " + fmd.getDataFile().getId() + "is in final draft version"); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 6dc4ab4d00d..55cc3708097 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -32,7 +32,7 @@ public UpdateDataverseCommand(Dataverse dataverse, List featuredDataverses, DataverseRequest request, List inputLevels) { - this(dataverse, facets, featuredDataverses, request, inputLevels, null, null, false); + this(dataverse, facets, featuredDataverses, request, inputLevels, null, null); } public UpdateDataverseCommand(Dataverse dataverse, @@ -41,9 +41,8 @@ public UpdateDataverseCommand(Dataverse dataverse, DataverseRequest request, List inputLevels, List metadataBlocks, - DataverseDTO updatedDataverseDTO, - boolean resetRelationsOnNullValues) { - super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks, resetRelationsOnNullValues); + DataverseDTO updatedDataverseDTO) { + super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks); if (featuredDataverses != null) { this.featuredDataverseList = new ArrayList<>(featuredDataverses); } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseFeaturedItemCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseFeaturedItemCommand.java new file mode 100644 index 00000000000..ed6fe825b03 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseFeaturedItemCommand.java @@ -0,0 +1,41 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; +import edu.harvard.iq.dataverse.api.dto.UpdatedDataverseFeaturedItemDTO; +import edu.harvard.iq.dataverse.engine.command.*; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +/** + * Updates a particular featured item {@link DataverseFeaturedItem} of a {@link Dataverse}. 
+ */ +public class UpdateDataverseFeaturedItemCommand extends AbstractWriteDataverseFeaturedItemCommand { + + private final DataverseFeaturedItem dataverseFeaturedItem; + private final UpdatedDataverseFeaturedItemDTO updatedDataverseFeaturedItemDTO; + + public UpdateDataverseFeaturedItemCommand(DataverseRequest request, + DataverseFeaturedItem dataverseFeaturedItem, + UpdatedDataverseFeaturedItemDTO updatedDataverseFeaturedItemDTO) { + super(request, dataverseFeaturedItem.getDataverse()); + this.dataverseFeaturedItem = dataverseFeaturedItem; + this.updatedDataverseFeaturedItemDTO = updatedDataverseFeaturedItemDTO; + } + + @Override + public DataverseFeaturedItem execute(CommandContext ctxt) throws CommandException { + validateAndSetContent(dataverseFeaturedItem, updatedDataverseFeaturedItemDTO.getContent()); + dataverseFeaturedItem.setDisplayOrder(updatedDataverseFeaturedItemDTO.getDisplayOrder()); + + if (!updatedDataverseFeaturedItemDTO.isKeepFile()) { + setFileImageIfAvailableOrNull( + dataverseFeaturedItem, + updatedDataverseFeaturedItemDTO.getImageFileName(), + updatedDataverseFeaturedItemDTO.getImageFileInputStream(), + ctxt + ); + } + + return ctxt.dataverseFeaturedItems().save(dataverseFeaturedItem); + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseFeaturedItemsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseFeaturedItemsCommand.java new file mode 100644 index 00000000000..0368efef6b0 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseFeaturedItemsCommand.java @@ -0,0 +1,78 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; +import edu.harvard.iq.dataverse.api.dto.NewDataverseFeaturedItemDTO; +import edu.harvard.iq.dataverse.api.dto.UpdatedDataverseFeaturedItemDTO; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.engine.command.*; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; + +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Map; + +/** + * Updates all featured items ({@link DataverseFeaturedItem}) for a specified {@link Dataverse}. + *
+ * This command allows for the creation of multiple new featured items, updates to existing items with new parameters, + * or the deletion of existing items, all in a single command. + *
+ **/ +@RequiredPermissions({Permission.EditDataverse}) +public class UpdateDataverseFeaturedItemsCommand extends AbstractCommand> { + + private final Dataverse dataverse; + private final List newDataverseFeaturedItemDTOs; + private final Map dataverseFeaturedItemsToUpdate; + + public UpdateDataverseFeaturedItemsCommand(DataverseRequest request, Dataverse dataverse, List newDataverseFeaturedItemDTOs, Map dataverseFeaturedItemsToUpdate) { + super(request, dataverse); + this.dataverse = dataverse; + this.newDataverseFeaturedItemDTOs = newDataverseFeaturedItemDTOs; + this.dataverseFeaturedItemsToUpdate = dataverseFeaturedItemsToUpdate; + } + + @Override + public List execute(CommandContext ctxt) throws CommandException { + List dataverseFeaturedItems = updateOrDeleteExistingFeaturedItems(ctxt); + dataverseFeaturedItems.addAll(createNewFeaturedItems(ctxt)); + dataverseFeaturedItems.sort(Comparator.comparingInt(DataverseFeaturedItem::getDisplayOrder)); + return dataverseFeaturedItems; + } + + private List updateOrDeleteExistingFeaturedItems(CommandContext ctxt) throws CommandException { + List updatedFeaturedItems = new ArrayList<>(); + List featuredItemsToDelete = dataverse.getDataverseFeaturedItems(); + + for (Map.Entry entry : dataverseFeaturedItemsToUpdate.entrySet()) { + DataverseFeaturedItem featuredItem = entry.getKey(); + UpdatedDataverseFeaturedItemDTO updatedDTO = entry.getValue(); + + featuredItemsToDelete.stream() + .filter(item -> item.getId().equals(featuredItem.getId())) + .findFirst().ifPresent(featuredItemsToDelete::remove); + + DataverseFeaturedItem updatedFeatureItem = ctxt.engine().submit(new UpdateDataverseFeaturedItemCommand(getRequest(), featuredItem, updatedDTO)); + updatedFeaturedItems.add(updatedFeatureItem); + } + + for (DataverseFeaturedItem featuredItem : featuredItemsToDelete) { + ctxt.engine().submit(new DeleteDataverseFeaturedItemCommand(getRequest(), featuredItem)); + } + + return updatedFeaturedItems; + } + + private List createNewFeaturedItems(CommandContext ctxt) throws CommandException { + List createdFeaturedItems = new ArrayList<>(); + + for (NewDataverseFeaturedItemDTO dto : newDataverseFeaturedItemDTOs) { + DataverseFeaturedItem createdFeatureItem = ctxt.engine().submit(new CreateDataverseFeaturedItemCommand(getRequest(), dataverse, dto)); + createdFeaturedItems.add(createdFeatureItem); + } + + return createdFeaturedItems; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java index 0c4b39fd641..d4f2f95389f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java @@ -111,7 +111,11 @@ public Boolean isAvailableToUsers() { @Override public String getMediaType() { - return MediaType.APPLICATION_JSON; + /** + * Changed from "application/json" to "application/ld+json" because + * that's what Signposting expects. 
+ */ + return "application/ld+json"; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java index 8fab6a6704d..a1f480af197 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.DatasetFieldConstant; import edu.harvard.iq.dataverse.DvObjectContainer; +import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO; @@ -85,6 +86,10 @@ public class DdiExportUtil { public static final String NOTE_SUBJECT_CONTENTTYPE = "Content/MIME Type"; public static final String CITATION_BLOCK_NAME = "citation"; + //Some tests don't send real PIDs that can be parsed + //Use constant empty PID in these cases + private static final String EMPTY_PID = "null:nullnullnull"; + public static String datasetDtoAsJson2ddi(String datasetDtoAsJson) { Gson gson = new Gson(); DatasetDTO datasetDto = gson.fromJson(datasetDtoAsJson, DatasetDTO.class); @@ -169,11 +174,14 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) String persistentAuthority = datasetDto.getAuthority(); String persistentId = datasetDto.getIdentifier(); - String pid = persistentProtocol + ":" + persistentAuthority + "/" + persistentId; - String pidUri = pid; - //Some tests don't send real PIDs - don't try to get their URL form - if(!pidUri.equals("null:null/null")) { - pidUri= PidUtil.parseAsGlobalID(persistentProtocol, persistentAuthority, persistentId).asURL(); + GlobalId pid = PidUtil.parseAsGlobalID(persistentProtocol, persistentAuthority, persistentId); + String pidUri, pidString; + if(pid != null) { + pidUri = pid.asURL(); + pidString = pid.asString(); + } else { + pidUri = EMPTY_PID; + pidString = EMPTY_PID; } // The "persistentAgency" tag is used for the "agency" attribute of the // ddi section; back in the DVN3 days we used "handle" and "DOI" @@ -203,7 +211,7 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) XmlWriterUtil.writeAttribute(xmlw, "agency", persistentAgency); - xmlw.writeCharacters(pid); + xmlw.writeCharacters(pidString); xmlw.writeEndElement(); // IDNo writeOtherIdElement(xmlw, version); xmlw.writeEndElement(); // titlStmt @@ -364,14 +372,21 @@ private static void writeDocDescElement (XMLStreamWriter xmlw, DatasetDTO datase String persistentAuthority = datasetDto.getAuthority(); String persistentId = datasetDto.getIdentifier(); - + GlobalId pid = PidUtil.parseAsGlobalID(persistentProtocol, persistentAuthority, persistentId); + String pidString; + if(pid != null) { + pidString = pid.asString(); + } else { + pidString = EMPTY_PID; + } + xmlw.writeStartElement("docDscr"); xmlw.writeStartElement("citation"); xmlw.writeStartElement("titlStmt"); XmlWriterUtil.writeFullElement(xmlw, "titl", XmlWriterUtil.dto2Primitive(version, DatasetFieldConstant.title), datasetDto.getMetadataLanguage()); xmlw.writeStartElement("IDNo"); XmlWriterUtil.writeAttribute(xmlw, "agency", persistentAgency); - xmlw.writeCharacters(persistentProtocol + ":" + persistentAuthority + "/" + persistentId); + xmlw.writeCharacters(pidString); xmlw.writeEndElement(); // IDNo xmlw.writeEndElement(); // titlStmt xmlw.writeStartElement("distStmt"); @@ -396,10 
+411,10 @@ private static void writeDocDescElement (XMLStreamWriter xmlw, DatasetDTO datase private static void writeVersionStatement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException{ xmlw.writeStartElement("verStmt"); - xmlw.writeAttribute("source","archive"); + xmlw.writeAttribute("source","archive"); xmlw.writeStartElement("version"); XmlWriterUtil.writeAttribute(xmlw,"date", datasetVersionDTO.getReleaseTime().substring(0, 10)); - XmlWriterUtil.writeAttribute(xmlw,"type", datasetVersionDTO.getVersionState().toString()); + XmlWriterUtil.writeAttribute(xmlw,"type", datasetVersionDTO.getVersionState().toString()); xmlw.writeCharacters(datasetVersionDTO.getVersionNumber().toString()); xmlw.writeEndElement(); // version xmlw.writeEndElement(); // verStmt @@ -670,7 +685,7 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO xmlw.writeStartElement("dataColl"); XmlWriterUtil.writeI18NElement(xmlw, "timeMeth", version, DatasetFieldConstant.timeMethod,lang); XmlWriterUtil.writeI18NElement(xmlw, "dataCollector", version, DatasetFieldConstant.dataCollector, lang); - XmlWriterUtil.writeI18NElement(xmlw, "collectorTraining", version, DatasetFieldConstant.collectorTraining, lang); + XmlWriterUtil.writeI18NElement(xmlw, "collectorTraining", version, DatasetFieldConstant.collectorTraining, lang); XmlWriterUtil.writeI18NElement(xmlw, "frequenc", version, DatasetFieldConstant.frequencyOfDataCollection, lang); XmlWriterUtil.writeI18NElement(xmlw, "sampProc", version, DatasetFieldConstant.samplingProcedure, lang); @@ -691,7 +706,7 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO } } /* and so does : */ - XmlWriterUtil.writeI18NElement(xmlw, "resInstru", version, DatasetFieldConstant.researchInstrument, lang); + XmlWriterUtil.writeI18NElement(xmlw, "resInstru", version, DatasetFieldConstant.researchInstrument, lang); xmlw.writeStartElement("sources"); XmlWriterUtil.writeFullElementList(xmlw, "dataSrc", dto2PrimitiveList(version, DatasetFieldConstant.dataSources)); XmlWriterUtil.writeI18NElement(xmlw, "srcOrig", version, DatasetFieldConstant.originOfSources, lang); @@ -704,7 +719,7 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO XmlWriterUtil.writeI18NElement(xmlw, "actMin", version, DatasetFieldConstant.actionsToMinimizeLoss, lang); /* "" has the uppercase C: */ XmlWriterUtil.writeI18NElement(xmlw, "ConOps", version, DatasetFieldConstant.controlOperations, lang); - XmlWriterUtil.writeI18NElement(xmlw, "weight", version, DatasetFieldConstant.weighting, lang); + XmlWriterUtil.writeI18NElement(xmlw, "weight", version, DatasetFieldConstant.weighting, lang); XmlWriterUtil.writeI18NElement(xmlw, "cleanOps", version, DatasetFieldConstant.cleaningOperations, lang); xmlw.writeEndElement(); //dataColl @@ -715,7 +730,7 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO //XmlWriterUtil.writeFullElement(xmlw, "anylInfo", dto2Primitive(version, DatasetFieldConstant.datasetLevelErrorNotes)); XmlWriterUtil.writeI18NElement(xmlw, "respRate", version, DatasetFieldConstant.responseRate, lang); XmlWriterUtil.writeI18NElement(xmlw, "EstSmpErr", version, DatasetFieldConstant.samplingErrorEstimates, lang); - XmlWriterUtil.writeI18NElement(xmlw, "dataAppr", version, DatasetFieldConstant.otherDataAppraisal, lang); + XmlWriterUtil.writeI18NElement(xmlw, "dataAppr", version, DatasetFieldConstant.otherDataAppraisal, lang); xmlw.writeEndElement(); //anlyInfo 
xmlw.writeEndElement();//method @@ -867,7 +882,7 @@ private static void writeAuthorsElement(XMLStreamWriter xmlw, DatasetVersionDTO } if (!authorName.isEmpty()){ xmlw.writeStartElement("AuthEnty"); - XmlWriterUtil.writeAttribute(xmlw,"affiliation",authorAffiliation); + XmlWriterUtil.writeAttribute(xmlw,"affiliation",authorAffiliation); xmlw.writeCharacters(authorName); xmlw.writeEndElement(); //AuthEnty } @@ -928,8 +943,8 @@ private static void writeContactsElement(XMLStreamWriter xmlw, DatasetVersionDTO // TODO: Since datasetContactEmail is a required field but datasetContactName is not consider not checking if datasetContactName is empty so we can write out datasetContactEmail. if (!datasetContactName.isEmpty()){ xmlw.writeStartElement("contact"); - XmlWriterUtil.writeAttribute(xmlw,"affiliation",datasetContactAffiliation); - XmlWriterUtil.writeAttribute(xmlw,"email",datasetContactEmail); + XmlWriterUtil.writeAttribute(xmlw,"affiliation",datasetContactAffiliation); + XmlWriterUtil.writeAttribute(xmlw,"email",datasetContactEmail); xmlw.writeCharacters(datasetContactName); xmlw.writeEndElement(); //AuthEnty } @@ -1154,7 +1169,7 @@ private static void writeAbstractElement(XMLStreamWriter xmlw, DatasetVersionDTO } if (!descriptionText.isEmpty()){ xmlw.writeStartElement("abstract"); - XmlWriterUtil.writeAttribute(xmlw,"date",descriptionDate); + XmlWriterUtil.writeAttribute(xmlw,"date",descriptionDate); if(DvObjectContainer.isMetadataLanguageSet(lang)) { xmlw.writeAttribute("xml:lang", lang); } @@ -1189,7 +1204,7 @@ private static void writeGrantElement(XMLStreamWriter xmlw, DatasetVersionDTO da } if (!grantNumber.isEmpty()){ xmlw.writeStartElement("grantNo"); - XmlWriterUtil.writeAttribute(xmlw,"agency",grantAgency); + XmlWriterUtil.writeAttribute(xmlw,"agency",grantAgency); xmlw.writeCharacters(grantNumber); xmlw.writeEndElement(); //grantno } @@ -1221,7 +1236,7 @@ private static void writeOtherIdElement(XMLStreamWriter xmlw, DatasetVersionDTO } if (!otherId.isEmpty()){ xmlw.writeStartElement("IDNo"); - XmlWriterUtil.writeAttribute(xmlw,"agency",otherIdAgency); + XmlWriterUtil.writeAttribute(xmlw,"agency",otherIdAgency); xmlw.writeCharacters(otherId); xmlw.writeEndElement(); //IDNo } @@ -1253,7 +1268,7 @@ private static void writeSoftwareElement(XMLStreamWriter xmlw, DatasetVersionDTO } if (!softwareName.isEmpty()){ xmlw.writeStartElement("software"); - XmlWriterUtil.writeAttribute(xmlw,"version",softwareVersion); + XmlWriterUtil.writeAttribute(xmlw,"version",softwareVersion); xmlw.writeCharacters(softwareName); xmlw.writeEndElement(); //software } @@ -1366,8 +1381,8 @@ private static void writeNotesElement(XMLStreamWriter xmlw, DatasetVersionDTO da } if (!notesText.isEmpty()) { xmlw.writeStartElement("notes"); - XmlWriterUtil.writeAttribute(xmlw,"type",notesType); - XmlWriterUtil.writeAttribute(xmlw,"subject",notesSubject); + XmlWriterUtil.writeAttribute(xmlw,"type",notesType); + XmlWriterUtil.writeAttribute(xmlw,"subject",notesSubject); xmlw.writeCharacters(notesText); xmlw.writeEndElement(); } @@ -1441,9 +1456,9 @@ private static void createOtherMatsFromFileMetadatas(XMLStreamWriter xmlw, JsonA xmlw.writeStartElement("otherMat"); xmlw.writeAttribute("ID", "f" + fileJson.getJsonNumber(("id").toString())); if (fileJson.containsKey("pidUrl")){ - XmlWriterUtil.writeAttribute(xmlw, "URI", fileJson.getString("pidUrl")); + XmlWriterUtil.writeAttribute(xmlw, "URI", fileJson.getString("pidUrl")); } else { - xmlw.writeAttribute("URI", dataverseUrl + "/api/access/datafile/" + 
fileJson.getJsonNumber("id").toString()); + xmlw.writeAttribute("URI", dataverseUrl + "/api/access/datafile/" + fileJson.getJsonNumber("id").toString()); } xmlw.writeAttribute("level", "datafile"); @@ -1514,7 +1529,7 @@ private static FieldDTO dto2FieldDTO(DatasetVersionDTO datasetVersionDTO, String } return null; } - + private static boolean StringUtilisEmpty(String str) { if (str == null || str.trim().equals("")) { diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java index dd01750942d..a2ff980ca28 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java @@ -1271,12 +1271,16 @@ private static void writeDescriptionElement(XMLStreamWriter xmlw, String descrip */ public static void writeGeoLocationsElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String language) throws XMLStreamException { // geoLocation -> geoLocationPlace - String geoLocationPlace = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.productionPlace); + List geoLocationPlaces = dto2MultiplePrimitive(datasetVersionDTO, DatasetFieldConstant.productionPlace); boolean geoLocations_check = false; // write geoLocations geoLocations_check = writeOpenTag(xmlw, "geoLocations", geoLocations_check); - writeGeolocationPlace(xmlw, geoLocationPlace, language); + if (geoLocationPlaces != null) { + for (String geoLocationPlace : geoLocationPlaces) { + writeGeolocationPlace(xmlw, geoLocationPlace, language); + } + } // get DatasetFieldConstant.geographicBoundingBox for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { @@ -1457,6 +1461,26 @@ private static String dto2Primitive(DatasetVersionDTO datasetVersionDTO, String } return null; } + + /** + * + * @param datasetVersionDTO + * @param datasetFieldTypeName + * @return List Multiple Primitive + * + */ + private static List dto2MultiplePrimitive(DatasetVersionDTO datasetVersionDTO, String datasetFieldTypeName) { + // give the single value of the given metadata + for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) { + MetadataBlockDTO value = entry.getValue(); + for (FieldDTO fieldDTO : value.getFields()) { + if (datasetFieldTypeName.equals(fieldDTO.getTypeName())) { + return fieldDTO.getMultiplePrimitive(); + } + } + } + return null; + } /** * Write a full tag. 
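In the OpenAireExportUtil change above, productionPlace is now read as a multi-valued field through the new dto2MultiplePrimitive helper, and one geoLocationPlace element is written per value instead of a single one. A minimal restatement of the added logic with the stripped generics filled back in (the helper, writeOpenTag, and writeGeolocationPlace methods are the ones defined in this file):

```java
// geoLocation -> geoLocationPlace: productionPlace can hold several values,
// so each value becomes its own <geoLocationPlace> element.
List<String> geoLocationPlaces = dto2MultiplePrimitive(datasetVersionDTO, DatasetFieldConstant.productionPlace);
boolean geoLocations_check = false;

// open <geoLocations> once, then emit one place element per value
geoLocations_check = writeOpenTag(xmlw, "geoLocations", geoLocations_check);
if (geoLocationPlaces != null) {
    for (String geoLocationPlace : geoLocationPlaces) {
        writeGeolocationPlace(xmlw, geoLocationPlace, language);
    }
}
```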
diff --git a/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java b/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java index c1162eb8db6..60742ca8a91 100644 --- a/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java +++ b/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java @@ -55,4 +55,10 @@ public JsonObjectBuilder toJsonObjectBuilder() { .add("body", body); } + public JsonObjectBuilder toLimitedJsonObjectBuilder() { + return new NullSafeJsonBuilder() + .add("fromEmail", fromEmail) + .add("subject", subject) + .add("body", body); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java index ba6f5c3dec2..6a85219cc3c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java @@ -6,7 +6,10 @@ package edu.harvard.iq.dataverse.harvest.client; import java.io.Serializable; +import java.util.Arrays; import java.util.Date; + +import edu.harvard.iq.dataverse.util.BundleUtil; import jakarta.persistence.Entity; import jakarta.persistence.GeneratedValue; import jakarta.persistence.GenerationType; @@ -40,13 +43,7 @@ public void setId(Long id) { this.id = id; } - public enum RunResultType { SUCCESS, FAILURE, INPROGRESS, INTERRUPTED }; - - private static String RESULT_LABEL_SUCCESS = "SUCCESS"; - private static String RESULT_LABEL_FAILURE = "FAILED"; - private static String RESULT_LABEL_INPROGRESS = "IN PROGRESS"; - private static String RESULT_DELETE_IN_PROGRESS = "DELETE IN PROGRESS"; - private static String RESULT_LABEL_INTERRUPTED = "INTERRUPTED"; + public enum RunResultType { COMPLETED, COMPLETED_WITH_FAILURES, FAILURE, IN_PROGRESS, INTERRUPTED } @ManyToOne @JoinColumn(nullable = false) @@ -68,36 +65,43 @@ public RunResultType getResult() { public String getResultLabel() { if (harvestingClient != null && harvestingClient.isDeleteInProgress()) { - return RESULT_DELETE_IN_PROGRESS; + return BundleUtil.getStringFromBundle("harvestclients.result.deleteInProgress"); } - - if (isSuccess()) { - return RESULT_LABEL_SUCCESS; + + if (isCompleted()) { + return BundleUtil.getStringFromBundle("harvestclients.result.completed"); + } else if (isCompletedWithFailures()) { + return BundleUtil.getStringFromBundle("harvestclients.result.completedWithFailures"); } else if (isFailed()) { - return RESULT_LABEL_FAILURE; + return BundleUtil.getStringFromBundle("harvestclients.result.failure"); } else if (isInProgress()) { - return RESULT_LABEL_INPROGRESS; + return BundleUtil.getStringFromBundle("harvestclients.result.inProgess"); } else if (isInterrupted()) { - return RESULT_LABEL_INTERRUPTED; + return BundleUtil.getStringFromBundle("harvestclients.result.interrupted"); } return null; } public String getDetailedResultLabel() { if (harvestingClient != null && harvestingClient.isDeleteInProgress()) { - return RESULT_DELETE_IN_PROGRESS; + return BundleUtil.getStringFromBundle("harvestclients.result.deleteInProgress"); } - if (isSuccess() || isInterrupted()) { + if (isCompleted() || isCompletedWithFailures() || isInterrupted()) { String resultLabel = getResultLabel(); - - resultLabel = resultLabel.concat("; "+harvestedDatasetCount+" harvested, "); - resultLabel = resultLabel.concat(deletedDatasetCount+" deleted, "); - resultLabel = resultLabel.concat(failedDatasetCount+" failed."); + + String details = 
BundleUtil.getStringFromBundle("harvestclients.result.details", Arrays.asList( + harvestedDatasetCount.toString(), + deletedDatasetCount.toString(), + failedDatasetCount.toString() + )); + if(details != null) { + resultLabel = resultLabel + "; " + details; + } return resultLabel; } else if (isFailed()) { - return RESULT_LABEL_FAILURE; + return BundleUtil.getStringFromBundle("harvestclients.result.failure"); } else if (isInProgress()) { - return RESULT_LABEL_INPROGRESS; + return BundleUtil.getStringFromBundle("harvestclients.result.inProgess"); } return null; } @@ -106,12 +110,20 @@ public void setResult(RunResultType harvestResult) { this.harvestResult = harvestResult; } - public boolean isSuccess() { - return RunResultType.SUCCESS == harvestResult; + public boolean isCompleted() { + return RunResultType.COMPLETED == harvestResult; + } + + public void setCompleted() { + harvestResult = RunResultType.COMPLETED; + } + + public boolean isCompletedWithFailures() { + return RunResultType.COMPLETED_WITH_FAILURES == harvestResult; } - public void setSuccess() { - harvestResult = RunResultType.SUCCESS; + public void setCompletedWithFailures() { + harvestResult = RunResultType.COMPLETED_WITH_FAILURES; } public boolean isFailed() { @@ -123,12 +135,12 @@ public void setFailed() { } public boolean isInProgress() { - return RunResultType.INPROGRESS == harvestResult || + return RunResultType.IN_PROGRESS == harvestResult || (harvestResult == null && startTime != null && finishTime == null); } public void setInProgress() { - harvestResult = RunResultType.INPROGRESS; + harvestResult = RunResultType.IN_PROGRESS; } public boolean isInterrupted() { diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java index e0b5c2dfbfb..16580f8f9f1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java @@ -163,7 +163,7 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId try { if (harvestingClientConfig.isHarvestingNow()) { - hdLogger.log(Level.SEVERE, "Cannot start harvest, client " + harvestingClientConfig.getName() + " is already harvesting."); + hdLogger.log(Level.SEVERE, String.format("Cannot start harvest, client %s is already harvesting.", harvestingClientConfig.getName())); } else { harvestingClientService.resetHarvestInProgress(harvestingClientId); @@ -176,9 +176,16 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId } else { throw new IOException("Unsupported harvest type"); } - harvestingClientService.setHarvestSuccess(harvestingClientId, new Date(), harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size()); - hdLogger.log(Level.INFO, "COMPLETED HARVEST, server=" + harvestingClientConfig.getArchiveUrl() + ", metadataPrefix=" + harvestingClientConfig.getMetadataPrefix()); - hdLogger.log(Level.INFO, "Datasets created/updated: " + harvestedDatasetIds.size() + ", datasets deleted: " + deletedIdentifiers.size() + ", datasets failed: " + failedIdentifiers.size()); + + if (failedIdentifiers.isEmpty()) { + harvestingClientService.setHarvestCompleted(harvestingClientId, new Date(), harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size()); + hdLogger.log(Level.INFO, String.format("\"COMPLETED HARVEST, server=%s, metadataPrefix=%s", harvestingClientConfig.getArchiveUrl(), 
harvestingClientConfig.getMetadataPrefix())); + } else { + harvestingClientService.setHarvestCompletedWithFailures(harvestingClientId, new Date(), harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size()); + hdLogger.log(Level.INFO, String.format("\"COMPLETED HARVEST WITH FAILURES, server=%s, metadataPrefix=%s", harvestingClientConfig.getArchiveUrl(), harvestingClientConfig.getMetadataPrefix())); + } + + hdLogger.log(Level.INFO, String.format("Datasets created/updated: %s, datasets deleted: %s, datasets failed: %s", harvestedDatasetIds.size(), deletedIdentifiers.size(), failedIdentifiers.size())); } } catch (StopHarvestException she) { diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java index 7280b6af129..e73310650b4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java @@ -297,7 +297,7 @@ public ClientHarvestRun getLastSuccessfulRun() { int i = harvestHistory.size() - 1; while (i > -1) { - if (harvestHistory.get(i).isSuccess()) { + if (harvestHistory.get(i).isCompleted() || harvestHistory.get(i).isCompletedWithFailures()) { return harvestHistory.get(i); } i--; @@ -314,7 +314,7 @@ ClientHarvestRun getLastNonEmptyRun() { int i = harvestHistory.size() - 1; while (i > -1) { - if (harvestHistory.get(i).isSuccess()) { + if (harvestHistory.get(i).isCompleted() || harvestHistory.get(i).isCompletedWithFailures()) { if (harvestHistory.get(i).getHarvestedDatasetCount().longValue() > 0 || harvestHistory.get(i).getDeletedDatasetCount().longValue() > 0) { return harvestHistory.get(i); diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java index 7ec6d75a41c..2f76fed1a11 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java @@ -164,8 +164,13 @@ public void deleteClient(Long clientId) { } @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) - public void setHarvestSuccess(Long hcId, Date currentTime, int harvestedCount, int failedCount, int deletedCount) { - recordHarvestJobStatus(hcId, currentTime, harvestedCount, failedCount, deletedCount, ClientHarvestRun.RunResultType.SUCCESS); + public void setHarvestCompleted(Long hcId, Date currentTime, int harvestedCount, int failedCount, int deletedCount) { + recordHarvestJobStatus(hcId, currentTime, harvestedCount, failedCount, deletedCount, ClientHarvestRun.RunResultType.COMPLETED); + } + + @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) + public void setHarvestCompletedWithFailures(Long hcId, Date currentTime, int harvestedCount, int failedCount, int deletedCount) { + recordHarvestJobStatus(hcId, currentTime, harvestedCount, failedCount, deletedCount, ClientHarvestRun.RunResultType.COMPLETED_WITH_FAILURES); } @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/oai/OaiHandler.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/oai/OaiHandler.java index bb3dc06972c..4345fb44d8f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/oai/OaiHandler.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/harvest/client/oai/OaiHandler.java @@ -4,6 +4,7 @@ import io.gdcc.xoai.model.oaipmh.results.record.Header; import io.gdcc.xoai.model.oaipmh.results.MetadataFormat; import io.gdcc.xoai.model.oaipmh.results.Set; +import io.gdcc.xoai.model.oaipmh.verbs.Identify; import io.gdcc.xoai.serviceprovider.ServiceProvider; import io.gdcc.xoai.serviceprovider.exceptions.BadArgumentException; import io.gdcc.xoai.serviceprovider.exceptions.InvalidOAIResponse; @@ -289,6 +290,8 @@ private ListIdentifiersParameters buildListIdentifiersParams() throws OaiHandler mip.withMetadataPrefix(metadataPrefix); if (this.fromDate != null) { + Identify identify = runIdentify(); + mip.withGranularity(identify.getGranularity().toString()); mip.withFrom(this.fromDate.toInstant()); } @@ -311,10 +314,13 @@ public String getProprietaryDataverseMetadataURL(String identifier) { return requestURL.toString(); } - public void runIdentify() { - // not implemented yet - // (we will need it, both for validating the remote server, - // and to learn about its extended capabilities) + public Identify runIdentify() throws OaiHandlerException { + ServiceProvider sp = getServiceProvider(); + try { + return sp.identify(); + } catch (InvalidOAIResponse ior) { + throw new OaiHandlerException("No valid response received from the OAI server."); + } } public Map makeCustomHeaders(String headersString) { diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java index fa83552a9ec..4752022b570 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java @@ -143,13 +143,29 @@ public String[] getTestFormatSet() { return this.testFormatSet; } + /*ToDo + * Rather than making these tests just methods, perhaps they could be implemented as + * classes inheriting a common interface. In addition to the existing ~test*format methods, + * the interface could include a method indicating whether the test requires + * the whole file or, if not, how many bytes are needed. That would make it easier to + * decide whether to use the test on direct/remote uploads (where retrieving a big file may not be worth it, + * but retrieving the 42 bytes needed for a stata check or the ~491 bytes needed for a por check) could be. + * + * Could also add a method to indicate which mimetypes the test can identify/refine which + * might make it possible to replace FileUtil.useRecognizedType(String, String) at some point. + * + * It might also make sense to make this interface broader than just the current ingestable types, + * e.g. to support the NetCDF, graphML and other checks in the same framework. (Some of these might only + * support using a file rather than a bytebuffer though.) 
+ */ + // test methods start here ------------------------------------------------ /** * test this byte buffer against SPSS-SAV spec * * */ - public String testSAVformat(MappedByteBuffer buff) { + public String testSAVformat(ByteBuffer buff) { String result = null; buff.rewind(); boolean DEBUG = false; @@ -192,7 +208,7 @@ public String testSAVformat(MappedByteBuffer buff) { * test this byte buffer against STATA DTA spec * */ - public String testDTAformat(MappedByteBuffer buff) { + public String testDTAformat(ByteBuffer buff) { String result = null; buff.rewind(); boolean DEBUG = false; @@ -311,7 +327,7 @@ public String testDTAformat(MappedByteBuffer buff) { * test this byte buffer against SAS Transport(XPT) spec * */ - public String testXPTformat(MappedByteBuffer buff) { + public String testXPTformat(ByteBuffer buff) { String result = null; buff.rewind(); boolean DEBUG = false; @@ -359,7 +375,7 @@ public String testXPTformat(MappedByteBuffer buff) { * test this byte buffer against SPSS Portable (POR) spec * */ - public String testPORformat(MappedByteBuffer buff) { + public String testPORformat(ByteBuffer buff) { String result = null; buff.rewind(); boolean DEBUG = false; @@ -525,7 +541,7 @@ public String testPORformat(MappedByteBuffer buff) { * test this byte buffer against R data file * */ - public String testRDAformat(MappedByteBuffer buff) { + public String testRDAformat(ByteBuffer buff) { String result = null; buff.rewind(); @@ -607,11 +623,10 @@ public String testRDAformat(MappedByteBuffer buff) { // public instance methods ------------------------------------------------ public String detectTabularDataFormat(File fh) { - boolean DEBUG = false; - String readableFormatType = null; + FileChannel srcChannel = null; FileInputStream inp = null; - + try { // set-up a FileChannel instance for a given file object inp = new FileInputStream(fh); @@ -621,63 +636,7 @@ public String detectTabularDataFormat(File fh) { // create a read-only MappedByteBuffer MappedByteBuffer buff = srcChannel.map(FileChannel.MapMode.READ_ONLY, 0, buffer_size); - - //this.printHexDump(buff, "hex dump of the byte-buffer"); - - buff.rewind(); - dbgLog.fine("before the for loop"); - for (String fmt : this.getTestFormatSet()) { - - // get a test method - Method mthd = testMethods.get(fmt); - //dbgLog.info("mthd: " + mthd.getName()); - - try { - // invoke this method - Object retobj = mthd.invoke(this, buff); - String result = (String) retobj; - - if (result != null) { - dbgLog.fine("result for (" + fmt + ")=" + result); - if (DEBUG) { - out.println("result for (" + fmt + ")=" + result); - } - if (readableFileTypes.contains(result)) { - readableFormatType = result; - } - dbgLog.fine("readableFormatType=" + readableFormatType); - } else { - dbgLog.fine("null was returned for " + fmt + " test"); - if (DEBUG) { - out.println("null was returned for " + fmt + " test"); - } - } - } catch (InvocationTargetException e) { - Throwable cause = e.getCause(); - // added null check because of "homemade.zip" from https://redmine.hmdc.harvard.edu/issues/3273 - if (cause.getMessage() != null) { - err.format(cause.getMessage()); - e.printStackTrace(); - } else { - dbgLog.info("cause.getMessage() was null for " + e); - e.printStackTrace(); - } - } catch (IllegalAccessException e) { - e.printStackTrace(); - } catch (BufferUnderflowException e){ - dbgLog.info("BufferUnderflowException " + e); - e.printStackTrace(); - } - - if (readableFormatType != null) { - break; - } - } - - // help garbage-collect the mapped buffer sooner, to avoid the jvm 
- // holding onto the underlying file unnecessarily: - buff = null; - + return detectTabularDataFormat(buff); } catch (FileNotFoundException fe) { dbgLog.fine("exception detected: file was not foud"); fe.printStackTrace(); @@ -688,8 +647,73 @@ public String detectTabularDataFormat(File fh) { IOUtils.closeQuietly(srcChannel); IOUtils.closeQuietly(inp); } + return null; + } + + public String detectTabularDataFormat(ByteBuffer buff) { + boolean DEBUG = false; + String readableFormatType = null; + + // this.printHexDump(buff, "hex dump of the byte-buffer"); + + buff.rewind(); + dbgLog.fine("before the for loop"); + for (String fmt : this.getTestFormatSet()) { + + // get a test method + Method mthd = testMethods.get(fmt); + // dbgLog.info("mthd: " + mthd.getName()); + + try { + // invoke this method + Object retobj = mthd.invoke(this, buff); + String result = (String) retobj; + + if (result != null) { + dbgLog.fine("result for (" + fmt + ")=" + result); + if (DEBUG) { + out.println("result for (" + fmt + ")=" + result); + } + if (readableFileTypes.contains(result)) { + readableFormatType = result; + } + dbgLog.fine("readableFormatType=" + readableFormatType); + } else { + dbgLog.fine("null was returned for " + fmt + " test"); + if (DEBUG) { + out.println("null was returned for " + fmt + " test"); + } + } + } catch (InvocationTargetException e) { + Throwable cause = e.getCause(); + // added null check because of "homemade.zip" from + // https://redmine.hmdc.harvard.edu/issues/3273 + if (cause.getMessage() != null) { + err.format(cause.getMessage()); + e.printStackTrace(); + } else { + dbgLog.info("cause.getMessage() was null for " + e); + e.printStackTrace(); + } + } catch (IllegalAccessException e) { + e.printStackTrace(); + } catch (BufferUnderflowException e) { + dbgLog.info("BufferUnderflowException " + e); + e.printStackTrace(); + } + + if (readableFormatType != null) { + break; + } + } + + // help garbage-collect the mapped buffer sooner, to avoid the jvm + // holding onto the underlying file unnecessarily: + buff = null; + return readableFormatType; } + /** * identify the first 5 bytes @@ -737,7 +761,7 @@ private long getBufferSize(FileChannel fileChannel) { return BUFFER_SIZE; } - private int getGzipBufferSize(MappedByteBuffer buff) { + private int getGzipBufferSize(ByteBuffer buff) { int GZIP_BUFFER_SIZE = 120; /* note: diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java index 5626a442762..091fbde484e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java @@ -439,19 +439,6 @@ private boolean runStep1RoleAssignments() { if (results == null) { this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.null")); return false; - } else if (results.isEmpty()) { - List roleNames = this.rolePermissionHelper.getRoleNamesByIdList(this.filterParams.getRoleIds()); - if ((roleNames == null) || (roleNames.isEmpty())) { - this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.no.role")); - } else { - final List args = Arrays.asList(StringUtils.join(roleNames, ", ")); - if (roleNames.size() == 1) { - this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.role.empty", args)); - } else { - this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.roles.empty", args)); - } - } - return false; } // Iterate through assigned objects, a 
single object may end up in @@ -485,6 +472,21 @@ private boolean runStep1RoleAssignments() { } directDvObjectIds.add(dvId); } + + if (directDvObjectIds.isEmpty()) { + List roleNames = this.rolePermissionHelper.getRoleNamesByIdList(this.filterParams.getRoleIds()); + if ((roleNames == null) || (roleNames.isEmpty())) { + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.no.role")); + } else { + final List args = Arrays.asList(StringUtils.join(roleNames, ", ")); + if (roleNames.size() == 1) { + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.role.empty", args)); + } else { + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.roles.empty", args)); + } + } + return false; + } return true; } diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/AbstractPidProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/AbstractPidProvider.java index 250eae7e5fc..acb0b7e7518 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/AbstractPidProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/AbstractPidProvider.java @@ -204,6 +204,16 @@ public DvObject generatePid(DvObject dvObject) { + ") doesn't match that of the provider, id: " + getId()); } } + if (dvObject.getSeparator() == null) { + dvObject.setSeparator(getSeparator()); + } else { + if (!dvObject.getSeparator().equals(getSeparator())) { + logger.warning("The separator of the DvObject (" + dvObject.getSeparator() + + ") does not match the configured separator (" + getSeparator() + ")"); + throw new IllegalArgumentException("The separator of the DvObject (" + dvObject.getSeparator() + + ") doesn't match that of the provider, id: " + getId()); + } + } if (dvObject.isInstanceofDataset()) { dvObject.setIdentifier(generateDatasetIdentifier((Dataset) dvObject)); } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidProviderFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidProviderFactoryBean.java index b01fb5e7eba..c4d6aa4ea21 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidProviderFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidProviderFactoryBean.java @@ -205,7 +205,7 @@ private void loadProviders() { passphrase); break; case "perma": - String baseUrl = JvmSettings.LEGACY_PERMALINK_BASEURL.lookup(); + String baseUrl = JvmSettings.LEGACY_PERMALINK_BASEURL.lookupOptional().orElse(SystemConfig.getDataverseSiteUrlStatic()); legacy = new PermaLinkPidProvider("legacy", "legacy", authority, shoulder, identifierGenerationStyle, dataFilePidFormat, "", "", baseUrl, PermaLinkPidProvider.SEPARATOR); diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java index 279f18dcd0e..003b4e3f61c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; import edu.harvard.iq.dataverse.pidproviders.handle.HandlePidProvider; +import edu.harvard.iq.dataverse.pidproviders.perma.PermaLinkPidProvider; import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.IOException; import java.io.InputStream; @@ -252,7 +253,12 @@ public static void clearPidProviders() { * Get a PidProvider by protocol/authority/shoulder. 
*/ public static PidProvider getPidProvider(String protocol, String authority, String shoulder) { - return getPidProvider(protocol, authority, shoulder, AbstractPidProvider.SEPARATOR); + switch(protocol) { + case PermaLinkPidProvider.PERMA_PROTOCOL: + return getPidProvider(protocol, authority, shoulder, PermaLinkPidProvider.SEPARATOR); + default: + return getPidProvider(protocol, authority, shoulder, AbstractPidProvider.SEPARATOR); + } } public static PidProvider getPidProvider(String protocol, String authority, String shoulder, String separator) { diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java index 8199b7d9c9f..201a5f5f781 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java @@ -626,7 +626,7 @@ private void writeEntityElements(XMLStreamWriter xmlw, String elementName, Strin attributeMap.clear(); boolean isROR=false; String orgName = affiliation; - ExternalIdentifier externalIdentifier = ExternalIdentifier.ROR; + ExternalIdentifier externalIdentifier = ExternalIdentifier.ROR_FULL_URL; if (externalIdentifier.isValidIdentifier(orgName)) { isROR = true; JsonObject jo = getExternalVocabularyValue(orgName); @@ -639,7 +639,7 @@ private void writeEntityElements(XMLStreamWriter xmlw, String elementName, Strin attributeMap.put("schemeURI", "https://ror.org"); attributeMap.put("affiliationIdentifierScheme", "ROR"); - attributeMap.put("affiliationIdentifier", orgName); + attributeMap.put("affiliationIdentifier", affiliation); } XmlWriterUtil.writeFullElementWithAttributes(xmlw, "affiliation", attributeMap, StringEscapeUtils.escapeXml10(orgName)); @@ -1528,7 +1528,7 @@ private void writeFundingReferences(XMLStreamWriter xmlw, DvObject dvObject) thr fundingReferenceWritten = XmlWriterUtil.writeOpenTagIfNeeded(xmlw, "fundingReferences", fundingReferenceWritten); boolean isROR=false; String funderIdentifier = null; - ExternalIdentifier externalIdentifier = ExternalIdentifier.ROR; + ExternalIdentifier externalIdentifier = ExternalIdentifier.ROR_FULL_URL; if (externalIdentifier.isValidIdentifier(funder)) { isROR = true; JsonObject jo = getExternalVocabularyValue(funder); diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/fake/FakeDOIProvider.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/fake/FakeDOIProvider.java index a967fb40620..023b766f2ac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/fake/FakeDOIProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/fake/FakeDOIProvider.java @@ -44,8 +44,11 @@ public List getProviderInformation() { } @Override - public String createIdentifier(DvObject dvo) throws Throwable { - return "fakeIdentifier"; + public String createIdentifier(DvObject dvObject) throws Throwable { + if(dvObject.getIdentifier() == null || dvObject.getIdentifier().isEmpty() ){ + dvObject = generatePid(dvObject); + } + return dvObject.getIdentifier(); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/search/AbstractSolrClientService.java b/src/main/java/edu/harvard/iq/dataverse/search/AbstractSolrClientService.java new file mode 100644 index 00000000000..1ae236d348f --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/search/AbstractSolrClientService.java @@ -0,0 +1,51 @@ +package edu.harvard.iq.dataverse.search; + +import java.io.IOException; 
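A minimal sketch (not part of the patch) of how a caller might use the IngestableDataChecker changes above: the format tests now accept any ByteBuffer, so code that only has a stream, or only the first few bytes of a remote object, can still run the detection. The 42-byte probe and the DTA-only test set mirror the determineRemoteFileType() helper added later in this patch; the checker's package in the import is assumed, since the file header is not shown in this hunk.

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.nio.ByteBuffer;
    import edu.harvard.iq.dataverse.ingest.IngestableDataChecker; // package assumed

    public class RemoteTypeCheckSketch {
        public static void main(String[] args) throws IOException {
            byte[] head = new byte[42];
            try (FileInputStream in = new FileInputStream(args[0])) {
                // read just enough bytes for the STATA signature check
                int n = in.read(head, 0, head.length);
                ByteBuffer buff = ByteBuffer.wrap(head, 0, Math.max(n, 0));
                // restrict the test set to STATA (DTA), as the remote-type helper does
                IngestableDataChecker checker = new IngestableDataChecker(new String[] { "DTA" });
                String mimeType = checker.detectTabularDataFormat(buff);
                System.out.println(mimeType != null ? mimeType : "not a recognized tabular format");
            }
        }
    }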
+import java.util.logging.Logger; + +import org.apache.solr.client.solrj.SolrClient; + +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.SystemConfig; +import jakarta.ejb.EJB; + +/** + * Generics methods for Solr clients implementations + * + * @author jeromeroucou + */ +public abstract class AbstractSolrClientService { + private static final Logger logger = Logger.getLogger(AbstractSolrClientService.class.getCanonicalName()); + + @EJB + SystemConfig systemConfig; + + public abstract void init(); + public abstract void close(); + public abstract SolrClient getSolrClient(); + public abstract void setSolrClient(SolrClient solrClient); + + public void close(SolrClient solrClient) { + if (solrClient != null) { + try { + solrClient.close(); + } catch (IOException e) { + logger.warning("Solr closing error: " + e); + } + solrClient = null; + } + } + + public void reInitialize() { + close(); + init(); + } + + public String getSolrUrl() { + // Get from MPCONFIG. Might be configured by a sysadmin or simply return the + // default shipped with resources/META-INF/microprofile-config.properties. + final String protocol = JvmSettings.SOLR_PROT.lookup(); + final String path = JvmSettings.SOLR_PATH.lookup(); + return protocol + "://" + this.systemConfig.getSolrHostColonPort() + path; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 9b7998b0a8e..839dd4a7e08 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -1,8 +1,34 @@ package edu.harvard.iq.dataverse.search; -import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.ControlledVocabularyValue; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileServiceBean; +import edu.harvard.iq.dataverse.DataFileTag; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetField; +import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; +import edu.harvard.iq.dataverse.DatasetFieldConstant; +import edu.harvard.iq.dataverse.DatasetFieldServiceBean; +import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.DatasetFieldValue; +import edu.harvard.iq.dataverse.DatasetFieldValueValidator; +import edu.harvard.iq.dataverse.DatasetLinkingServiceBean; +import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; +import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; +import edu.harvard.iq.dataverse.DatasetVersionServiceBean; +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseLinkingServiceBean; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.DvObject.DType; +import edu.harvard.iq.dataverse.DvObjectServiceBean; +import edu.harvard.iq.dataverse.Embargo; +import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.PermissionServiceBean; +import edu.harvard.iq.dataverse.Retention; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; import edu.harvard.iq.dataverse.batch.util.LoggingUtil; @@ -27,6 +53,8 @@ import java.sql.Timestamp; import 
java.text.SimpleDateFormat; import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; @@ -44,9 +72,8 @@ import java.util.function.Function; import java.util.logging.Level; import java.util.logging.Logger; +import java.util.regex.Pattern; import java.util.stream.Collectors; -import jakarta.annotation.PostConstruct; -import jakarta.annotation.PreDestroy; import jakarta.ejb.AsyncResult; import jakarta.ejb.Asynchronous; import jakarta.ejb.EJB; @@ -63,11 +90,9 @@ import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; -import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery.SortClause; import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.UpdateResponse; import org.apache.solr.common.SolrDocument; @@ -122,16 +147,15 @@ public class IndexServiceBean { @EJB SettingsServiceBean settingsService; @EJB - SolrClientService solrClientService; + SolrClientService solrClientService; // only for query index on Solr + @EJB + SolrClientIndexService solrClientIndexService; // only for add, update, or remove index on Solr @EJB DataFileServiceBean dataFileService; @EJB VariableServiceBean variableService; - - @EJB - IndexBatchServiceBean indexBatchService; - + @EJB DatasetFieldServiceBean datasetFieldService; @@ -154,37 +178,10 @@ public class IndexServiceBean { private static final String IN_REVIEW_STRING = "In Review"; private static final String DEACCESSIONED_STRING = "Deaccessioned"; public static final String HARVESTED = "Harvested"; - private String rootDataverseName; private Dataverse rootDataverseCached; - SolrClient solrServer; private VariableMetadataUtil variableMetadataUtil; - @PostConstruct - public void init() { - // Get from MPCONFIG. Might be configured by a sysadmin or simply return the default shipped with - // resources/META-INF/microprofile-config.properties. 
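With the @PostConstruct/@PreDestroy plumbing removed from IndexServiceBean, the bean now relies on two injected singletons: SolrClientService for queries and the new SolrClientIndexService (defined later in this patch) for add, update, and delete traffic. A rough sketch, not from the patch, of how a consumer is expected to route calls between the two:

    import java.io.IOException;
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.SolrServerException;
    import org.apache.solr.client.solrj.response.QueryResponse;
    import org.apache.solr.common.SolrInputDocument;
    import jakarta.ejb.EJB;
    import jakarta.ejb.Stateless;

    @Stateless
    public class SolrRoutingSketch {

        @EJB
        SolrClientService solrClientService;           // read path: queries only

        @EJB
        SolrClientIndexService solrClientIndexService; // write path: add, update, delete

        public QueryResponse query(SolrQuery q) throws SolrServerException, IOException {
            return solrClientService.getSolrClient().query(q);
        }

        public void index(SolrInputDocument doc) throws SolrServerException, IOException {
            solrClientIndexService.getSolrClient().add(doc);
        }

        public void remove(String solrId) throws SolrServerException, IOException {
            solrClientIndexService.getSolrClient().deleteById(solrId);
        }
    }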
- String protocol = JvmSettings.SOLR_PROT.lookup(); - String path = JvmSettings.SOLR_PATH.lookup(); - - String urlString = protocol + "://" + systemConfig.getSolrHostColonPort() + path; - solrServer = new HttpSolrClient.Builder(urlString).build(); - - rootDataverseName = findRootDataverseCached().getName(); - } - - @PreDestroy - public void close() { - if (solrServer != null) { - try { - solrServer.close(); - } catch (IOException e) { - logger.warning("Solr closing error: " + e); - } - solrServer = null; - } - } - @TransactionAttribute(REQUIRES_NEW) public Future indexDataverseInNewTransaction(Dataverse dataverse) throws SolrServerException, IOException{ return indexDataverse(dataverse, false); @@ -323,7 +320,7 @@ public Future indexDataverse(Dataverse dataverse, boolean processPaths) String status; try { if (dataverse.getId() != null) { - solrClientService.getSolrClient().add(docs); + solrClientIndexService.getSolrClient().add(docs); } else { logger.info("WARNING: indexing of a dataverse with no id attempted"); } @@ -1065,34 +1062,89 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set indexableValues = dsf.getValuesWithoutNaValues().stream() + .filter(s -> intPattern.matcher(s).find()) + .collect(Collectors.toList()); + solrInputDocument.addField(solrFieldSearchable, indexableValues); + if (dsfType.getSolrField().isFacetable()) { + solrInputDocument.addField(solrFieldFacetable, indexableValues); + } + } else if (dsfType.getSolrField().getSolrType().equals(SolrField.SolrType.FLOAT)) { + // same as for integer values, we need to filter invalid float values + List indexableValues = dsf.getValuesWithoutNaValues().stream() + .filter(s -> { + try { + Double.parseDouble(s); + return true; + } catch (NumberFormatException e) { + return false; + } + }) + .collect(Collectors.toList()); + solrInputDocument.addField(solrFieldSearchable, indexableValues); + if (dsfType.getSolrField().isFacetable()) { + solrInputDocument.addField(solrFieldFacetable, indexableValues); + } } else if (dsfType.getSolrField().getSolrType().equals(SolrField.SolrType.DATE)) { + // Solr accepts dates in the ISO-8601 format, e.g. 
YYYY-MM-DDThh:mm:ssZ, YYYYY-MM-DD, YYYY-MM, YYYY + // See: https://solr.apache.org/guide/solr/latest/indexing-guide/date-formatting-math.html + // If dates have been entered in other formats, we need to skip or convert them + // TODO at the moment we are simply skipping, but converting them would offer more value for search + // For use in facets, we index only the year (YYYY) String dateAsString = ""; if (!dsf.getValues_nondisplay().isEmpty()) { - dateAsString = dsf.getValues_nondisplay().get(0); - } + dateAsString = dsf.getValues_nondisplay().get(0).trim(); + } + logger.fine("date as string: " + dateAsString); + if (dateAsString != null && !dateAsString.isEmpty()) { - SimpleDateFormat inputDateyyyy = new SimpleDateFormat("yyyy", Locale.ENGLISH); - try { - /** - * @todo when bean validation is working we - * won't have to convert strings into dates - */ - logger.fine("Trying to convert " + dateAsString + " to a YYYY date from dataset " + dataset.getId()); - Date dateAsDate = inputDateyyyy.parse(dateAsString); - SimpleDateFormat yearOnly = new SimpleDateFormat("yyyy"); - String datasetFieldFlaggedAsDate = yearOnly.format(dateAsDate); - logger.fine("YYYY only: " + datasetFieldFlaggedAsDate); - // solrInputDocument.addField(solrFieldSearchable, - // Integer.parseInt(datasetFieldFlaggedAsDate)); - solrInputDocument.addField(solrFieldSearchable, datasetFieldFlaggedAsDate); - if (dsfType.getSolrField().isFacetable()) { - // solrInputDocument.addField(solrFieldFacetable, + boolean dateValid = false; + + DateTimeFormatter[] possibleFormats = { + DateTimeFormatter.ISO_INSTANT, + DateTimeFormatter.ofPattern("yyyy-MM-dd"), + DateTimeFormatter.ofPattern("yyyy-MM"), + DateTimeFormatter.ofPattern("yyyy") + }; + for (DateTimeFormatter format : possibleFormats){ + try { + format.parse(dateAsString); + dateValid = true; + } catch (DateTimeParseException e) { + // no-op, date is invalid + } + } + + if (!dateValid) { + logger.fine("couldn't index " + dsf.getDatasetFieldType().getName() + ":" + dsf.getValues() + " because it's not a valid date format according to Solr"); + } else { + SimpleDateFormat inputDateyyyy = new SimpleDateFormat("yyyy", Locale.ENGLISH); + try { + /** + * @todo when bean validation is working we + * won't have to convert strings into dates + */ + logger.fine("Trying to convert " + dateAsString + " to a YYYY date from dataset " + dataset.getId()); + Date dateAsDate = inputDateyyyy.parse(dateAsString); + SimpleDateFormat yearOnly = new SimpleDateFormat("yyyy"); + String datasetFieldFlaggedAsDate = yearOnly.format(dateAsDate); + logger.fine("YYYY only: " + datasetFieldFlaggedAsDate); + // solrInputDocument.addField(solrFieldSearchable, // Integer.parseInt(datasetFieldFlaggedAsDate)); - solrInputDocument.addField(solrFieldFacetable, datasetFieldFlaggedAsDate); + solrInputDocument.addField(solrFieldSearchable, dateAsString); + if (dsfType.getSolrField().isFacetable()) { + // solrInputDocument.addField(solrFieldFacetable, + // Integer.parseInt(datasetFieldFlaggedAsDate)); + solrInputDocument.addField(solrFieldFacetable, datasetFieldFlaggedAsDate); + } + } catch (Exception ex) { + logger.info("unable to convert " + dateAsString + " into YYYY format and couldn't index it (" + dsfType.getName() + ")"); } - } catch (Exception ex) { - logger.info("unable to convert " + dateAsString + " into YYYY format and couldn't index it (" + dsfType.getName() + ")"); } } } else { @@ -1687,7 +1739,7 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset, Set d final SolrInputDocuments docs = 
toSolrDocs(indexableDataset, datafilesInDraftVersion); try { - solrClientService.getSolrClient().add(docs.getDocuments()); + solrClientIndexService.getSolrClient().add(docs.getDocuments()); } catch (SolrServerException | IOException ex) { if (ex.getCause() instanceof SolrServerException) { throw new SolrServerException(ex); @@ -1949,7 +2001,7 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc sid.removeField(SearchFields.SUBTREE); sid.addField(SearchFields.SUBTREE, paths); - UpdateResponse addResponse = solrClientService.getSolrClient().add(sid); + UpdateResponse addResponse = solrClientIndexService.getSolrClient().add(sid); if (object.isInstanceofDataset()) { for (DataFile df : dataset.getFiles()) { solrQuery.setQuery(SearchUtil.constructQuery(SearchFields.ENTITY_ID, df.getId().toString())); @@ -1962,7 +2014,7 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc } sid.removeField(SearchFields.SUBTREE); sid.addField(SearchFields.SUBTREE, paths); - addResponse = solrClientService.getSolrClient().add(sid); + addResponse = solrClientIndexService.getSolrClient().add(sid); } } } @@ -2004,7 +2056,7 @@ public String delete(Dataverse doomed) { logger.fine("deleting Solr document for dataverse " + doomed.getId()); UpdateResponse updateResponse; try { - updateResponse = solrClientService.getSolrClient().deleteById(solrDocIdentifierDataverse + doomed.getId()); + updateResponse = solrClientIndexService.getSolrClient().deleteById(solrDocIdentifierDataverse + doomed.getId()); } catch (SolrServerException | IOException ex) { return ex.toString(); } @@ -2024,7 +2076,7 @@ public String removeSolrDocFromIndex(String doomed) { logger.fine("deleting Solr document: " + doomed); UpdateResponse updateResponse; try { - updateResponse = solrClientService.getSolrClient().deleteById(doomed); + updateResponse = solrClientIndexService.getSolrClient().deleteById(doomed); } catch (SolrServerException | IOException ex) { return ex.toString(); } @@ -2227,7 +2279,7 @@ public List findPermissionsInSolrOnly() throws SearchException { boolean done = false; while (!done) { q.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark); - QueryResponse rsp = solrServer.query(q); + QueryResponse rsp = solrClientService.getSolrClient().query(q); String nextCursorMark = rsp.getNextCursorMark(); logger.fine("Next cursor mark (1K entries): " + nextCursorMark); SolrDocumentList list = rsp.getResults(); @@ -2309,7 +2361,7 @@ private List findDvObjectInSolrOnly(String type) throws SearchException solrQuery.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark); QueryResponse rsp = null; try { - rsp = solrServer.query(solrQuery); + rsp = solrClientService.getSolrClient().query(solrQuery); } catch (SolrServerException | IOException ex) { throw new SearchException("Error searching Solr type: " + type, ex); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 60bcc9f846e..d50fbee681c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -280,7 +280,7 @@ public SolrQueryResponse search( List datasetFields = datasetFieldService.findAllOrderedById(); Map solrFieldsToHightlightOnMap = new HashMap<>(); if (addHighlights) { - solrQuery.setHighlight(true).setHighlightSnippets(1); + solrQuery.setHighlight(true).setHighlightSnippets(1).setHighlightRequireFieldMatch(true); Integer fragSize 
= systemConfig.getSearchHighlightFragmentSize(); if (fragSize != null) { solrQuery.setHighlightFragsize(fragSize); @@ -335,9 +335,13 @@ public SolrQueryResponse search( // ----------------------------------- // PERMISSION FILTER QUERY // ----------------------------------- - String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, onlyDatatRelatedToMe, addFacets); - if (!StringUtils.isBlank(permissionFilterQuery)) { - solrQuery.addFilterQuery(permissionFilterQuery); + String permissionFilterQuery = getPermissionFilterQuery(dataverseRequest, solrQuery, onlyDatatRelatedToMe, addFacets); + if (!permissionFilterQuery.isEmpty()) { + String[] filterParts = permissionFilterQuery.split("&q1="); + solrQuery.addFilterQuery(filterParts[0]); + if(filterParts.length > 1 ) { + solrQuery.add("q1", filterParts[1]); + } } /** @@ -1099,9 +1103,9 @@ private String buildPermissionFilterQuery(boolean avoidJoin, String permissionFi String query = (avoidJoin&& !isAllGroups(permissionFilterGroups)) ? SearchFields.PUBLIC_OBJECT + ":" + true : ""; if (permissionFilterGroups != null && !isAllGroups(permissionFilterGroups)) { if (!query.isEmpty()) { - query = "(" + query + " OR " + "{!join from=" + SearchFields.DEFINITION_POINT + " to=id}" + SearchFields.DISCOVERABLE_BY + ":" + permissionFilterGroups + ")"; + query = "(" + query + " OR " + "{!join from=" + SearchFields.DEFINITION_POINT + " to=id v=$q1})&q1=" + SearchFields.DISCOVERABLE_BY + ":" + permissionFilterGroups; } else { - query = "{!join from=" + SearchFields.DEFINITION_POINT + " to=id}" + SearchFields.DISCOVERABLE_BY + ":" + permissionFilterGroups; + query = "{!join from=" + SearchFields.DEFINITION_POINT + " to=id v=$q1}&q1=" + SearchFields.DISCOVERABLE_BY + ":" + permissionFilterGroups; } } return query; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrClientIndexService.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientIndexService.java new file mode 100644 index 00000000000..0b7f1aae798 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientIndexService.java @@ -0,0 +1,49 @@ +package edu.harvard.iq.dataverse.search; + +import java.util.logging.Logger; + +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.impl.ConcurrentUpdateHttp2SolrClient; +import org.apache.solr.client.solrj.impl.Http2SolrClient; + +import jakarta.annotation.PostConstruct; +import jakarta.annotation.PreDestroy; +import jakarta.ejb.Singleton; +import jakarta.inject.Named; + +/** + * Solr client to provide insert/update/delete operations. + * Don't use this service with queries to Solr, use {@link SolrClientService} instead. + */ +@Named +@Singleton +public class SolrClientIndexService extends AbstractSolrClientService { + + private static final Logger logger = Logger.getLogger(SolrClientIndexService.class.getCanonicalName()); + + private SolrClient solrClient; + + @PostConstruct + public void init() { + solrClient = new ConcurrentUpdateHttp2SolrClient.Builder( + getSolrUrl(), new Http2SolrClient.Builder().build()).build(); + } + + @PreDestroy + public void close() { + close(solrClient); + } + + public SolrClient getSolrClient() { + // Should never happen - but? 
+ if (solrClient == null) { + init(); + } + return solrClient; + } + + public void setSolrClient(SolrClient solrClient) { + this.solrClient = solrClient; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java index b36130de7c8..f9d94b8c6d3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java @@ -1,65 +1,39 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. - */ package edu.harvard.iq.dataverse.search; -import edu.harvard.iq.dataverse.settings.JvmSettings; -import edu.harvard.iq.dataverse.util.SystemConfig; import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.apache.solr.client.solrj.impl.Http2SolrClient; import jakarta.annotation.PostConstruct; import jakarta.annotation.PreDestroy; -import jakarta.ejb.EJB; import jakarta.ejb.Singleton; import jakarta.inject.Named; -import java.io.IOException; import java.util.logging.Logger; /** * * @author landreev * - * This singleton is dedicated to initializing the HttpSolrClient used by the - * application to talk to the search engine, and serving it to all the other - * classes that need it. + * This singleton is dedicated to initializing the Http2SolrClient, used by + * the application to talk to the search engine, and serving it to all the + * other classes that need it. * This ensures that we are using one client only - as recommended by the * documentation. */ @Named @Singleton -public class SolrClientService { +public class SolrClientService extends AbstractSolrClientService { private static final Logger logger = Logger.getLogger(SolrClientService.class.getCanonicalName()); - @EJB - SystemConfig systemConfig; - private SolrClient solrClient; @PostConstruct public void init() { - // Get from MPCONFIG. Might be configured by a sysadmin or simply return the default shipped with - // resources/META-INF/microprofile-config.properties. 
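For the SearchServiceBean change earlier in this patch, the permission filter can now come back as two pieces joined by "&q1=": the {!join ... v=$q1} filter itself and the value the $q1 parameter should resolve to, presumably so the join value travels as a separate request parameter instead of being inlined into the filter string. A small sketch of that split-and-apply step in plain SolrJ, mirroring the patched logic:

    import org.apache.solr.client.solrj.SolrQuery;

    public class PermissionFilterSketch {
        // Apply a permission filter that may carry its join value in a separate "q1" parameter.
        static void applyPermissionFilter(SolrQuery solrQuery, String permissionFilterQuery) {
            if (permissionFilterQuery == null || permissionFilterQuery.isEmpty()) {
                return;
            }
            String[] parts = permissionFilterQuery.split("&q1=");
            // e.g. (SearchFields.PUBLIC_OBJECT:true OR {!join from=SearchFields.DEFINITION_POINT to=id v=$q1})
            solrQuery.addFilterQuery(parts[0]);
            if (parts.length > 1) {
                // e.g. SearchFields.DISCOVERABLE_BY plus the caller's group list
                solrQuery.add("q1", parts[1]);
            }
        }
    }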
- String protocol = JvmSettings.SOLR_PROT.lookup(); - String path = JvmSettings.SOLR_PATH.lookup(); - - String urlString = protocol + "://" + systemConfig.getSolrHostColonPort() + path; - solrClient = new HttpSolrClient.Builder(urlString).build(); + solrClient = new Http2SolrClient.Builder(getSolrUrl()).build(); } @PreDestroy public void close() { - if (solrClient != null) { - try { - solrClient.close(); - } catch (IOException e) { - logger.warning("Solr closing error: " + e); - } - - solrClient = null; - } + close(solrClient); } public SolrClient getSolrClient() { @@ -73,9 +47,4 @@ public SolrClient getSolrClient() { public void setSolrClient(SolrClient solrClient) { this.solrClient = solrClient; } - - public void reInitialize() { - close(); - init(); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrField.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrField.java index ca9805b6c57..7092a01beb1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrField.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrField.java @@ -63,7 +63,7 @@ public enum SolrType { * support range queries) in * https://github.com/IQSS/dataverse/issues/370 */ - STRING("string"), TEXT_EN("text_en"), INTEGER("int"), LONG("long"), DATE("text_en"), EMAIL("text_en"); + STRING("string"), TEXT_EN("text_en"), INTEGER("plong"), FLOAT("pdouble"), DATE("date_range"), EMAIL("text_en"); private String type; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java index e4d885276d0..2b4f08807ef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java @@ -46,9 +46,7 @@ public class SolrIndexServiceBean { @EJB DataverseRoleServiceBean rolesSvc; @EJB - IndexServiceBean indexService; - @EJB - SolrClientService solrClientService; + SolrClientIndexService solrClientService; public static String numRowsClearedByClearAllIndexTimes = "numRowsClearedByClearAllIndexTimes"; public static String messageString = "message"; @@ -155,7 +153,15 @@ private List constructDatafileSolrDocs(DataFile dataFile, Map desiredCards = searchPermissionsService.getDesiredCards(dataFile.getOwner()); for (DatasetVersion datasetVersionFileIsAttachedTo : datasetVersionsToBuildCardsFor(dataFile.getOwner())) { boolean cardShouldExist = desiredCards.get(datasetVersionFileIsAttachedTo.getVersionState()); - if (cardShouldExist) { + /* + * Since datasetVersionFileIsAttachedTo should be a draft or the most recent + * released one, it could be more efficient to stop the search through + * FileMetadatas after those two (versus continuing through all prior versions + * as in isInDatasetVersion). Alternately, perhaps filesToReIndexPermissionsFor + * should not combine the list of files for the different datsetversions into a + * single list to start with. 
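Because the SolrField types above move to point fields (plong, pdouble) and a date_range type, values that do not parse are now filtered out before they reach the index, as in the IndexServiceBean hunks earlier in this patch. A standalone sketch of the same filtering; the integer regex is a stand-in, since the intPattern definition is not shown in the hunk:

    import java.time.format.DateTimeFormatter;
    import java.time.format.DateTimeParseException;
    import java.util.List;
    import java.util.regex.Pattern;
    import java.util.stream.Collectors;

    public class SolrValueFilterSketch {

        private static final Pattern INT_PATTERN = Pattern.compile("^-?\\d+$"); // assumed pattern

        static List<String> indexableIntegers(List<String> values) {
            // keep only whole numbers so plong fields don't reject the document
            return values.stream().filter(s -> INT_PATTERN.matcher(s).find()).collect(Collectors.toList());
        }

        static List<String> indexableFloats(List<String> values) {
            return values.stream().filter(s -> {
                try {
                    Double.parseDouble(s);
                    return true;
                } catch (NumberFormatException e) {
                    return false;
                }
            }).collect(Collectors.toList());
        }

        static boolean isSolrDate(String value) {
            // the patch accepts ISO instants plus yyyy-MM-dd, yyyy-MM and yyyy
            DateTimeFormatter[] formats = {
                    DateTimeFormatter.ISO_INSTANT,
                    DateTimeFormatter.ofPattern("yyyy-MM-dd"),
                    DateTimeFormatter.ofPattern("yyyy-MM"),
                    DateTimeFormatter.ofPattern("yyyy")
            };
            for (DateTimeFormatter f : formats) {
                try {
                    f.parse(value);
                    return true;
                } catch (DateTimeParseException e) {
                    // try the next format
                }
            }
            return false;
        }
    }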
+ */ + if (cardShouldExist && dataFile.isInDatasetVersion(datasetVersionFileIsAttachedTo)) { String solrIdStart = IndexServiceBean.solrDocIdentifierFile + dataFile.getId(); String solrIdEnd = getDatasetOrDataFileSolrEnding(datasetVersionFileIsAttachedTo.getVersionState()); String solrId = solrIdStart + solrIdEnd; @@ -375,6 +381,12 @@ public IndexResponse indexPermissionsOnSelfAndChildren(long definitionPointId) { * inheritance */ public IndexResponse indexPermissionsOnSelfAndChildren(DvObject definitionPoint) { + + if (definitionPoint == null) { + logger.log(Level.WARNING, "Cannot perform indexPermissionsOnSelfAndChildren with a definitionPoint null"); + return null; + } + List filesToReindexAsBatch = new ArrayList<>(); /** * @todo Re-indexing the definition point itself seems to be necessary diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java index 2242b0f51c6..04ae0018323 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java @@ -90,7 +90,6 @@ public enum FeatureFlags { * @since Dataverse 6.3 */ INDEX_HARVESTED_METADATA_SOURCE("index-harvested-metadata-source"), - /** * Dataverse normally deletes all solr documents related to a dataset's files * when the dataset is reindexed. With this flag enabled, additional logic is diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index d7eea970b8a..482c5ecbbb5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -52,6 +52,9 @@ public enum JvmSettings { GUESTBOOK_AT_REQUEST(SCOPE_FILES, "guestbook-at-request"), GLOBUS_CACHE_MAXAGE(SCOPE_FILES, "globus-cache-maxage"), GLOBUS_TASK_MONITORING_SERVER(SCOPE_FILES, "globus-monitoring-server"), + SCOPE_FEATURED_ITEMS(SCOPE_FILES, "featured-items"), + FEATURED_ITEMS_IMAGE_MAXSIZE(SCOPE_FEATURED_ITEMS, "image-maxsize"), + FEATURED_ITEMS_IMAGE_UPLOADS_DIRECTORY(SCOPE_FEATURED_ITEMS, "image-uploads"), //STORAGE DRIVER SETTINGS SCOPE_DRIVER(SCOPE_FILES), diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java index b5eb483c2c8..5b0a178969b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java @@ -684,7 +684,9 @@ Whether Harvesting (OAI) service is enabled * When ingesting tabular data files, store the generated tab-delimited * files *with* the variable names line up top. 
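The featured-items hunk above nests a new scope under SCOPE_FILES, so, assuming the usual JvmSettings convention of joining scopes with dots, the resulting MicroProfile properties should come out as dataverse.files.featured-items.image-maxsize and dataverse.files.featured-items.image-uploads; that naming is inferred here, not spelled out in the patch. A lookup would then follow the ordinary pattern:

    // Property names and fallback values below are illustrative assumptions, not taken from this patch.
    String maxSize = JvmSettings.FEATURED_ITEMS_IMAGE_MAXSIZE.lookupOptional().orElse("1048576");
    String uploadDir = JvmSettings.FEATURED_ITEMS_IMAGE_UPLOADS_DIRECTORY.lookupOptional().orElse("featured-items-uploads");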
*/ - StoreIngestedTabularFilesWithVarHeaders + StoreIngestedTabularFilesWithVarHeaders, + + ContactFeedbackMessageSizeLimit ; @Override @@ -749,6 +751,23 @@ public Long getValueForKeyAsLong(Key key){ return null; } + } + + /** + * Attempt to convert the value to an integer + * - Applicable for keys such as MaxFileUploadSizeInBytes + * + * On failure (key not found or string not convertible to a long), returns defaultValue + * @param key + * @param defaultValue + * @return + */ + public Long getValueForKeyAsLong(Key key, Long defaultValue) { + Long val = getValueForKeyAsLong(key); + if (val == null) { + return defaultValue; + } + return val; } /** diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 991682ec8e8..924566cc0ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -62,6 +62,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; @@ -103,6 +104,7 @@ import java.util.Arrays; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.tika.Tika; import ucar.nc2.NetcdfFile; import ucar.nc2.NetcdfFiles; @@ -426,7 +428,43 @@ public static String retestIngestableFileType(File file, String fileType) { return newType != null ? newType : fileType; } - public static String determineFileType(File f, String fileName) throws IOException{ + public static String determineRemoteFileType(DataFile df, String fileName) { + String fileType = determineFileTypeByNameAndExtension(fileName); + + if (!StringUtils.isBlank(fileType) && fileType.startsWith("application/x-stata")) { + String driverId = DataAccess + .getStorageDriverFromIdentifier(df.getStorageIdentifier()); + if (StorageIO.isDataverseAccessible(driverId)) { + try { + StorageIO storage = df.getStorageIO(); + storage.open(DataAccessOption.READ_ACCESS); + try (InputStream is = storage.getInputStream()) { + + // Read the first 42 bytes of the file to determine the file type + byte[] buffer = new byte[42]; + is.read(buffer, 0, 42); + ByteBuffer bb = ByteBuffer.allocate(42); + bb.put(buffer); + + // step 1: + // Apply our custom methods to try and recognize data files that can be + // converted to tabular data + logger.fine("Attempting to identify potential tabular data files;"); + IngestableDataChecker tabChk = new IngestableDataChecker(new String[] { "DTA" }); + fileType = tabChk.detectTabularDataFormat(bb); + } catch (IOException ex) { + logger.warning("Unable to getInputStream for storageIdentifier: " + df.getStorageIdentifier()); + } + } catch (IOException ex) { + logger.warning("Unable to open storageIO for storageIdentifier: " + df.getStorageIdentifier()); + } + } + } + return fileType; + + } + + public static String determineFileType(File f, String fileName) throws IOException { String fileType = lookupFileTypeByFileName(fileName); if (fileType != null) { return fileType; @@ -495,6 +533,7 @@ public static String determineFileType(File f, String fileName) throws IOExcepti logger.fine("mime type recognized by extension: "+fileType); } } else { + //ToDo - if the extension is null, how can this call do anything logger.fine("fileExtension is null"); final String fileTypeByExtension = lookupFileTypeByExtensionFromPropertiesFile(fileName); if(!StringUtil.isEmpty(fileTypeByExtension)) { @@ 
-568,21 +607,23 @@ private static String lookupFileTypeByExtension(final String fileName) { } private static String lookupFileTypeByFileName(final String fileName) { - return lookupFileTypeFromPropertiesFile("MimeTypeDetectionByFileName", fileName); + return lookupFileTypeFromPropertiesFile(fileName, false); } private static String lookupFileTypeByExtensionFromPropertiesFile(final String fileName) { final String fileKey = FilenameUtils.getExtension(fileName); - return lookupFileTypeFromPropertiesFile("MimeTypeDetectionByFileExtension", fileKey); + return lookupFileTypeFromPropertiesFile(fileKey, true); } - private static String lookupFileTypeFromPropertiesFile(final String propertyFileName, final String fileKey) { + private static String lookupFileTypeFromPropertiesFile(final String fileKey, boolean byExtension) { + final String propertyFileName = byExtension ? "MimeTypeDetectionByFileExtension" : "MimeTypeDetectionByFileName"; final String propertyFileNameOnDisk = propertyFileName + ".properties"; try { logger.fine("checking " + propertyFileNameOnDisk + " for file key " + fileKey); return BundleUtil.getStringFromPropertyFile(fileKey, propertyFileName); } catch (final MissingResourceException ex) { - logger.info(fileKey + " is a filename/extension Dataverse doesn't know about. Consider adding it to the " + propertyFileNameOnDisk + " file."); + //Only use info level if it's for an extension + logger.log(byExtension ? Level.INFO : Level.FINE, fileKey + " is a filename/extension Dataverse doesn't know about. Consider adding it to the " + propertyFileNameOnDisk + " file."); return null; } } @@ -1828,4 +1869,16 @@ public static String getStorageDriver(DataFile dataFile) { public static String sanitizeFileName(String fileNameIn) { return fileNameIn == null ? null : fileNameIn.replace(' ', '_').replaceAll("[\\\\/:*?\"<>|,;]", ""); } + + public static Path createDirStructure(String rootDirectory, String... subdirectories) throws IOException { + Path path = Path.of(rootDirectory, subdirectories); + Files.createDirectories(path); + return path; + } + + public static boolean isFileOfImageType(File file) throws IOException { + Tika tika = new Tika(); + String mimeType = tika.detect(file); + return mimeType != null && mimeType.startsWith("image/"); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MarkupChecker.java b/src/main/java/edu/harvard/iq/dataverse/util/MarkupChecker.java index ef74819f073..02055ad60e9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/MarkupChecker.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/MarkupChecker.java @@ -1,8 +1,3 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. - */ package edu.harvard.iq.dataverse.util; import org.apache.commons.text.StringEscapeUtils; @@ -11,56 +6,105 @@ import org.jsoup.parser.Parser; /** - * Wrapper for Jsoup clean - * + * Provides utility methods for sanitizing and processing HTML content. + *
+ * This class serves as a wrapper for the {@code Jsoup.clean} method and offers + * multiple configurations for cleaning HTML input. It also provides a method + * for escaping HTML entities and stripping all HTML tags. + *
+ * * @author rmp553 */ public class MarkupChecker { - - - + /** - * Wrapper around Jsoup clean method with the basic Safe list - * http://jsoup.org/cookbook/cleaning-html/safelist-sanitizer - * @param unsafe - * @return + * Sanitizes the provided HTML content using a customizable configuration. + *
+ * This method uses the {@code Jsoup.clean} method with a configurable {@code Safelist}. + * For more details, see the + * <a href="http://jsoup.org/cookbook/cleaning-html/safelist-sanitizer">Jsoup SafeList Sanitizer</a>. + *
+ *
+ * It supports preserving class attributes and optionally adding "noopener noreferrer nofollow" + * attributes to anchor tags to enhance security and usability. + *
+ * + * @param unsafe the HTML content to be sanitized; may contain unsafe or untrusted elements. + * @param keepClasses whether to preserve class attributes in the sanitized HTML. + * @param includeNoopenerNoreferrer whether to add "noopener noreferrer nofollow" to tags. + * @return a sanitized HTML string, free from potentially harmful content. */ - public static String sanitizeBasicHTML(String unsafe) { - + private static String sanitizeHTML(String unsafe, boolean keepClasses, boolean includeNoopenerNoreferrer) { if (unsafe == null) { return null; } - // basic includes: a, b, blockquote, br, cite, code, dd, dl, dt, em, i, li, ol, p, pre, q, small, span, strike, strong, sub, sup, u, ul - //Whitelist wl = Whitelist.basic().addTags("img", "h1", "h2", "h3", "kbd", "hr", "s", "del"); - Safelist sl = Safelist.basicWithImages().addTags("h1", "h2", "h3", "kbd", "hr", "s", "del", "map", "area").addAttributes("img", "usemap") - .addAttributes("map", "name").addAttributes("area", "shape", "coords", "href", "title", "alt") + // Create a base Safelist configuration + Safelist sl = Safelist.basicWithImages() + .addTags("h1", "h2", "h3", "kbd", "hr", "s", "del", "map", "area") + .addAttributes("img", "usemap") + .addAttributes("map", "name") + .addAttributes("area", "shape", "coords", "href", "title", "alt") .addEnforcedAttribute("a", "target", "_blank"); + // Add class attributes if requested + if (keepClasses) { + sl.addAttributes(":all", "class"); + } + + // Add "noopener noreferrer nofollow" to tags if requested + if (includeNoopenerNoreferrer) { + sl.addEnforcedAttribute("a", "rel", "noopener noreferrer nofollow"); + } + return Jsoup.clean(unsafe, sl); + } + /** + * Sanitizes the provided HTML content using a basic configuration. + * + * @param unsafe the HTML content to be sanitized; may contain unsafe or untrusted elements. + * @return a sanitized HTML string, free from potentially harmful content. + */ + public static String sanitizeBasicHTML(String unsafe) { + return sanitizeHTML(unsafe, false, false); } - + /** - * Strip all HTMl tags - * - * http://jsoup.org/apidocs/org/jsoup/safety/Safelist.html#none - * - * @param unsafe - * @return + * Sanitizes the provided HTML content using an advanced configuration. + *
+ * This configuration preserves class attributes and adds "noopener noreferrer nofollow" + * attributes to tags to enhance security and usability. + *
+ * + * @param unsafe the HTML content to be sanitized; may contain unsafe or untrusted elements. + * @return a sanitized HTML string, free from potentially harmful content. */ - public static String stripAllTags(String unsafe) { + public static String sanitizeAdvancedHTML(String unsafe) { + return sanitizeHTML(unsafe, true, true); + } + /** + * Removes all HTML tags from the provided content, leaving only plain text. + * + * @param unsafe the HTML content to process; may contain HTML tags. + * @return the plain text content with all HTML tags removed, or {@code null} if the input is {@code null}. + */ + public static String stripAllTags(String unsafe) { if (unsafe == null) { return null; } return Parser.unescapeEntities(Jsoup.clean(unsafe, Safelist.none()), true); - } - + + /** + * Escapes special characters in the provided string into their corresponding HTML entities. + * + * @param unsafe the string to escape; may contain special characters. + * @return a string with HTML entities escaped. + */ public static String escapeHtml(String unsafe) { - return StringEscapeUtils.escapeHtml4(unsafe); + return StringEscapeUtils.escapeHtml4(unsafe); } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java index 2b54f7a3bfe..345a2d3cccc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java @@ -8,6 +8,7 @@ import java.util.Date; import java.util.ArrayList; import java.util.List; +import java.util.stream.Collectors; import java.util.zip.ZipFile; import java.util.HashMap; import java.util.*; @@ -561,7 +562,7 @@ private boolean isShapefileExtension(String ext_name){ if (ext_name == null){ return false; } - return SHAPEFILE_ALL_EXTENSIONS.contains(ext_name); + return SHAPEFILE_ALL_EXTENSIONS.contains(ext_name.toLowerCase()); } /* Does a list of file extensions match those required for a shapefile set? 
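The reworked MarkupChecker above funnels both public entry points through the private sanitizeHTML(unsafe, keepClasses, includeNoopenerNoreferrer). A quick sketch, not from the patch, of what callers can expect from the two variants; the behavior in the comments is read off the Safelist configuration above, and the sample input is illustrative:

    import edu.harvard.iq.dataverse.util.MarkupChecker;

    public class MarkupCheckerSketch {
        public static void main(String[] args) {
            String unsafe = "<p class=\"lead\">Hi <a href=\"https://example.org\">there</a><script>alert(1)</script></p>";

            // basic: the script is dropped, class attributes are dropped, target="_blank" is enforced on links
            System.out.println(MarkupChecker.sanitizeBasicHTML(unsafe));

            // advanced: class attributes survive and links also get rel="noopener noreferrer nofollow"
            System.out.println(MarkupChecker.sanitizeAdvancedHTML(unsafe));

            // plain text only, all tags removed
            System.out.println(MarkupChecker.stripAllTags(unsafe));
        }
    }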
@@ -570,7 +571,10 @@ private boolean doesListContainShapefileExtensions(List ext_list){ if (ext_list == null){ return false; } - return ext_list.containsAll(SHAPEFILE_MANDATORY_EXTENSIONS); + var lowerCaseExtensions = ext_list.stream() + .map(String::toLowerCase) + .toList(); + return lowerCaseExtensions.containsAll(SHAPEFILE_MANDATORY_EXTENSIONS); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java index b6f8870aa2d..8bebcf4d438 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java @@ -16,6 +16,7 @@ Two configurable options allow changing the limit for the number of authors or d import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.dataset.DatasetUtil; +import edu.harvard.iq.dataverse.export.ExportService; import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObjectBuilder; @@ -28,6 +29,8 @@ Two configurable options allow changing the limit for the number of authors or d import java.util.logging.Logger; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; +import io.gdcc.spi.export.ExportException; +import io.gdcc.spi.export.Exporter; public class SignpostingResources { private static final Logger logger = Logger.getLogger(SignpostingResources.class.getCanonicalName()); @@ -72,8 +75,17 @@ public String getLinks() { } String describedby = "<" + ds.getGlobalId().asURL().toString() + ">;rel=\"describedby\"" + ";type=\"" + "application/vnd.citationstyles.csl+json\""; - describedby += ",<" + systemConfig.getDataverseSiteUrl() + "/api/datasets/export?exporter=schema.org&persistentId=" - + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() + ">;rel=\"describedby\"" + ";type=\"application/ld+json\""; + ExportService instance = ExportService.getInstance(); + for (String[] labels : instance.getExportersLabels()) { + String formatName = labels[1]; + Exporter exporter; + try { + exporter = ExportService.getInstance().getExporter(formatName); + describedby += ",<" + getExporterUrl(formatName, ds) + ">;rel=\"describedby\"" + ";type=\"" + exporter.getMediaType() + "\""; + } catch (ExportException ex) { + logger.warning("Could not look up exporter based on " + formatName + ". Exception: " + ex); + } + } valueList.add(describedby); String type = ";rel=\"type\""; @@ -85,7 +97,7 @@ public String getLinks() { String linkset = "<" + systemConfig.getDataverseSiteUrl() + "/api/datasets/:persistentId/versions/" + workingDatasetVersion.getVersionNumber() + "." 
+ workingDatasetVersion.getMinorVersionNumber() - + "/linkset?persistentId=" + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() + "> ; rel=\"linkset\";type=\"application/linkset+json\""; + + "/linkset?persistentId=" + ds.getGlobalId().asString() + "> ; rel=\"linkset\";type=\"application/linkset+json\""; valueList.add(linkset); logger.fine(String.format("valueList is: %s", valueList)); @@ -95,7 +107,7 @@ public String getLinks() { public JsonArrayBuilder getJsonLinkset() { Dataset ds = workingDatasetVersion.getDataset(); GlobalId gid = ds.getGlobalId(); - String landingPage = systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId=" + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier(); + String landingPage = systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId=" + ds.getGlobalId().asString(); JsonArrayBuilder authors = getJsonAuthors(getAuthorURLs(false)); JsonArrayBuilder items = getJsonItems(); @@ -112,15 +124,24 @@ public JsonArrayBuilder getJsonLinkset() { ) ); - mediaTypes.add( - jsonObjectBuilder().add( - "href", - systemConfig.getDataverseSiteUrl() + "/api/datasets/export?exporter=schema.org&persistentId=" + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() - ).add( - "type", - "application/ld+json" - ) - ); + ExportService instance = ExportService.getInstance(); + for (String[] labels : instance.getExportersLabels()) { + String formatName = labels[1]; + Exporter exporter; + try { + exporter = ExportService.getInstance().getExporter(formatName); + mediaTypes.add( + jsonObjectBuilder().add( + "href", getExporterUrl(formatName, ds) + ).add( + "type", + exporter.getMediaType() + ) + ); + } catch (ExportException ex) { + logger.warning("Could not look up exporter based on " + formatName + ". Exception: " + ex); + } + } JsonArrayBuilder linksetJsonObj = Json.createArrayBuilder(); JsonObjectBuilder mandatory; @@ -274,4 +295,9 @@ private String getPublicDownloadUrl(DataFile dataFile) { return FileUtil.getPublicDownloadUrl(systemConfig.getDataverseSiteUrl(), ((gid != null) ? 
gid.asString() : null), dataFile.getId()); } + + private String getExporterUrl(String formatName, Dataset ds) { + return systemConfig.getDataverseSiteUrl() + + "/api/datasets/export?exporter=" + formatName + "&persistentId=" + ds.getGlobalId().asString(); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index e769cacfdb1..5a78ee97ce2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -1173,4 +1173,8 @@ public String getRateLimitsJson() { public String getRateLimitingDefaultCapacityTiers() { return settingsService.getValueForKey(SettingsServiceBean.Key.RateLimitingDefaultCapacityTiers, ""); } + + public long getContactFeedbackMessageSizeLimit() { + return settingsService.getValueForKeyAsLong(SettingsServiceBean.Key.ContactFeedbackMessageSizeLimit, 0L); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java index 60ab9407269..39573416db9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java @@ -444,39 +444,45 @@ public static JsonValue getJsonLDForField(DatasetField field, Boolean excludeEma for (DatasetField dsf : dscv.getChildDatasetFields()) { DatasetFieldType dsft = dsf.getDatasetFieldType(); - if (excludeEmail && DatasetFieldType.FieldType.EMAIL.equals(dsft.getFieldType())) { - continue; - } - // which may have multiple values - if (!dsf.isEmpty()) { - // Add context entry - // ToDo - also needs to recurse here? - JsonLDTerm subFieldName = dsft.getJsonLDTerm(); - if (subFieldName.inNamespace()) { - localContext.putIfAbsent(subFieldName.getNamespace().getPrefix(), - subFieldName.getNamespace().getUrl()); - } else { - localContext.putIfAbsent(subFieldName.getLabel(), subFieldName.getUrl()); + JsonLDTerm subFieldName = dsft.getJsonLDTerm(); + + if (dsft.isCompound()) { + JsonValue compoundChildVals = getJsonLDForField(dsf, excludeEmail, cvocMap, localContext); + child.add(subFieldName.getLabel(), compoundChildVals); + } else { + if (excludeEmail && DatasetFieldType.FieldType.EMAIL.equals(dsft.getFieldType())) { + continue; } + // which may have multiple values + if (!dsf.isEmpty()) { + // Add context entry + // ToDo - also needs to recurse here? 
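With the SignpostingResources change above, the describedby entries are no longer hard-coded to the schema.org exporter: every registered exporter contributes a link whose media type comes from Exporter.getMediaType(). Assembled by hand, one such entry looks roughly like the sketch below; the site URL, format name, and persistent identifier are illustrative values, not taken from the patch.

    public class SignpostingLinkSketch {
        // Build one "describedby" entry of the Link header, mirroring getExporterUrl() and getLinks() above.
        static String describedByLink(String siteUrl, String exporterFormatName, String persistentId, String mediaType) {
            String exporterUrl = siteUrl + "/api/datasets/export?exporter=" + exporterFormatName
                    + "&persistentId=" + persistentId;
            return "<" + exporterUrl + ">;rel=\"describedby\";type=\"" + mediaType + "\"";
        }

        public static void main(String[] args) {
            System.out.println(describedByLink("https://demo.dataverse.org", "schema.org",
                    "doi:10.5072/FK2/EXAMPLE", "application/ld+json"));
        }
    }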
+ if (subFieldName.inNamespace()) { + localContext.putIfAbsent(subFieldName.getNamespace().getPrefix(), + subFieldName.getNamespace().getUrl()); + } else { + localContext.putIfAbsent(subFieldName.getLabel(), subFieldName.getUrl()); + } - List values = dsf.getValues_nondisplay(); + List values = dsf.getValues_nondisplay(); - JsonArrayBuilder childVals = Json.createArrayBuilder(); + JsonArrayBuilder childVals = Json.createArrayBuilder(); - for (String val : dsf.getValues_nondisplay()) { - logger.fine("Child name: " + dsft.getName()); - if (cvocMap.containsKey(dsft.getId())) { - logger.fine("Calling addcvocval for: " + dsft.getName()); - addCvocValue(val, childVals, cvocMap.get(dsft.getId()), localContext); + for (String val : dsf.getValues_nondisplay()) { + logger.fine("Child name: " + dsft.getName()); + if (cvocMap.containsKey(dsft.getId())) { + logger.fine("Calling addcvocval for: " + dsft.getName()); + addCvocValue(val, childVals, cvocMap.get(dsft.getId()), localContext); + } else { + childVals.add(val); + } + } + if (values.size() > 1) { + child.add(subFieldName.getLabel(), childVals); } else { - childVals.add(val); + child.add(subFieldName.getLabel(), childVals.build().get(0)); } } - if (values.size() > 1) { - child.add(subFieldName.getLabel(), childVals); - } else { - child.add(subFieldName.getLabel(), childVals.build().get(0)); - } } } vals.add(child); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java index 36b2b35b48f..c27d6f8a559 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBean.java @@ -52,6 +52,9 @@ public boolean checkRate(User user, Command command) { int capacity = RateLimitUtil.getCapacity(systemConfig, user, action); if (capacity == RateLimitUtil.NO_LIMIT) { return true; + } else if (capacity == RateLimitUtil.RESET_CACHE) { + rateLimitCache.clear(); + return true; } else { String cacheKey = RateLimitUtil.generateCacheKey(user, action); return (!RateLimitUtil.rateLimited(rateLimitCache, cacheKey, capacity)); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java index b566cd42fe1..572ea8d5601 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtil.java @@ -25,6 +25,8 @@ public class RateLimitUtil { static final List rateLimits = new CopyOnWriteArrayList<>(); static final Map rateLimitMap = new ConcurrentHashMap<>(); public static final int NO_LIMIT = -1; + public static final int RESET_CACHE = -2; + static String settingRateLimitsJson = ""; static String generateCacheKey(final User user, final String action) { return (user != null ? user.getIdentifier() : GuestUser.get().getIdentifier()) + @@ -34,6 +36,15 @@ static int getCapacity(SystemConfig systemConfig, User user, String action) { if (user != null && user.isSuperuser()) { return NO_LIMIT; } + + // If the setting changes then reset the cache + if (!settingRateLimitsJson.equals(systemConfig.getRateLimitsJson())) { + settingRateLimitsJson = systemConfig.getRateLimitsJson(); + logger.fine("Setting RateLimitingCapacityByTierAndAction changed (" + settingRateLimitsJson + "). Resetting cache"); + rateLimits.clear(); + return RESET_CACHE; + } + // get the capacity, i.e. 
calls per hour, from config return (user instanceof AuthenticatedUser authUser) ? getCapacityByTierAndAction(systemConfig, authUser.getRateLimitTier(), action) : diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 8ba3ee177e8..f155eec4619 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -17,6 +17,7 @@ import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.branding.BrandingUtil; import edu.harvard.iq.dataverse.dataaccess.DataAccess; +import edu.harvard.iq.dataverse.dataset.DatasetType; import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.datavariable.CategoryMetadata; import edu.harvard.iq.dataverse.datavariable.DataVariable; @@ -25,6 +26,7 @@ import edu.harvard.iq.dataverse.datavariable.VariableCategory; import edu.harvard.iq.dataverse.datavariable.VariableMetadata; import edu.harvard.iq.dataverse.datavariable.VariableRange; +import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItem; import edu.harvard.iq.dataverse.license.License; import edu.harvard.iq.dataverse.globus.FileDetailsHolder; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; @@ -401,6 +403,7 @@ public static JsonObjectBuilder json(Dataset ds, Boolean returnOwners) { .add("persistentUrl", ds.getPersistentURL()) .add("protocol", ds.getProtocol()) .add("authority", ds.getAuthority()) + .add("separator", ds.getSeparator()) .add("publisher", BrandingUtil.getInstallationBrandName()) .add("publicationDate", ds.getPublicationDateFormattedYYYYMMDD()) .add("storageIdentifier", ds.getStorageIdentifier()); @@ -423,11 +426,17 @@ public static JsonObjectBuilder json(FileDetailsHolder ds) { } public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles) { - return json(dsv, null, includeFiles, false); + return json(dsv, null, includeFiles, false,true); + } + public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles, boolean includeMetadataBlocks) { + return json(dsv, null, includeFiles, false, includeMetadataBlocks); } - public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, - boolean includeFiles, boolean returnOwners) { + boolean includeFiles, boolean returnOwners) { + return json( dsv, anonymizedFieldTypeNamesList, includeFiles, returnOwners,true); + } + public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, + boolean includeFiles, boolean returnOwners, boolean includeMetadataBlocks) { Dataset dataset = dsv.getDataset(); JsonObjectBuilder bld = jsonObjectBuilder() .add("id", dsv.getId()).add("datasetId", dataset.getId()) @@ -472,11 +481,12 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List anonymized .add("sizeOfCollection", dsv.getTermsOfUseAndAccess().getSizeOfCollection()) .add("studyCompletion", dsv.getTermsOfUseAndAccess().getStudyCompletion()) .add("fileAccessRequest", dsv.getTermsOfUseAndAccess().isFileAccessRequest()); - - bld.add("metadataBlocks", (anonymizedFieldTypeNamesList != null) ? - jsonByBlocks(dsv.getDatasetFields(), anonymizedFieldTypeNamesList) - : jsonByBlocks(dsv.getDatasetFields()) - ); + if(includeMetadataBlocks) { + bld.add("metadataBlocks", (anonymizedFieldTypeNamesList != null) ? 
+ jsonByBlocks(dsv.getDatasetFields(), anonymizedFieldTypeNamesList) + : jsonByBlocks(dsv.getDatasetFields()) + ); + } if(returnOwners){ bld.add("isPartOf", getOwnersFromDvObject(dataset)); } @@ -599,13 +609,13 @@ public static JsonObjectBuilder json(MetadataBlock block, List fie } public static JsonArrayBuilder json(List metadataBlocks, boolean returnDatasetFieldTypes, boolean printOnlyDisplayedOnCreateDatasetFieldTypes) { - return json(metadataBlocks, returnDatasetFieldTypes, printOnlyDisplayedOnCreateDatasetFieldTypes, null); + return json(metadataBlocks, returnDatasetFieldTypes, printOnlyDisplayedOnCreateDatasetFieldTypes, null, null); } - public static JsonArrayBuilder json(List metadataBlocks, boolean returnDatasetFieldTypes, boolean printOnlyDisplayedOnCreateDatasetFieldTypes, Dataverse ownerDataverse) { + public static JsonArrayBuilder json(List metadataBlocks, boolean returnDatasetFieldTypes, boolean printOnlyDisplayedOnCreateDatasetFieldTypes, Dataverse ownerDataverse, DatasetType datasetType) { JsonArrayBuilder arrayBuilder = Json.createArrayBuilder(); for (MetadataBlock metadataBlock : metadataBlocks) { - arrayBuilder.add(returnDatasetFieldTypes ? json(metadataBlock, printOnlyDisplayedOnCreateDatasetFieldTypes, ownerDataverse) : brief.json(metadataBlock)); + arrayBuilder.add(returnDatasetFieldTypes ? json(metadataBlock, printOnlyDisplayedOnCreateDatasetFieldTypes, ownerDataverse, datasetType) : brief.json(metadataBlock)); } return arrayBuilder; } @@ -633,16 +643,29 @@ public static JsonObject json(DatasetField dfv) { } public static JsonObjectBuilder json(MetadataBlock metadataBlock) { - return json(metadataBlock, false, null); + return json(metadataBlock, false, null, null); } - public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printOnlyDisplayedOnCreateDatasetFieldTypes, Dataverse ownerDataverse) { + public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printOnlyDisplayedOnCreateDatasetFieldTypes, Dataverse ownerDataverse, DatasetType datasetType) { JsonObjectBuilder jsonObjectBuilder = jsonObjectBuilder() .add("id", metadataBlock.getId()) .add("name", metadataBlock.getName()) .add("displayName", metadataBlock.getDisplayName()) .add("displayOnCreate", metadataBlock.isDisplayOnCreate()); + List datasetFieldTypesList; + + if (ownerDataverse != null) { + datasetFieldTypesList = datasetFieldService.findAllInMetadataBlockAndDataverse( + metadataBlock, ownerDataverse, printOnlyDisplayedOnCreateDatasetFieldTypes, datasetType); + } else { + datasetFieldTypesList = printOnlyDisplayedOnCreateDatasetFieldTypes + ? 
datasetFieldService.findAllDisplayedOnCreateInMetadataBlock(metadataBlock) + : metadataBlock.getDatasetFieldTypes(); + } + + Set datasetFieldTypes = filterOutDuplicateDatasetFieldTypes(datasetFieldTypesList); + JsonObjectBuilder fieldsBuilder = Json.createObjectBuilder(); Predicate isNoChild = element -> element.isChild() == false; @@ -672,6 +695,17 @@ public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printO return jsonObjectBuilder; } + // This will remove datasetFieldTypes that are in the list but also a child of another datasetFieldType in the list + // Prevents duplicate datasetFieldType information from being returned twice + // See: https://github.com/IQSS/dataverse/issues/10472 + private static Set filterOutDuplicateDatasetFieldTypes(List datasetFieldTypesList) { + // making a copy of the list as to not damage the original when we remove items + List datasetFieldTypes = new ArrayList<>(datasetFieldTypesList); + // exclude/remove datasetFieldTypes if datasetFieldType exists as a child of another datasetFieldType + datasetFieldTypesList.forEach(dsft -> dsft.getChildDatasetFieldTypes().forEach(c -> datasetFieldTypes.remove(c))); + return new TreeSet<>(datasetFieldTypes); + } + public static JsonArrayBuilder jsonDatasetFieldTypes(List fields) { JsonArrayBuilder fieldsJson = Json.createArrayBuilder(); for (DatasetFieldType field : fields) { @@ -1427,4 +1461,21 @@ private static JsonObjectBuilder jsonDataverseInputLevel(DataverseFieldTypeInput jsonObjectBuilder.add("include", inputLevel.isInclude()); return jsonObjectBuilder; } + + public static JsonArrayBuilder jsonDataverseFeaturedItems(List dataverseFeaturedItems) { + JsonArrayBuilder featuredItemsArrayBuilder = Json.createArrayBuilder(); + for (DataverseFeaturedItem dataverseFeaturedItem : dataverseFeaturedItems) { + featuredItemsArrayBuilder.add(json(dataverseFeaturedItem)); + } + return featuredItemsArrayBuilder; + } + + public static JsonObjectBuilder json(DataverseFeaturedItem dataverseFeaturedItem) { + return jsonObjectBuilder() + .add("id", dataverseFeaturedItem.getId()) + .add("content", dataverseFeaturedItem.getContent()) + .add("imageFileName", dataverseFeaturedItem.getImageFileName()) + .add("imageFileUrl", dataverseFeaturedItem.getImageFileUrl()) + .add("displayOrder", dataverseFeaturedItem.getDisplayOrder()); + } } diff --git a/src/main/java/propertyFiles/3dobjects.properties b/src/main/java/propertyFiles/3dobjects.properties new file mode 100644 index 00000000000..97b8b0698dc --- /dev/null +++ b/src/main/java/propertyFiles/3dobjects.properties @@ -0,0 +1,74 @@ +metadatablock.name=3dobjects +metadatablock.displayName=3D Objects Metadata +metadatablock.displayFacet= +datasetfieldtype.3d3DTechnique.title=3D Technique +datasetfieldtype.3dEquipment.title=Equipment +datasetfieldtype.3dLightingSetup.title=Lighting Setup +datasetfieldtype.3dMasterFilePolygonCount.title=Master File Polygon Count +datasetfieldtype.3dExportedFilePolygonCount.title=Exported File Polygon Count +datasetfieldtype.3dExportedFileFormat.title=Exported File Format +datasetfieldtype.3dAltText.title=Alt-Text +datasetfieldtype.3dMaterialComposition.title=Material Composition +datasetfieldtype.3dObjectDimensions.title=Object Dimensions +datasetfieldtype.3dLength.title=Length +datasetfieldtype.3dWidth.title=Width +datasetfieldtype.3dHeight.title=Height +datasetfieldtype.3dWeight.title=Weight +datasetfieldtype.3dUnit.title=Unit +datasetfieldtype.3dHandling.title=Instructions +datasetfieldtype.3d3DTechnique.description=The technique used 
for capturing the 3D data +datasetfieldtype.3dEquipment.description=The equipment used for capturing the 3D data +datasetfieldtype.3dLightingSetup.description=The lighting used while capturing the 3D data +datasetfieldtype.3dMasterFilePolygonCount.description=The high-resolution polygon count +datasetfieldtype.3dExportedFilePolygonCount.description=The exported mesh polygon count +datasetfieldtype.3dExportedFileFormat.description=The format of the exported mesh +datasetfieldtype.3dAltText.description=A physical description of the object modeled +datasetfieldtype.3dMaterialComposition.description=The material used to create the object, e.g. stone +datasetfieldtype.3dObjectDimensions.description=The general measurements of the physical object +datasetfieldtype.3dLength.description=The rough length of the object +datasetfieldtype.3dWidth.description=The rough width of the object +datasetfieldtype.3dHeight.description=The rough height of the object +datasetfieldtype.3dWeight.description=The rough weight of the object +datasetfieldtype.3dUnit.description=The unit of measurement used for the object dimensions +datasetfieldtype.3dHandling.description=Safety and special handling instructions for the object +datasetfieldtype.3d3DTechnique.watermark= +datasetfieldtype.3dEquipment.watermark= +datasetfieldtype.3dLightingSetup.watermark= +datasetfieldtype.3dMasterFilePolygonCount.watermark= +datasetfieldtype.3dExportedFilePolygonCount.watermark= +datasetfieldtype.3dExportedFileFormat.watermark= +datasetfieldtype.3dAltText.watermark= +datasetfieldtype.3dMaterialComposition.watermark= +datasetfieldtype.3dObjectDimensions.watermark= +datasetfieldtype.3dLength.watermark= +datasetfieldtype.3dWidth.watermark= +datasetfieldtype.3dHeight.watermark= +datasetfieldtype.3dWeight.watermark= +datasetfieldtype.3dUnit.watermark= +datasetfieldtype.3dHandling.watermark= +controlledvocabulary.3d3DTechnique.ir_scanner=IR Scanner +controlledvocabulary.3d3DTechnique.laser=Laser +controlledvocabulary.3d3DTechnique.modelled=Modelled +controlledvocabulary.3d3DTechnique.photogrammetry=Photogrammetry +controlledvocabulary.3d3DTechnique.rti=RTI +controlledvocabulary.3d3DTechnique.structured_light=Structured Light +controlledvocabulary.3d3DTechnique.tomographic=Tomographic +controlledvocabulary.3d3DTechnique.other=Other +controlledvocabulary.3dLightingSetup.natural_light=Natural Light +controlledvocabulary.3dLightingSetup.lightbox=Lightbox +controlledvocabulary.3dLightingSetup.led=LED +controlledvocabulary.3dLightingSetup.fluorescent=Fluorescent +controlledvocabulary.3dLightingSetup.other=Other +controlledvocabulary.3dUnit.cm=cm +controlledvocabulary.3dUnit.m=m +controlledvocabulary.3dUnit.in=in +controlledvocabulary.3dUnit.ft=ft +controlledvocabulary.3dUnit.lbs=lbs +controlledvocabulary.3dExportedFileFormat..fbx=.fbx +controlledvocabulary.3dExportedFileFormat..glb=.glb +controlledvocabulary.3dExportedFileFormat..gltf=.gltf +controlledvocabulary.3dExportedFileFormat..obj=.obj +controlledvocabulary.3dExportedFileFormat..stl=.stl +controlledvocabulary.3dExportedFileFormat..usdz=.usdz +controlledvocabulary.3dExportedFileFormat..x3d=.x3d +controlledvocabulary.3dExportedFileFormat.other=other diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 503ff4ddd4b..1a2531c10a9 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -551,6 +551,9 @@ dashboard.card.harvestingserver.sets={0, choice, 0#Sets|1#Set|2#Sets} 
dashboard.card.harvestingserver.btn.manage=Manage Server dashboard.card.metadataexport.header=Metadata Export dashboard.card.metadataexport.message=Dataset metadata export is only available through the {0} API. Learn more in the {0} {1}API Guide{2}. +dashboard.card.move.data=Data +dashboard.card.move.dataset.manage=Move Dataset +dashboard.card.move.dataverse.manage=Move Dataverse #harvestclients.xhtml harvestclients.title=Manage Harvesting Clients @@ -645,6 +648,13 @@ harvestclients.viewEditDialog.archiveDescription.tip=Description of the archival harvestclients.viewEditDialog.archiveDescription.default.generic=This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data. harvestclients.viewEditDialog.btn.save=Save Changes harvestclients.newClientDialog.title.edit=Edit Group {0} +harvestclients.result.completed=Completed +harvestclients.result.completedWithFailures=Completed with failures +harvestclients.result.failure=FAILED +harvestclients.result.inProgess=IN PROGRESS +harvestclients.result.deleteInProgress=DELETE IN PROGRESS +harvestclients.result.interrupted=INTERRUPTED +harvestclients.result.details={0} harvested, {1} deleted, {2} failed. #harvestset.xhtml harvestserver.title=Manage Harvesting Server @@ -756,32 +766,45 @@ dashboard.list_users.toggleSuperuser.confirmationText.remove=Are you sure you wa dashboard.list_users.api.auth.invalid_apikey=The API key is invalid. dashboard.list_users.api.auth.not_superuser=Forbidden. You must be a superuser. -#dashboard-datamove.xhtml -dashboard.card.datamove=Data -dashboard.card.datamove.header=Dashboard - Move Data -dashboard.card.datamove.manage=Move Data -dashboard.card.datamove.message=Manage and curate your installation by moving datasets from one host dataverse to another. See also Managing Datasets and Dataverses in the Admin Guide. -dashboard.card.datamove.selectdataset.header=Dataset to Move -dashboard.card.datamove.newdataverse.header=New Host Dataverse -dashboard.card.datamove.dataset.label=Dataset -dashboard.card.datamove.dataverse.label=Dataverse -dashboard.card.datamove.confirm.dialog=Are you sure want to move this dataset? -dashboard.card.datamove.confirm.yes=Yes, Move Data -dashboard.card.datamove.message.success=The dataset "{0}" ({1}) has been successfully moved to {2}. -dashboard.card.datamove.message.failure.summary=Failed to moved dataset -dashboard.card.datamove.message.failure.details=The dataset "{0}" ({1}) could not be moved to {2}. {3}{4} -dashboard.card.datamove.dataverse.placeholder=Enter Dataverse Identifier... -dashboard.card.datamove.dataverse.menu.header=Dataverse Name (Affiliate), Identifier -dashboard.card.datamove.dataverse.menu.invalidMsg=No matches found -dashboard.card.datamove.dataset.placeholder=Enter Dataset Persistent ID, doi:... -dashboard.card.datamove.dataset.menu.header=Dataset Persistent ID, Title, Host Dataverse Identifier -dashboard.card.datamove.dataset.menu.invalidMsg=No matches found -dashboard.card.datamove.dataset.command.error.targetDataverseUnpublishedDatasetPublished=A published dataset may not be moved to an unpublished dataverse. You can retry the move after publishing {0}. -dashboard.card.datamove.dataset.command.error.targetDataverseSameAsOriginalDataverse=This dataset is already in this dataverse. -dashboard.card.datamove.dataset.command.error.unforced.datasetGuestbookNotInTargetDataverse=The guestbook would be removed from this dataset if you moved it because the guestbook is not in the new host dataverse. 
-dashboard.card.datamove.dataset.command.error.unforced.linkedToTargetDataverseOrOneOfItsParents=This dataset is linked to the new host dataverse or one of its parents. This move would remove the link to this dataset. -dashboard.card.datamove.dataset.command.error.unforced.suggestForce=Forcing this move is currently only available via API. Please see "Move a Dataset" under Managing Datasets and Dataverses in the Admin Guide for details. -dashboard.card.datamove.dataset.command.error.indexingProblem=Dataset could not be moved. Indexing failed. +#dashboard-movedataset.xhtml +dashboard.move.dataset.header=Dashboard - Move Data +dashboard.move.dataset.message=Manage and curate your installation by moving datasets from one host dataverse to another. See also Managing Datasets and Dataverses in the Admin Guide. +dashboard.move.dataset.selectdataset.header=Dataset to move +dashboard.move.dataset.newdataverse.header=New dataverse collection host +dashboard.move.dataset.dataset.label=Dataset +dashboard.move.dataset.dataverse.label=Dataverse +dashboard.move.dataset.confirm.dialog=Are you sure you want to move this dataset? +dashboard.move.dataset.confirm.yes=Yes, move this dataset +dashboard.move.dataset.message.success=The dataset "{0}" ({1}) has been successfully moved to {2}. +dashboard.move.dataset.message.failure.summary=Failed to move dataset +dashboard.move.dataset.message.failure.details=The dataset "{0}" ({1}) could not be moved to {2}. {3}{4} +dashboard.move.dataset.dataverse.placeholder=Enter Dataverse Identifier... +dashboard.move.dataset.dataverse.menu.header=Dataverse Name (Affiliate), Identifier +dashboard.move.dataset.dataverse.menu.invalidMsg=No matches found +dashboard.move.dataset.placeholder=Enter Dataset Persistent ID, doi:... +dashboard.move.dataset.menu.header=Dataset Persistent ID, Title, Host Dataverse Identifier +dashboard.move.dataset.menu.invalidMsg=No matches found +dashboard.move.dataset.command.error.targetDataverseUnpublishedDatasetPublished=A published dataset may not be moved to an unpublished dataverse. You can retry the move after publishing {0}. +dashboard.move.dataset.command.error.targetDataverseSameAsOriginalDataverse=This dataset is already in this dataverse. +dashboard.move.dataset.command.error.unforced.datasetGuestbookNotInTargetDataverse=The guestbook would be removed from this dataset if you moved it because the guestbook is not in the new host dataverse. +dashboard.move.dataset.command.error.unforced.linkedToTargetDataverseOrOneOfItsParents=This dataset is linked to the new host dataverse or one of its parents. This move would remove the link to this dataset. +dashboard.move.dataset.command.error.unforced.suggestForce=Forcing this move is currently only available via API. Please see "Move a Dataset" under Managing Datasets and Dataverses in the Admin Guide for details. + +#dashboard-movedataverse.xhtml +dashboard.move.dataverse.header=Dashboard - Move Data +dashboard.move.dataverse.message.summary=Move Dataverse Collection +dashboard.move.dataverse.message.detail=Manage and curate your installation by moving a dataverse collection from one host dataverse collection to another. See also Managing Datasets and Dataverses in the Admin Guide. +dashboard.move.dataverse.selectdataverse.header=Dataverse collection to move +dashboard.move.dataverse.newdataverse.header=New dataverse collection host +dashboard.move.dataverse.label=Dataverse +dashboard.move.dataverse.confirm.dialog=Are you sure you want to move this dataverse collection?
+dashboard.move.dataverse.confirm.yes=Yes, move this collection +dashboard.move.dataverse.message.success=The dataverse "{0}" has been successfully moved to {1}. +dashboard.move.dataverse.message.failure.summary=Failed to move dataverse +dashboard.move.dataverse.message.failure.details=The dataverse "{0}" could not be moved to {1}. {2} +dashboard.move.dataverse.placeholder=Enter Dataverse Identifier... +dashboard.move.dataverse.menu.header=Dataverse Name (Affiliate), Identifier +dashboard.move.dataverse.menu.invalidMsg=No matches found #MailServiceBean.java notification.email.create.dataverse.subject={0}: Your dataverse has been created @@ -813,7 +836,7 @@ notification.email.greeting.html=Hello,
# Bundle file editors, please note that "notification.email.welcome" is used in a unit test notification.email.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the User Guide at {1}/{2}/user or contact {3} at {4} for assistance. notification.email.welcomeConfirmEmailAddOn=\n\nPlease verify your email address at {0} . Note, the verify link will expire after {1}. Send another verification email by visiting your account page. -notification.email.requestFileAccess=File access requested for dataset: {0} by {1} ({2}). Manage permissions at {3}. +notification.email.requestFileAccess=File access requested for dataset: {0} by {1} ({2}). Manage permissions at {3} . notification.email.requestFileAccess.guestbookResponse=

Guestbook Response:

{0} notification.email.grantFileAccess=Access granted for files in dataset: {0} (view at {1} ). notification.email.rejectFileAccess=Your request for access was rejected for the requested files in the dataset: {0} (view at {1} ). If you have any questions about why your request was rejected, you may reach the dataset owner using the "Contact" link on the upper right corner of the dataset page. @@ -986,8 +1009,17 @@ dataverse.inputlevels.error.cannotberequiredifnotincluded=The input level for th dataverse.facets.error.fieldtypenotfound=Can't find dataset field type '{0}' dataverse.facets.error.fieldtypenotfacetable=Dataset field type '{0}' is not facetable dataverse.metadatablocks.error.invalidmetadatablockname=Invalid metadata block name: {0} +dataverse.metadatablocks.error.containslistandinheritflag=Metadata block can not contain both {0} and {1}: true dataverse.create.error.jsonparse=Error parsing Json: {0} dataverse.create.error.jsonparsetodataverse=Error parsing the POSTed json into a dataverse: {0} +dataverse.create.featuredItem.error.imageFileProcessing=Error processing featured item file: {0} +dataverse.create.featuredItem.error.fileSizeExceedsLimit=File exceeds the maximum size of {0} +dataverse.create.featuredItem.error.invalidFileType=Invalid image file type +dataverse.create.featuredItem.error.contentShouldBeProvided=Featured item 'content' property should be provided and not empty. +dataverse.create.featuredItem.error.contentExceedsLengthLimit=Featured item content exceeds the maximum allowed length of {0} characters. +dataverse.update.featuredItems.error.missingInputParams=All input parameters (id, content, displayOrder, keepFile, fileName) are required. +dataverse.update.featuredItems.error.inputListsSizeMismatch=All input lists (id, content, displayOrder, keepFile, fileName) must have the same size. +dataverse.delete.featuredItems.success=All featured items of this Dataverse have been successfully deleted. # rolesAndPermissionsFragment.xhtml # advanced.xhtml @@ -1739,19 +1771,21 @@ dataset.transferUnrestricted=Click Continue to transfer the elligible files. dataset.requestAccessToRestrictedFiles=You may request access to any restricted file(s) by clicking the Request Access button. dataset.requestAccessToRestrictedFilesWithEmbargo=Embargoed files cannot be accessed during the embargo period. If your selection contains restricted files, you may request access to them by clicking the Request Access button. -dataset.privateurl.infoMessageAuthor=Privately share this dataset before it is published: {0} +dataset.privateurl.infoMessageAuthor=Privately share this draft dataset before it is published: {0} dataset.privateurl.infoMessageReviewer=You are viewing a preview of this unpublished dataset version. dataset.privateurl.header=Unpublished Dataset Preview URL dataset.privateurl.tip=To cite this data in publications, use the dataset's persistent ID instead of this URL. For more information about the Preview URL feature, please refer to the User Guide. -dataset.privateurl.onlyone=Only one Preview URL can be active for a single dataset. +dataset.privateurl.onlyone=Only one Preview URL can be active for a single draft dataset. dataset.privateurl.absent=Preview URL has not been created. dataset.privateurl.general.button.label=Create General Preview URL -dataset.privateurl.general.description=Create a URL that others can use to review this dataset version before it is published. 
They will be able to access all files in the dataset and see all metadata, including metadata that may identify the dataset's authors. +dataset.privateurl.general.description=Create a URL that others can use to review this draft dataset version before it is published. They will be able to access all files in the dataset and see all metadata, including metadata that may identify the dataset's authors. dataset.privateurl.general.title=General Preview dataset.privateurl.anonymous.title=Anonymous Preview +dataset.privateurl.anonymous.tooltip.preface=The following metadata fields will be hidden from the user of this Anonymous Preview URL: dataset.privateurl.anonymous.button.label=Create Anonymous Preview URL -dataset.privateurl.anonymous.description=Create a URL that others can use to access an anonymized view of this unpublished dataset version. Metadata that could identify the dataset author will not be displayed. Non-identifying metadata will be visible. +dataset.privateurl.anonymous.description=Create a URL that others can use to access an anonymized view of this unpublished dataset version. Metadata that could identify the dataset's author will not be displayed. (See Tool Tip for the list of withheld metadata fields.) Non-identifying metadata will be visible. dataset.privateurl.anonymous.description.paragraph.two=The dataset's files are not changed and users of the Anonymous Preview URL will be able to access them. Users of the Anonymous Preview URL will not be able to see the name of the Dataverse that this dataset is in but will be able to see the name of the repository, which might expose the dataset authors' identities. +dataset.privateurl.anonymous.description.paragraph.three=To verify that all identifying information has been removed or anonymized, it is recommended that you log out and review the dataset as it would be seen by an Anonymous Preview URL user. See User Guide for more information. dataset.privateurl.createPrivateUrl=Create Preview URL dataset.privateurl.introduction=You can create a Preview URL to copy and share with others who will not need a repository account to review this unpublished dataset version. Once the dataset is published or if the URL is disabled, the URL will no longer work and will point to a "Page not found" page. dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access @@ -2793,13 +2827,14 @@ dataverses.api.delete.featured.collections.successful=Featured dataverses have b dataverses.api.move.dataverse.error.metadataBlock=Dataverse metadata block is not in target dataverse. dataverses.api.move.dataverse.error.dataverseLink=Dataverse is linked to target dataverse or one of its parents. dataverses.api.move.dataverse.error.datasetLink=Dataset is linked to target dataverse or one of its parents. -dataverses.api.move.dataverse.error.forceMove=Please use the parameter ?forceMove=true to complete the move. This will remove anything from the dataverse that is not compatible with the target dataverse. +dataverses.api.move.dataverse.error.forceMove=Please use the API and see "Move a Dataverse Collection" with the parameter ?forceMove=true to complete the move. This will remove anything from the dataverse that is not compatible with the target dataverse.
dataverses.api.create.dataset.error.mustIncludeVersion=Please provide initial version in the dataset json dataverses.api.create.dataset.error.superuserFiles=Only a superuser may add files via this api dataverses.api.create.dataset.error.mustIncludeAuthorName=Please provide author name in the dataset json dataverses.api.validate.json.succeeded=The Dataset JSON provided is valid for this Dataverse Collection. dataverses.api.validate.json.failed=The Dataset JSON provided failed validation with the following error: dataverses.api.validate.json.exception=Validation failed with following exception: +dataverses.api.update.featured.items.error.onlyImageFilesAllowed=Invalid file type. Only image files are allowed. #Access.java access.api.allowRequests.failure.noDataset=Could not find Dataset with id: {0} @@ -3119,3 +3154,17 @@ bearerTokenAuthMechanism.errors.tokenValidatedButNoRegisteredUser=Bearer token i authenticationServiceBean.errors.unauthorizedBearerToken=Unauthorized bearer token. authenticationServiceBean.errors.invalidBearerToken=Could not parse bearer token. authenticationServiceBean.errors.bearerTokenDetectedNoOIDCProviderConfigured=Bearer token detected, no OIDC provider configured. + +#SendFeedbackAPI.java +sendfeedback.request.error.targetNotFound=Feedback target object not found. +sendfeedback.request.rateLimited=Too many requests to send feedback. +sendfeedback.body.error.exceedsLength=Body exceeds feedback length: {0} > {1}. +sendfeedback.body.error.isEmpty=Body cannot be empty. +sendfeedback.body.error.missingRequiredFields=Body missing required fields. +sendfeedback.fromEmail.error.missing=Missing fromEmail +sendfeedback.fromEmail.error.invalid=Invalid fromEmail: {0} + +#DataverseFeaturedItems.java +dataverseFeaturedItems.errors.notFound=Could not find dataverse featured item with identifier {0} +dataverseFeaturedItems.delete.successful=Successfully deleted dataverse featured item with identifier {0} + diff --git a/src/main/java/propertyFiles/citation.properties b/src/main/java/propertyFiles/citation.properties index 5899523da67..9a1e6f280ec 100644 --- a/src/main/java/propertyFiles/citation.properties +++ b/src/main/java/propertyFiles/citation.properties @@ -298,6 +298,7 @@ controlledvocabulary.contributorType.supervisor=Supervisor controlledvocabulary.contributorType.work_package_leader=Work Package Leader controlledvocabulary.contributorType.other=Other controlledvocabulary.authorIdentifierScheme.orcid=ORCID +controlledvocabulary.authorIdentifierScheme.ror=ROR controlledvocabulary.authorIdentifierScheme.isni=ISNI controlledvocabulary.authorIdentifierScheme.lcna=LCNA controlledvocabulary.authorIdentifierScheme.viaf=VIAF diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties index b0bc92cf975..95f30b6ba1d 100644 --- a/src/main/resources/META-INF/microprofile-config.properties +++ b/src/main/resources/META-INF/microprofile-config.properties @@ -19,6 +19,8 @@ dataverse.files.directory=${STORAGE_DIR:/tmp/dataverse} dataverse.files.uploads=${STORAGE_DIR:${com.sun.aas.instanceRoot}}/uploads dataverse.files.docroot=${STORAGE_DIR:${com.sun.aas.instanceRoot}}/docroot dataverse.files.globus-cache-maxage=5 +dataverse.files.featured-items.image-maxsize=1000000 +dataverse.files.featured-items.image-uploads=featuredItems # SEARCH INDEX dataverse.solr.host=localhost diff --git a/src/main/resources/db/migration/V6.5.0.2.sql b/src/main/resources/db/migration/V6.5.0.2.sql new file mode 100644 index
00000000000..804ce3c1ea8 --- /dev/null +++ b/src/main/resources/db/migration/V6.5.0.2.sql @@ -0,0 +1,10 @@ +-- Fixes File Access Requests when upgrading from Dataverse 6.0 +-- See: https://github.com/IQSS/dataverse/issues/10714 +DELETE FROM fileaccessrequests +WHERE creation_time <> (SELECT MIN(creation_time) + FROM fileaccessrequests far2 + WHERE far2.datafile_id = fileaccessrequests.datafile_id + AND far2.authenticated_user_id = fileaccessrequests.authenticated_user_id + AND far2.request_state is NULL); + +UPDATE fileaccessrequests SET request_state='CREATED' WHERE request_state is NULL; diff --git a/src/main/resources/db/migration/V6.5.0.3.sql b/src/main/resources/db/migration/V6.5.0.3.sql new file mode 100644 index 00000000000..e2814139e3d --- /dev/null +++ b/src/main/resources/db/migration/V6.5.0.3.sql @@ -0,0 +1,2 @@ +-- #8739 map publisher tag to distributorName when harvesting +update foreignmetadatafieldmapping set datasetfieldname = 'distributorName' where foreignfieldxpath = ':publisher'; diff --git a/src/main/resources/db/migration/V6.5.0.4.sql b/src/main/resources/db/migration/V6.5.0.4.sql new file mode 100644 index 00000000000..9c3b24712e1 --- /dev/null +++ b/src/main/resources/db/migration/V6.5.0.4.sql @@ -0,0 +1,3 @@ +ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS separator character varying(255) DEFAULT ''; + +UPDATE dvobject SET separator='/' WHERE protocol = 'doi' OR protocol = 'hdl'; \ No newline at end of file diff --git a/src/main/resources/db/migration/afterMigrate__1-7256-upsert-referenceData.sql b/src/main/resources/db/migration/afterMigrate__1-7256-upsert-referenceData.sql index 07e9b2c6266..f63fedba02f 100644 --- a/src/main/resources/db/migration/afterMigrate__1-7256-upsert-referenceData.sql +++ b/src/main/resources/db/migration/afterMigrate__1-7256-upsert-referenceData.sql @@ -31,7 +31,7 @@ INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname (15, 'affiliation', 'authorAffiliation', TRUE, 3, 1 ), (16, ':contributor', 'contributorName', FALSE, NULL, 1 ), (17, 'type', 'contributorType', TRUE, 16, 1 ), - (18, ':publisher', 'producerName', FALSE, NULL, 1 ), + (18, ':publisher', 'distributorName', FALSE, NULL, 1 ), (19, ':language', 'language', FALSE, NULL, 1 ) ON CONFLICT DO NOTHING; diff --git a/src/main/webapp/dashboard-datamove.xhtml b/src/main/webapp/dashboard-movedataset.xhtml similarity index 80% rename from src/main/webapp/dashboard-datamove.xhtml rename to src/main/webapp/dashboard-movedataset.xhtml index 7f8365c9be3..7bcd9d1c5ba 100644 --- a/src/main/webapp/dashboard-datamove.xhtml +++ b/src/main/webapp/dashboard-movedataset.xhtml @@ -13,40 +13,40 @@ - + - + - + - +
-
#{bundle['dashboard.card.datamove.selectdataset.header']}
+
#{bundle['dashboard.move.dataset.selectdataset.header']}
@@ -70,26 +70,26 @@
-
#{bundle['dashboard.card.datamove.newdataverse.header']}
+
#{bundle['dashboard.move.dataset.newdataverse.header']}
@@ -113,12 +113,12 @@
+ oncomplete="if (args && !args.validationFailed) PF('moveDatasetConfirmation').show();"> - + +
+ +
+ + + + + + + + + + + + +
+
+ +
+
+
+
+
+
#{bundle['dashboard.move.dataverse.newdataverse.header']}
+
+ + +
+ +
+ + + + + + + + + + + + +
+
+ +
+
+
+ +
+ + + + +
+ + +

#{bundle['dashboard.move.dataverse.confirm.dialog']}

+
+ + +
+
+ + + + + + diff --git a/src/main/webapp/dashboard.xhtml b/src/main/webapp/dashboard.xhtml index 5a72b52937b..5f083a7455f 100644 --- a/src/main/webapp/dashboard.xhtml +++ b/src/main/webapp/dashboard.xhtml @@ -126,21 +126,26 @@
-

#{bundle['dashboard.card.datamove']}

+

#{bundle['dashboard.card.move.data']}

- -

#{bundle['dataverses']}

+ +

#{bundle['datasets']}

- -

#{bundle['datasets']}

+ +

#{bundle['dataverses']}

- diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index 9426884d349..b4454b75775 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1177,14 +1177,14 @@

-

#{bundle['dataset.privateurl.introduction']}

+

#{bundle['dataset.privateurl.introduction']} + #{bundle['dataset.privateurl.onlyone']}

-

-

#{bundle['dataset.privateurl.onlyone']}

+

@@ -1221,10 +1221,18 @@

-

+ + +

#{bundle['dataset.privateurl.anonymous.description']}

-

#{bundle['dataset.privateurl.anonymous.description.paragraph.two']} #{bundle['dataset.privateurl.createPrivateUrl.anonymized.unavailable']}.

- +

#{bundle['dataset.privateurl.anonymous.description.paragraph.two']}

+ + + + +

#{bundle['dataset.privateurl.createPrivateUrl.anonymized.unavailable']}

+ V1" + "LibraScholar" + "https://doi.org/10.5072/FK2/LK0D1H" + - "doi/10.5072/FK2/LK0D1H" + + "10.5072/FK2/LK0D1H" + "" + "" + ""; @@ -295,7 +295,7 @@ public void testToEndNoteString_withoutTitleAndAuthor() throws ParseException { "V1" + "LibraScholar" + "https://doi.org/10.5072/FK2/LK0D1H" + - "doi/10.5072/FK2/LK0D1H" + + "10.5072/FK2/LK0D1H" + "" + "" + ""; diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java index b753f534c6b..7320cf7acfe 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java @@ -157,6 +157,22 @@ public void testIsValidAuthorIdentifierGnd() { assertFalse(validator.isValidAuthorIdentifier("junk", pattern)); } + @Test + public void testIsValidAuthorIdentifierRor() { + DatasetFieldValueValidator validator = new DatasetFieldValueValidator(); + Pattern pattern = ExternalIdentifier.valueOf("ROR").getPattern(); + assertTrue(validator.isValidAuthorIdentifier("03vek6s52", pattern)); + assertFalse(validator.isValidAuthorIdentifier("junk", pattern)); + } + + @Test + public void testIsValidAuthorIdentifierRorFull() { + DatasetFieldValueValidator validator = new DatasetFieldValueValidator(); + Pattern pattern = ExternalIdentifier.valueOf("ROR_FULL_URL").getPattern(); + assertTrue(validator.isValidAuthorIdentifier("https://ror.org/03vek6s52", pattern)); + assertFalse(validator.isValidAuthorIdentifier("junk", pattern)); + } + final Validator validator = Validation.buildDefaultValidatorFactory().getValidator(); @ParameterizedTest diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java index 3cd03abeb38..d5c80cde1aa 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java @@ -3,8 +3,10 @@ import io.restassured.RestAssured; import io.restassured.response.Response; import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism; +import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.util.BundleUtil; +import io.restassured.path.json.JsonPath; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -44,12 +46,62 @@ public void testRetrieveMyDataAsJsonString() { assertEquals(prettyPrintError("dataretrieverAPI.user.not.found", Arrays.asList(badUserIdentifier)), invalidUserIdentifierResponse.prettyPrint()); assertEquals(OK.getStatusCode(), invalidUserIdentifierResponse.getStatusCode()); - // Call as superuser with valid user identifier + // Call as superuser with valid user identifier and no roles Response createSecondUserResponse = UtilIT.createRandomUser(); String userIdentifier = UtilIT.getUsernameFromResponse(createSecondUserResponse); Response validUserIdentifierResponse = UtilIT.retrieveMyDataAsJsonString(superUserApiToken, userIdentifier, emptyRoleIdsList); assertEquals(prettyPrintError("myDataFinder.error.result.no.role", null), validUserIdentifierResponse.prettyPrint()); assertEquals(OK.getStatusCode(), validUserIdentifierResponse.getStatusCode()); + + // Call as normal user with one valid role and no results + Response createNormalUserResponse = UtilIT.createRandomUser(); + String normalUserUsername = UtilIT.getUsernameFromResponse(createNormalUserResponse); + String normalUserApiToken = 
UtilIT.getApiTokenFromResponse(createNormalUserResponse); + Response noResultwithOneRoleResponse = UtilIT.retrieveMyDataAsJsonString(normalUserApiToken, "", new ArrayList<>(Arrays.asList(5L))); + assertEquals(prettyPrintError("myDataFinder.error.result.role.empty", Arrays.asList("Dataset Creator")), noResultwithOneRoleResponse.prettyPrint()); + assertEquals(OK.getStatusCode(), noResultwithOneRoleResponse.getStatusCode()); + + // Call as normal user with multiple valid roles and no results + Response noResultWithMultipleRoleResponse = UtilIT.retrieveMyDataAsJsonString(normalUserApiToken, "", new ArrayList<>(Arrays.asList(5L, 6L))); + assertEquals(prettyPrintError("myDataFinder.error.result.roles.empty", Arrays.asList("Dataset Creator, Contributor")), noResultWithMultipleRoleResponse.prettyPrint()); + assertEquals(OK.getStatusCode(), noResultWithMultipleRoleResponse.getStatusCode()); + + // Call as normal user with one valid dataset role and one dataset result + Response createDataverseResponse = UtilIT.createRandomDataverse(normalUserApiToken); + createDataverseResponse.prettyPrint(); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, normalUserApiToken); + createDatasetResponse.prettyPrint(); + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); + UtilIT.sleepForReindex(datasetId.toString(), normalUserApiToken, 4); + Response oneDatasetResponse = UtilIT.retrieveMyDataAsJsonString(normalUserApiToken, "", new ArrayList<>(Arrays.asList(6L))); + assertEquals(OK.getStatusCode(), oneDatasetResponse.getStatusCode()); + JsonPath jsonPathOneDataset = oneDatasetResponse.getBody().jsonPath(); + assertEquals(1, jsonPathOneDataset.getInt("data.total_count")); + assertEquals(datasetId, jsonPathOneDataset.getInt("data.items[0].entity_id")); + + // Call as normal user with one valid dataverse role and one dataverse result + UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.DS_CONTRIBUTOR.toString(), + "@" + normalUserUsername, superUserApiToken); + Response oneDataverseResponse = UtilIT.retrieveMyDataAsJsonString(normalUserApiToken, "", new ArrayList<>(Arrays.asList(5L))); + assertEquals(OK.getStatusCode(), oneDataverseResponse.getStatusCode()); + JsonPath jsonPathOneDataverse = oneDataverseResponse.getBody().jsonPath(); + assertEquals(1, jsonPathOneDataverse.getInt("data.total_count")); + assertEquals(dataverseAlias, jsonPathOneDataverse.getString("data.items[0].name")); + + // Clean up + Response deleteDatasetResponse = UtilIT.deleteDatasetViaNativeApi(datasetId, normalUserApiToken); + deleteDatasetResponse.prettyPrint(); + assertEquals(200, deleteDatasetResponse.getStatusCode()); + + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, normalUserApiToken); + deleteDataverseResponse.prettyPrint(); + assertEquals(200, deleteDataverseResponse.getStatusCode()); + + Response deleteUserResponse = UtilIT.deleteUser(normalUserUsername); + deleteUserResponse.prettyPrint(); + assertEquals(200, deleteUserResponse.getStatusCode()); } private static String prettyPrintError(String resourceBundleKey, List params) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldsIT.java index ae90ddf0b4c..b70ef04d4c0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldsIT.java @@ -19,7 +19,7 
@@ public static void setUpClass() { void testListAllFacetableDatasetFields() { Response listAllFacetableDatasetFieldsResponse = UtilIT.listAllFacetableDatasetFields(); listAllFacetableDatasetFieldsResponse.then().assertThat().statusCode(OK.getStatusCode()); - int expectedNumberOfFacetableDatasetFields = 59; + int expectedNumberOfFacetableDatasetFields = 64; listAllFacetableDatasetFieldsResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].name", equalTo("authorName")) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java index a0b9f5325d0..7c73498dead 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetTypesIT.java @@ -12,34 +12,40 @@ import java.util.UUID; import org.hamcrest.CoreMatchers; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.nullValue; import static org.junit.jupiter.api.Assertions.assertEquals; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; public class DatasetTypesIT { + final static String INSTRUMENT = "instrument"; + @BeforeAll public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); - Response getSoftwareType = UtilIT.getDatasetType(DatasetType.DATASET_TYPE_SOFTWARE); - getSoftwareType.prettyPrint(); - - String typeFound = JsonPath.from(getSoftwareType.getBody().asString()).getString("data.name"); - System.out.println("type found: " + typeFound); - if (DatasetType.DATASET_TYPE_SOFTWARE.equals(typeFound)) { - return; - } - - System.out.println("The \"software\" type wasn't found. Create it."); Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String username = UtilIT.getUsernameFromResponse(createUser); String apiToken = UtilIT.getApiTokenFromResponse(createUser); UtilIT.setSuperuserStatus(username, true).then().assertThat().statusCode(OK.getStatusCode()); - String jsonIn = Json.createObjectBuilder().add("name", DatasetType.DATASET_TYPE_SOFTWARE).build().toString(); + ensureDatasetTypeIsPresent(DatasetType.DATASET_TYPE_SOFTWARE, apiToken); + ensureDatasetTypeIsPresent(INSTRUMENT, apiToken); + } + private static void ensureDatasetTypeIsPresent(String datasetType, String apiToken) { + Response getDatasetType = UtilIT.getDatasetType(datasetType); + getDatasetType.prettyPrint(); + String typeFound = JsonPath.from(getDatasetType.getBody().asString()).getString("data.name"); + System.out.println("type found: " + typeFound); + if (datasetType.equals(typeFound)) { + return; + } + System.out.println("The " + datasetType + "type wasn't found. 
Create it."); + String jsonIn = Json.createObjectBuilder().add("name", datasetType).build().toString(); Response typeAdded = UtilIT.addDatasetType(jsonIn, apiToken); typeAdded.prettyPrint(); typeAdded.then().assertThat().statusCode(OK.getStatusCode()); @@ -265,4 +271,224 @@ public void testAddAndDeleteDatasetType() { } + @Test + public void testUpdateDatasetTypeLinksWithMetadataBlocks() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + UtilIT.setSuperuserStatus(username, true).then().assertThat().statusCode(OK.getStatusCode()); + + System.out.println("listing root collection blocks with display on create: only citation"); + Response listBlocks = UtilIT.listMetadataBlocks(":root", true, false, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[1].name", nullValue()); + + System.out.println("listing root collection blocks without display on create: only citation"); + listBlocks = UtilIT.listMetadataBlocks(":root", false, false, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[1].name", nullValue()); + + //Avoid all-numeric names (which are not allowed) + String randomName = "zzz" + UUID.randomUUID().toString().substring(0, 8); + String jsonIn = Json.createObjectBuilder().add("name", randomName).build().toString(); + + System.out.println("adding type with name " + randomName); + Response typeAdded = UtilIT.addDatasetType(jsonIn, apiToken); + typeAdded.prettyPrint(); + typeAdded.then().assertThat().statusCode(OK.getStatusCode()); + + Long typeId = JsonPath.from(typeAdded.getBody().asString()).getLong("data.id"); + + System.out.println("id of type: " + typeId); + Response getTypeById = UtilIT.getDatasetType(typeId.toString()); + getTypeById.prettyPrint(); + getTypeById.then().assertThat().statusCode(OK.getStatusCode()); + + String metadataBlockToLink = """ + ["geospatial"] +"""; + + Response linkDatasetType1ToGeospatial = UtilIT.updateDatasetTypeLinksWithMetadataBlocks(randomName, metadataBlockToLink, apiToken); + linkDatasetType1ToGeospatial.prettyPrint(); + linkDatasetType1ToGeospatial.then().assertThat(). 
+ statusCode(OK.getStatusCode()) + .body("data.linkedMetadataBlocks.after[0]", CoreMatchers.is("geospatial")); + + getTypeById = UtilIT.getDatasetType(typeId.toString()); + getTypeById.prettyPrint(); + getTypeById.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.linkedMetadataBlocks[0]", CoreMatchers.is("geospatial")); + + System.out.println("listing root collection blocks with display on create"); + listBlocks = UtilIT.listMetadataBlocks(":root", true, false, randomName, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[1].name", is("geospatial")) + .body("data[2].name", nullValue()); + + System.out.println("listing root collection blocks without display on create"); + listBlocks = UtilIT.listMetadataBlocks(":root", false, false, randomName, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[1].name", is("geospatial")) + .body("data[2].name", nullValue()); + + Response createDataverse = UtilIT.createRandomDataverse(apiToken); + createDataverse.then().assertThat().statusCode(CREATED.getStatusCode()); + + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverse); + + UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); + + System.out.println("listing " + dataverseAlias + " collection blocks with display on create using dataset type " + randomName); + listBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, false, randomName, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[1].name", is("geospatial")) + .body("data[2].name", nullValue()); + + System.out.println("listing " + dataverseAlias + " collection blocks without display on create using dataset type " + randomName); + listBlocks = UtilIT.listMetadataBlocks(dataverseAlias, false, false, randomName, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[1].name", is("geospatial")) + .body("data[2].name", nullValue()); + + System.out.println("listing " + dataverseAlias + " collection blocks and inner dataset field types, without display on create and return dataset field types set to true using dataset type " + randomName); + listBlocks = UtilIT.listMetadataBlocks(dataverseAlias, false, true, randomName, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[1].name", is("geospatial")) + .body("data[0].fields.size()", is(35)) + .body("data[1].fields.size()", is(3)); + + System.out.println("listing " + dataverseAlias + " collection blocks and inner dataset field types, with display on create and return dataset field types set to true using dataset type " + randomName); + listBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, randomName, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[1].name", is("geospatial")) + .body("data[0].fields.size()", is(10)) + .body("data[1].fields.size()", is(0)); // There are no fields 
required or with displayOnCreate=true in geospatial.tsv + + // We send an empty array to mean "delete or clear all" + String emptyJsonArray = "[]"; + Response removeDatasetTypeLinks = UtilIT.updateDatasetTypeLinksWithMetadataBlocks(randomName, emptyJsonArray, apiToken); + removeDatasetTypeLinks.prettyPrint(); + removeDatasetTypeLinks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.linkedMetadataBlocks.after[0]", CoreMatchers.nullValue()); + + listBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, false, randomName, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .body("data[0].name", is("citation")); + } + + @Test + public void testLinkInstrumentToAstro() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + UtilIT.setSuperuserStatus(username, true).then().assertThat().statusCode(OK.getStatusCode()); + + String metadataBlockLink = """ + ["astrophysics"] +//"""; + + String datasetType = "instrument"; + Response linkInstrumentToAstro = UtilIT.updateDatasetTypeLinksWithMetadataBlocks(datasetType, metadataBlockLink, apiToken); + linkInstrumentToAstro.prettyPrint(); + linkInstrumentToAstro.then().assertThat(). + statusCode(OK.getStatusCode()) + .body("data.linkedMetadataBlocks.after[0]", CoreMatchers.is("astrophysics")); + + Response createDataverse = UtilIT.createRandomDataverse(apiToken); + createDataverse.then().assertThat().statusCode(CREATED.getStatusCode()); + + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverse); + + UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); + + // displayOnCreate will only be true for fields that are set this way in the database. + // We set it here so we can make assertions below. 
+ UtilIT.setDisplayOnCreate("astroInstrument", true); + + Response listBlocks = null; + System.out.println("listing root collection blocks with display on create using dataset type " + datasetType); + listBlocks = UtilIT.listMetadataBlocks(":root", true, true, datasetType, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[1].name", is("astrophysics")) + .body("data[2].name", nullValue()) + .body("data[0].fields.title.displayOnCreate", equalTo(true)) + .body("data[1].fields.astroInstrument.displayOnCreate", equalTo(true)); + + System.out.println("listing root collection blocks with all fields (not display on create) using dataset type " + datasetType); + listBlocks = UtilIT.listMetadataBlocks(":root", false, true, datasetType, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[1].name", is("astrophysics")) + .body("data[2].name", nullValue()) + .body("data[0].fields.title.displayOnCreate", equalTo(true)) + .body("data[0].fields.subtitle.displayOnCreate", equalTo(false)) + .body("data[1].fields.astroInstrument.displayOnCreate", equalTo(true)) + .body("data[1].fields.astroObject.displayOnCreate", equalTo(false)); + + System.out.println("listing " + dataverseAlias + " collection blocks with display on create using dataset type " + datasetType); + listBlocks = UtilIT.listMetadataBlocks(dataverseAlias, true, true, datasetType, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[1].name", is("astrophysics")) + .body("data[2].name", nullValue()) + .body("data[0].fields.title.displayOnCreate", equalTo(true)) + // subtitle is hidden because it is not "display on create" + .body("data[0].fields.subtitle", nullValue()) + .body("data[1].fields.astroInstrument.displayOnCreate", equalTo(true)) + // astroObject is hidden because it is not "display on create" + .body("data[1].fields.astroObject", nullValue()); + + System.out.println("listing " + dataverseAlias + " collection blocks with all fields (not display on create) using dataset type " + datasetType); + listBlocks = UtilIT.listMetadataBlocks(dataverseAlias, false, true, datasetType, apiToken); + listBlocks.prettyPrint(); + listBlocks.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].name", is("citation")) + .body("data[1].name", is("astrophysics")) + .body("data[2].name", nullValue()) + .body("data[0].fields.title.displayOnCreate", equalTo(true)) + .body("data[0].fields.subtitle.displayOnCreate", equalTo(false)) + .body("data[1].fields.astroInstrument.displayOnCreate", equalTo(true)) + .body("data[1].fields.astroObject.displayOnCreate", equalTo(false)); + + } + } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 1b2d7e9a431..c5241c82585 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -26,6 +26,7 @@ import jakarta.json.JsonArray; import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; +import jakarta.json.JsonArrayBuilder; import jakarta.ws.rs.core.Response.Status; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.StringUtils; @@ -368,6 +369,14 @@ public void 
testAddUpdateDatasetViaNativeAPI() { createDataverseResponse.prettyPrint(); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + JsonArrayBuilder metadataBlocks = Json.createArrayBuilder(); + metadataBlocks.add("citation"); + metadataBlocks.add("journal"); + metadataBlocks.add("socialscience"); + Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, metadataBlocks, apiToken); + setMetadataBlocksResponse.prettyPrint(); + setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); createDatasetResponse.prettyPrint(); Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); @@ -411,7 +420,20 @@ public void testAddUpdateDatasetViaNativeAPI() { String responseString = addSubjectSingleViaNative.prettyPrint(); addSubjectSingleViaNative.then().assertThat() .statusCode(OK.getStatusCode()).body(containsString("Mathematical Sciences")).body(containsString("Social Sciences")); - + + + String pathToJsonFileSingleCvoc = "doc/sphinx-guides/source/_static/api/dataset-add-single-cvoc-field-metadata.json"; + Response addSingleCvocViaNative = UtilIT.updateFieldLevelDatasetMetadataViaNative(datasetPersistentId, pathToJsonFileSingleCvoc, apiToken); + addSingleCvocViaNative.prettyPrint(); + addSingleCvocViaNative.then().assertThat() + .statusCode(OK.getStatusCode()); + + + String pathToJsonFileSingleCompound = "doc/sphinx-guides/source/_static/api/dataset-add-single-compound-field-metadata.json"; + Response addSingleCompoundViaNative = UtilIT.updateFieldLevelDatasetMetadataViaNative(datasetPersistentId, pathToJsonFileSingleCompound, apiToken); + addSingleCompoundViaNative.prettyPrint(); + addSingleCompoundViaNative.then().assertThat() + .statusCode(OK.getStatusCode()); //Trying to blank out required field should fail... 
@@ -425,9 +447,10 @@ public void testAddUpdateDatasetViaNativeAPI() { Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); assertEquals(200, publishDataset.getStatusCode()); + UtilIT.sleepForLock(datasetPersistentId, "finalizePublication", apiToken, UtilIT.MAXIMUM_PUBLISH_LOCK_DURATION); //post publish update String pathToJsonFilePostPub= "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json"; - Response addDataToPublishedVersion = UtilIT.addDatasetMetadataViaNative(datasetPersistentId, pathToJsonFilePostPub, apiToken); + Response addDataToPublishedVersion = UtilIT.addDatasetMetadataViaNative(datasetPersistentId, pathToJsonFilePostPub, apiToken); addDataToPublishedVersion.prettyPrint(); addDataToPublishedVersion.then().assertThat().statusCode(OK.getStatusCode()); @@ -731,6 +754,42 @@ public void testCreatePublishDestroyDataset() { } + @Test + public void testHideMetadataBlocksInDatasetVersionsAPI() { + + // Create user + String apiToken = UtilIT.createRandomUserGetToken(); + + // Create user with no permission + String apiTokenNoPerms = UtilIT.createRandomUserGetToken(); + + // Create Collection + String collectionAlias = UtilIT.createRandomCollectionGetAlias(apiToken); + + // Create Dataset + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(collectionAlias, apiToken); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId"); + + // Now check that the metadata is NOT shown, when we ask the versions api to dos o. + boolean excludeMetadata = true; + Response unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken, true,excludeMetadata, false); + unpublishedDraft.prettyPrint(); + unpublishedDraft.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.metadataBlocks", equalTo(null)); + + // Now check that the metadata is shown, when we ask the versions api to dos o. + excludeMetadata = false; + unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken,true, excludeMetadata, false); + unpublishedDraft.prettyPrint(); + unpublishedDraft.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.metadataBlocks", notNullValue() ); + } /** * The apis (/api/datasets/{id}/versions and /api/datasets/{id}/versions/{vid} * are already called from other RestAssured tests, in this class and also in FilesIT. 
@@ -4267,6 +4326,178 @@ public void testCitationDate() throws IOException { .statusCode(OK.getStatusCode()); } + @Test + public void testDataCiteExport() throws IOException { + + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverse = UtilIT.createRandomDataverse(apiToken); + createDataverse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverse); + + JsonObjectBuilder datasetJson = Json.createObjectBuilder() + .add("datasetVersion", Json.createObjectBuilder() + .add("license", Json.createObjectBuilder() + .add("name", "CC0 1.0") + .add("uri", "http://creativecommons.org/publicdomain/zero/1.0") + ) + .add("metadataBlocks", Json.createObjectBuilder() + .add("citation", Json.createObjectBuilder() + .add("fields", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("typeName", "title") + .add("value", "Test dataset") + .add("typeClass", "primitive") + .add("multiple", false) + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("authorName", + Json.createObjectBuilder() + .add("value", "Simpson, Homer") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "authorName") + ) + .add("authorAffiliation", + Json.createObjectBuilder() + .add("value", "https://ror.org/03vek6s52") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "authorAffiliation") + ) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "author") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("datasetContactEmail", + Json.createObjectBuilder() + .add("value", "hsimpson@mailinator.com") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "datasetContactEmail")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "datasetContact") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("dsDescriptionValue", + Json.createObjectBuilder() + .add("value", "Just a test dataset.") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "dsDescriptionValue")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "dsDescription") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add("Other") + ) + .add("typeClass", "controlledVocabulary") + .add("multiple", true) + .add("typeName", "subject") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("authorName", + Json.createObjectBuilder() + .add("value", "https://ror.org/01cwqze88") // NIH + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "grantNumberAgency") + ) + .add("authorAffiliation", + Json.createObjectBuilder() + .add("value", "12345") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "grantNumberValue") + ) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "grantNumber") + ) + ) + ) + )); + + Response createDatasetResponse = 
UtilIT.createDataset(dataverseAlias, datasetJson, apiToken); + createDatasetResponse.prettyPrint(); + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); + String datasetPid = JsonPath.from(createDatasetResponse.getBody().asString()).getString("data.persistentId"); + + Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverse.prettyPrint(); + publishDataverse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDataset.prettyPrint(); + publishDataset.then().assertThat().statusCode(OK.getStatusCode()); + + Response exportDatasetAsDataCite = UtilIT.exportDataset(datasetPid, "Datacite", apiToken, true); + exportDatasetAsDataCite.prettyPrint(); + exportDatasetAsDataCite.then().assertThat() + .body("resource.creators.creator[0].creatorName", equalTo("Simpson, Homer")) + // see below for additional affiliation assertions, which can vary + .body("resource.creators.creator[0].affiliation.@schemeURI", equalTo("https://ror.org")) + .body("resource.creators.creator[0].affiliation.@affiliationIdentifierScheme", equalTo("ROR")) + // see below for additional fundingReference assertions, which can vary + .body("resource.fundingReferences.fundingReference[0].awardNumber", equalTo("12345")) + .statusCode(OK.getStatusCode()); + + // Out of the box :CVocConf is not set. If you set it with + // https://github.com/gdcc/dataverse-external-vocab-support/blob/de011d239254ff7d651212c565f8604808dcd7e9/examples/config/grantNumberAgencyRor.json + // you can expect different results. + boolean authorsOrcidAndRorEnabled = false; + if (authorsOrcidAndRorEnabled) { + exportDatasetAsDataCite.then().assertThat() + .body("resource.creators.creator[0].affiliation", equalTo("Harvard University")) + // Once https://github.com/IQSS/dataverse/pull/11175 is merged, the equalTo below + // should be "https://ror.org/03vek6s52" instead of "Harvard University". + .body("resource.creators.creator[0].affiliation.@affiliationIdentifier", equalTo("Harvard University")); + } else { + exportDatasetAsDataCite.then().assertThat() + .body("resource.creators.creator[0].affiliation", equalTo("https://ror.org/03vek6s52")) + .body("resource.creators.creator[0].affiliation.@affiliationIdentifier", equalTo("https://ror.org/03vek6s52")); + } + + // Out of the box :CVocConf is not set. If you set it with + // https://github.com/gdcc/dataverse-external-vocab-support/blob/de011d239254ff7d651212c565f8604808dcd7e9/examples/config/grantNumberAgencyRor.json + // you can expect different results.
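+ // For example (illustrative only; the exact setup call is not shown in this test), PUTting that JSON
+ // to the admin settings API under :CVocConf before exporting is the kind of configuration that the
+ // grantNumberAgencyRorEnabled flag below stands in for.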
+ boolean grantNumberAgencyRorEnabled = false; + if (grantNumberAgencyRorEnabled) { + exportDatasetAsDataCite.then().assertThat() + .body("resource.fundingReferences.fundingReference[0].funderName", equalTo("National Institutes of Health")) + .body("resource.fundingReferences.fundingReference[0].funderIdentifier.@funderIdentifierType", equalTo("ROR")) + .body("resource.fundingReferences.fundingReference[0].funderIdentifier.@schemeURI", equalTo("https://ror.org")) + .body("resource.fundingReferences.fundingReference[0].funderIdentifier", equalTo("https://ror.org/01cwqze88")); + } else { + exportDatasetAsDataCite.then().assertThat() + .body("resource.fundingReferences.fundingReference[0].funderName", equalTo("https://ror.org/01cwqze88")) + .body("resource.fundingReferences.fundingReference[0].awardNumber", equalTo("12345")); + } + } + @Test public void getVersionFiles() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItemsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItemsIT.java new file mode 100644 index 00000000000..032c1739d53 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItemsIT.java @@ -0,0 +1,112 @@ +package edu.harvard.iq.dataverse.api; + +import edu.harvard.iq.dataverse.util.BundleUtil; +import io.restassured.RestAssured; +import io.restassured.path.json.JsonPath; +import io.restassured.response.Response; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import java.text.MessageFormat; + +import static jakarta.ws.rs.core.Response.Status.*; +import static org.hamcrest.CoreMatchers.equalTo; + +public class DataverseFeaturedItemsIT { + + @BeforeAll + public static void setUpClass() { + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + } + + @Test + public void testDeleteFeaturedItem() { + String apiToken = createUserAndGetApiToken(); + String dataverseAlias = createDataverseAndGetAlias(apiToken); + Long featuredItemId = createFeaturedItemAndGetId(dataverseAlias, apiToken, "src/test/resources/images/coffeeshop.png"); + + // Should return not found when passing incorrect item id + Response deleteFeatureItemResponse = UtilIT.deleteDataverseFeaturedItem(100000L, apiToken); + deleteFeatureItemResponse.then() + .body("message", equalTo(MessageFormat.format(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notFound"), 100000L))) + .assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // Should return unauthorized when passing correct id and user does not have permissions + String randomUserApiToken = createUserAndGetApiToken(); + deleteFeatureItemResponse = UtilIT.deleteDataverseFeaturedItem(featuredItemId, randomUserApiToken); + deleteFeatureItemResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); + + // Should delete featured item when passing correct id and user has permissions + deleteFeatureItemResponse = UtilIT.deleteDataverseFeaturedItem(featuredItemId, apiToken); + deleteFeatureItemResponse.then() + .body("data.message", equalTo(MessageFormat.format(BundleUtil.getStringFromBundle("dataverseFeaturedItems.delete.successful"), featuredItemId))) + .assertThat().statusCode(OK.getStatusCode()); + + Response listFeaturedItemsResponse = UtilIT.listDataverseFeaturedItems(dataverseAlias, apiToken); + listFeaturedItemsResponse.then() + .body("data.size()", equalTo(0)) + .assertThat().statusCode(OK.getStatusCode()); + } + + @Test + public void 
testUpdateFeaturedItem() { + String apiToken = createUserAndGetApiToken(); + String dataverseAlias = createDataverseAndGetAlias(apiToken); + Long featuredItemId = createFeaturedItemAndGetId(dataverseAlias, apiToken, "src/test/resources/images/coffeeshop.png"); + + // Should return not found when passing incorrect item id + Response updateFeatureItemResponse = UtilIT.updateDataverseFeaturedItem(100000L, "updatedTitle", 1, false, null, apiToken); + updateFeatureItemResponse.then() + .body("message", equalTo(MessageFormat.format(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notFound"), 100000L))) + .assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // Should return unauthorized when passing correct id and user does not have permissions + String randomUserApiToken = createUserAndGetApiToken(); + updateFeatureItemResponse = UtilIT.updateDataverseFeaturedItem(featuredItemId, "updatedTitle", 1, false, null, randomUserApiToken); + updateFeatureItemResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); + + // Update featured item: keep image file + updateFeatureItemResponse = UtilIT.updateDataverseFeaturedItem(featuredItemId, "updatedTitle1", 1, true, null, apiToken); + verifyUpdatedFeaturedItem(updateFeatureItemResponse, "updatedTitle1", "coffeeshop.png", 1); + + // Update featured item: remove image file + updateFeatureItemResponse = UtilIT.updateDataverseFeaturedItem(featuredItemId, "updatedTitle1", 2, false, null, apiToken); + verifyUpdatedFeaturedItem(updateFeatureItemResponse, "updatedTitle1", null, 2); + + // Update featured item: set new image file + updateFeatureItemResponse = UtilIT.updateDataverseFeaturedItem(featuredItemId, "updatedTitle1", 2, false, "src/test/resources/images/coffeeshop.png", apiToken); + verifyUpdatedFeaturedItem(updateFeatureItemResponse, "updatedTitle1", "coffeeshop.png", 2); + + // Update featured item: set malicious content which should be sanitized + String unsafeContent = "
<h1>A title</h1><script>alert('xss')</script><a target=\"_blank\" href=\"https://dataverse.org\">link";
+ String sanitizedContent = "<h1>A title</h1><a target=\"_blank\" href=\"https://dataverse.org\">
link"; + updateFeatureItemResponse = UtilIT.updateDataverseFeaturedItem(featuredItemId, unsafeContent, 2, false, "src/test/resources/images/coffeeshop.png", apiToken); + verifyUpdatedFeaturedItem(updateFeatureItemResponse, sanitizedContent, "coffeeshop.png", 2); + } + + private String createUserAndGetApiToken() { + Response createUserResponse = UtilIT.createRandomUser(); + return UtilIT.getApiTokenFromResponse(createUserResponse); + } + + private String createDataverseAndGetAlias(String apiToken) { + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + return UtilIT.getAliasFromResponse(createDataverseResponse); + } + + private Long createFeaturedItemAndGetId(String dataverseAlias, String apiToken, String pathToTestFile) { + Response createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, "test", 0, pathToTestFile); + createFeatureItemResponse.then().assertThat().statusCode(OK.getStatusCode()); + JsonPath createdFeaturedItem = JsonPath.from(createFeatureItemResponse.body().asString()); + return createdFeaturedItem.getLong("data.id"); + } + + private void verifyUpdatedFeaturedItem(Response response, String expectedContent, String expectedImageFileName, int expectedDisplayOrder) { + response.then().assertThat() + .body("data.content", equalTo(expectedContent)) + .body("data.imageFileName", equalTo(expectedImageFileName)) + .body("data.displayOrder", equalTo(expectedDisplayOrder)) + .statusCode(OK.getStatusCode()); + } +} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 13c4c30190b..825465fcd9e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -14,6 +14,7 @@ import java.io.FileReader; import java.io.IOException; import java.nio.file.Paths; +import java.text.MessageFormat; import java.util.Arrays; import java.util.List; import java.util.logging.Logger; @@ -927,16 +928,18 @@ public void testListMetadataBlocks() { .body("data.size()", equalTo(1)) .body("data[0].name", is("citation")) .body("data[0].fields.title.displayOnCreate", equalTo(true)) - .body("data[0].fields.size()", is(10)) + .body("data[0].fields.size()", is(10)) // 28 - 18 child duplicates .body("data[0].fields.author.childFields.size()", is(4)); Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); + setMetadataBlocksResponse.prettyPrint(); setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); String[] testInputLevelNames = {"geographicCoverage", "country", "city", "notesText"}; boolean[] testRequiredInputLevels = {false, true, false, false}; boolean[] testIncludedInputLevels = {false, true, true, false}; Response updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken); + updateDataverseInputLevelsResponse.prettyPrint(); updateDataverseInputLevelsResponse.then().assertThat().statusCode(OK.getStatusCode()); // Dataverse not found @@ -947,6 +950,7 @@ public void testListMetadataBlocks() { String[] expectedAllMetadataBlockDisplayNames = {"Astronomy and Astrophysics Metadata", "Citation Metadata", "Geospatial Metadata"}; listMetadataBlocksResponse = 
UtilIT.listMetadataBlocks(dataverseAlias, false, false, apiToken); + listMetadataBlocksResponse.prettyPrint(); listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) @@ -1008,14 +1012,13 @@ public void testListMetadataBlocks() { // Since the included property of notesText is set to false, we should retrieve the total number of fields minus one int citationMetadataBlockIndex = geospatialMetadataBlockIndex == 0 ? 1 : 0; listMetadataBlocksResponse.then().assertThat() - .body(String.format("data[%d].fields.size()", citationMetadataBlockIndex), equalTo(34)); + .body(String.format("data[%d].fields.size()", citationMetadataBlockIndex), equalTo(34)); // 79 minus 45 child duplicates // Since the included property of geographicCoverage is set to false, we should retrieve the total number of fields minus one listMetadataBlocksResponse.then().assertThat() .body(String.format("data[%d].fields.size()", geospatialMetadataBlockIndex), equalTo(2)); - - listMetadataBlocksResponse = UtilIT.getMetadataBlock("geospatial"); + listMetadataBlocksResponse = UtilIT.getMetadataBlock("geospatial"); String actualGeospatialMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data.fields['geographicCoverage'].name")); String actualGeospatialMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data.fields['geographicCoverage'].childFields['country'].name")); String actualGeospatialMetadataField3 = listMetadataBlocksResponse.then().extract().path(String.format("data.fields['geographicCoverage'].childFields['city'].name")); @@ -1346,20 +1349,31 @@ public void testUpdateDataverse() { String[] newFacetIds = new String[]{"contributorName"}; String[] newMetadataBlockNames = new String[]{"citation", "geospatial", "biomedical"}; + // Assert that the error is returned for having both MetadataBlockNames and inheritMetadataBlocksFromParent Response updateDataverseResponse = UtilIT.updateDataverse( - testDataverseAlias, - newAlias, - newName, - newAffiliation, - newDataverseType, - newContactEmails, - newInputLevelNames, - newFacetIds, - newMetadataBlockNames, - apiToken + testDataverseAlias, newAlias, newName, newAffiliation, newDataverseType, newContactEmails, newInputLevelNames, + null, newMetadataBlockNames, apiToken, + Boolean.TRUE, Boolean.TRUE + ); + updateDataverseResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.containslistandinheritflag"), "metadataBlockNames", "inheritMetadataBlocksFromParent"))); + + // Assert that the error is returned for having both facetIds and inheritFacetsFromParent + updateDataverseResponse = UtilIT.updateDataverse( + testDataverseAlias, newAlias, newName, newAffiliation, newDataverseType, newContactEmails, newInputLevelNames, + newFacetIds, null, apiToken, + Boolean.TRUE, Boolean.TRUE ); + updateDataverseResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.containslistandinheritflag"), "facetIds", "inheritFacetsFromParent"))); // Assert dataverse properties are updated + updateDataverseResponse = UtilIT.updateDataverse( + testDataverseAlias, newAlias, newName, newAffiliation, newDataverseType, newContactEmails, newInputLevelNames, + newFacetIds, newMetadataBlockNames, apiToken + ); 
updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); String actualDataverseAlias = updateDataverseResponse.then().extract().path("data.alias"); assertEquals(newAlias, actualDataverseAlias); @@ -1396,7 +1410,60 @@ public void testUpdateDataverse() { Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken); getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + listMetadataBlocksResponse.prettyPrint(); + updateDataverseResponse = UtilIT.updateDataverse( + newAlias, newAlias, newName, newAffiliation, newDataverseType, newContactEmails, + null, + null, + null, + apiToken + ); + updateDataverseResponse.prettyPrint(); + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + listMetadataBlocksResponse.prettyPrint(); + + + // Update the dataverse without including metadata blocks, facets, or input levels + // ignore the missing data so the metadata blocks, facets, and input levels are NOT deleted and inherited from the parent + updateDataverseResponse = UtilIT.updateDataverse( + newAlias, newAlias, newName, newAffiliation, newDataverseType, newContactEmails, + null, + null, + null, + apiToken + ); + updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the metadata blocks are untouched and NOT inherited from the parent + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + listMetadataBlocksResponse.prettyPrint(); + listMetadataBlocksResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(3)) + .body("data[0].name", equalTo(actualDataverseMetadataBlock1)) + .body("data[1].name", equalTo(actualDataverseMetadataBlock2)) + .body("data[2].name", equalTo(actualDataverseMetadataBlock3)); + // Assert that the dataverse should still have its input level(s) + listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(newAlias, apiToken); + listDataverseInputLevelsResponse.prettyPrint(); + listDataverseInputLevelsResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) + .body("data[0].datasetFieldTypeName", equalTo("geographicCoverage")); + // Assert that the dataverse should still have its Facets + listDataverseFacetsResponse = UtilIT.listDataverseFacets(newAlias, apiToken); + listDataverseFacetsResponse.prettyPrint(); + listDataverseFacetsResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) + .body("data", hasItem("contributorName")); + // Update the dataverse without setting metadata blocks, facets, or input levels + // Do NOT ignore the missing data so the metadata blocks, facets, and input levels are deleted and inherited from the parent updateDataverseResponse = UtilIT.updateDataverse( newAlias, newAlias, @@ -1407,12 +1474,14 @@ public void testUpdateDataverse() { null, null, null, - apiToken + apiToken, + Boolean.TRUE, Boolean.TRUE ); updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); // Assert that the metadata blocks are inherited from the parent listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + listMetadataBlocksResponse.prettyPrint(); listMetadataBlocksResponse .then().assertThat() .statusCode(OK.getStatusCode()) @@ -1576,4 +1645,285 @@ public void testGetUserPermissionsOnDataverse() { Response 
getUserPermissionsOnDataverseInvalidIdResponse = UtilIT.getUserPermissionsOnDataverse("testInvalidAlias", apiToken); getUserPermissionsOnDataverseInvalidIdResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } + + @Test + public void testCreateFeaturedItem() { + Response createUserResponse = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + // Should not return any error when not passing a file + + Response createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, "test", 0, null); + createFeatureItemResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.content", equalTo("test")) + .body("data.imageFileName", equalTo(null)) + .body("data.displayOrder", equalTo(0)); + + // Should not return any error when passing correct file and data + + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, "test", 1, pathToTestFile); + createFeatureItemResponse.then().assertThat() + .body("data.content", equalTo("test")) + .body("data.imageFileName", equalTo("coffeeshop.png")) + .body("data.displayOrder", equalTo(1)) + .statusCode(OK.getStatusCode()); + + // Should return bad request error when passing incorrect file type + + pathToTestFile = "src/test/resources/tab/test.tab"; + createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, "test", 0, pathToTestFile); + createFeatureItemResponse.then().assertThat() + .body("message", equalTo(BundleUtil.getStringFromBundle("dataverse.create.featuredItem.error.invalidFileType"))) + .statusCode(BAD_REQUEST.getStatusCode()); + + // Should return unauthorized error when user has no permissions + + Response createRandomUser = UtilIT.createRandomUser(); + String randomUserApiToken = UtilIT.getApiTokenFromResponse(createRandomUser); + createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, randomUserApiToken, "test", 0, pathToTestFile); + createFeatureItemResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); + + // Should return not found error when dataverse does not exist + + createFeatureItemResponse = UtilIT.createDataverseFeaturedItem("thisDataverseDoesNotExist", apiToken, "test", 0, pathToTestFile); + createFeatureItemResponse.then().assertThat() + .body("message", equalTo("Can't find dataverse with identifier='thisDataverseDoesNotExist'")) + .statusCode(NOT_FOUND.getStatusCode()); + } + + @Test + public void testListFeaturedItems() { + Response createUserResponse = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + // Create test items + + List ids = Arrays.asList(0L, 0L, 0L); + List contents = Arrays.asList("Content 1", "Content 2", "Content 3"); + List orders = Arrays.asList(2, 1, 0); + List keepFiles = Arrays.asList(false, false, false); + List pathsToFiles = Arrays.asList("src/test/resources/images/coffeeshop.png", null, null); + + Response 
updateDataverseFeaturedItemsResponse = UtilIT.updateDataverseFeaturedItems(dataverseAlias, ids, contents, orders, keepFiles, pathsToFiles, apiToken); + updateDataverseFeaturedItemsResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Items should be retrieved with all their properties and sorted by displayOrder + + Response listDataverseFeaturedItemsResponse = UtilIT.listDataverseFeaturedItems(dataverseAlias, apiToken); + listDataverseFeaturedItemsResponse.then().assertThat() + .body("data.size()", equalTo(3)) + .body("data[0].content", equalTo("Content 3")) + .body("data[0].imageFileName", equalTo(null)) + .body("data[0].imageFileUrl", equalTo(null)) + .body("data[0].displayOrder", equalTo(0)) + .body("data[1].content", equalTo("Content 2")) + .body("data[1].imageFileName", equalTo(null)) + .body("data[1].imageFileUrl", equalTo(null)) + .body("data[1].displayOrder", equalTo(1)) + .body("data[2].content", equalTo("Content 1")) + .body("data[2].imageFileName", equalTo("coffeeshop.png")) + .body("data[2].imageFileUrl", containsString("/api/access/dataverseFeaturedItemImage/")) + .body("data[2].displayOrder", equalTo(2)) + .statusCode(OK.getStatusCode()); + + // Should return not found error when dataverse does not exist + + listDataverseFeaturedItemsResponse = UtilIT.listDataverseFeaturedItems("thisDataverseDoesNotExist", apiToken); + listDataverseFeaturedItemsResponse.then().assertThat() + .body("message", equalTo("Can't find dataverse with identifier='thisDataverseDoesNotExist'")) + .statusCode(NOT_FOUND.getStatusCode()); + + } + + @Test + public void testUpdateFeaturedItems() { + Response createUserResponse = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + String baseUri = UtilIT.getRestAssuredBaseUri(); + + // Create new items + + List ids = Arrays.asList(0L, 0L, 0L); + List contents = Arrays.asList("Content 1", "Content 2", "Content 3"); + List orders = Arrays.asList(0, 1, 2); + List keepFiles = Arrays.asList(false, false, false); + List pathsToFiles = Arrays.asList("src/test/resources/images/coffeeshop.png", null, null); + + Response updateDataverseFeaturedItemsResponse = UtilIT.updateDataverseFeaturedItems(dataverseAlias, ids, contents, orders, keepFiles, pathsToFiles, apiToken); + updateDataverseFeaturedItemsResponse.then().assertThat() + .body("data.size()", equalTo(3)) + .body("data[0].content", equalTo("Content 1")) + .body("data[0].imageFileName", equalTo("coffeeshop.png")) + .body("data[0].imageFileUrl", containsString("/api/access/dataverseFeaturedItemImage/")) + .body("data[0].displayOrder", equalTo(0)) + .body("data[1].content", equalTo("Content 2")) + .body("data[1].imageFileName", equalTo(null)) + .body("data[1].imageFileUrl", equalTo(null)) + .body("data[1].displayOrder", equalTo(1)) + .body("data[2].content", equalTo("Content 3")) + .body("data[2].imageFileName", equalTo(null)) + .body("data[2].imageFileUrl", equalTo(null)) + .body("data[2].displayOrder", equalTo(2)) + .statusCode(OK.getStatusCode()); + + Long firstItemId = JsonPath.from(updateDataverseFeaturedItemsResponse.body().asString()).getLong("data[0].id"); + Long secondItemId = JsonPath.from(updateDataverseFeaturedItemsResponse.body().asString()).getLong("data[1].id"); + Long thirdItemId = 
JsonPath.from(updateDataverseFeaturedItemsResponse.body().asString()).getLong("data[2].id"); + + // Update first item (content, order, and keeping image), delete the rest and create new items + + ids = Arrays.asList(firstItemId, 0L, 0L); + contents = Arrays.asList("Content 1 updated", "Content 2", "Content 3"); + orders = Arrays.asList(1, 0, 2); + keepFiles = Arrays.asList(true, false, false); + pathsToFiles = Arrays.asList(null, null, null); + + updateDataverseFeaturedItemsResponse = UtilIT.updateDataverseFeaturedItems(dataverseAlias, ids, contents, orders, keepFiles, pathsToFiles, apiToken); + updateDataverseFeaturedItemsResponse.then().assertThat() + .body("data.size()", equalTo(3)) + .body("data[0].content", equalTo("Content 2")) + .body("data[0].imageFileName", equalTo(null)) + .body("data[0].imageFileUrl", equalTo(null)) + .body("data[0].displayOrder", equalTo(0)) + .body("data[1].content", equalTo("Content 1 updated")) + .body("data[1].imageFileName", equalTo("coffeeshop.png")) + .body("data[1].imageFileUrl", containsString("/api/access/dataverseFeaturedItemImage/")) + .body("data[1].displayOrder", equalTo(1)) + .body("data[2].content", equalTo("Content 3")) + .body("data[2].imageFileName", equalTo(null)) + .body("data[2].imageFileUrl", equalTo(null)) + .body("data[2].displayOrder", equalTo(2)) + .statusCode(OK.getStatusCode()); + + Long firstItemIdAfterUpdate = JsonPath.from(updateDataverseFeaturedItemsResponse.body().asString()).getLong("data[1].id"); + Long secondItemIdAfterUpdate = JsonPath.from(updateDataverseFeaturedItemsResponse.body().asString()).getLong("data[0].id"); + Long thirdItemIdAfterUpdate = JsonPath.from(updateDataverseFeaturedItemsResponse.body().asString()).getLong("data[2].id"); + + assertEquals(firstItemId, firstItemIdAfterUpdate); + assertNotEquals(secondItemId, secondItemIdAfterUpdate); + assertNotEquals(thirdItemId, thirdItemIdAfterUpdate); + + // Update first item (removing image), update second item (adding image), delete the third item and create a new item + + ids = Arrays.asList(firstItemId, secondItemIdAfterUpdate, 0L); + contents = Arrays.asList("Content 1 updated", "Content 2", "Content 3"); + orders = Arrays.asList(1, 0, 2); + keepFiles = Arrays.asList(false, false, false); + pathsToFiles = Arrays.asList(null, "src/test/resources/images/coffeeshop.png", null); + + updateDataverseFeaturedItemsResponse = UtilIT.updateDataverseFeaturedItems(dataverseAlias, ids, contents, orders, keepFiles, pathsToFiles, apiToken); + updateDataverseFeaturedItemsResponse.then().assertThat() + .body("data.size()", equalTo(3)) + .body("data[0].content", equalTo("Content 2")) + .body("data[0].imageFileName", equalTo("coffeeshop.png")) + .body("data[0].imageFileUrl", containsString("/api/access/dataverseFeaturedItemImage/")) + .body("data[0].displayOrder", equalTo(0)) + .body("data[1].content", equalTo("Content 1 updated")) + .body("data[1].imageFileName", equalTo(null)) + .body("data[1].imageFileUrl", equalTo(null)) + .body("data[1].displayOrder", equalTo(1)) + .body("data[2].content", equalTo("Content 3")) + .body("data[2].imageFileName", equalTo(null)) + .body("data[2].imageFileUrl", equalTo(null)) + .body("data[2].displayOrder", equalTo(2)) + .statusCode(OK.getStatusCode()); + + Long firstItemIdAfterSecondUpdate = JsonPath.from(updateDataverseFeaturedItemsResponse.body().asString()).getLong("data[1].id"); + Long secondItemIdAfterSecondUpdate = JsonPath.from(updateDataverseFeaturedItemsResponse.body().asString()).getLong("data[0].id"); + Long thirdItemIdAfterSecondUpdate 
= JsonPath.from(updateDataverseFeaturedItemsResponse.body().asString()).getLong("data[2].id"); + + assertEquals(firstItemId, firstItemIdAfterSecondUpdate); + assertEquals(secondItemIdAfterUpdate, secondItemIdAfterSecondUpdate); + assertNotEquals(thirdItemIdAfterUpdate, thirdItemIdAfterSecondUpdate); + + // Only keep first featured item + + ids = List.of(firstItemId); + contents = List.of("Content 1 updated"); + orders = List.of(0); + keepFiles = List.of(false); + pathsToFiles = null; + + updateDataverseFeaturedItemsResponse = UtilIT.updateDataverseFeaturedItems(dataverseAlias, ids, contents, orders, keepFiles, pathsToFiles, apiToken); + updateDataverseFeaturedItemsResponse.then().assertThat() + .body("data.size()", equalTo(1)) + .body("data[0].content", equalTo("Content 1 updated")) + .body("data[0].imageFileName", equalTo(null)) + .body("data[0].imageFileUrl", equalTo(null)) + .body("data[0].displayOrder", equalTo(0)) + .statusCode(OK.getStatusCode()); + + // Should return unauthorized error when user has no permissions + + Response createRandomUser = UtilIT.createRandomUser(); + String randomUserApiToken = UtilIT.getApiTokenFromResponse(createRandomUser); + updateDataverseFeaturedItemsResponse = UtilIT.updateDataverseFeaturedItems(dataverseAlias, ids, contents, orders, keepFiles, pathsToFiles, randomUserApiToken); + updateDataverseFeaturedItemsResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); + + // Should return not found error when dataverse does not exist + + updateDataverseFeaturedItemsResponse = UtilIT.updateDataverseFeaturedItems("thisDataverseDoesNotExist", ids, contents, orders, keepFiles, pathsToFiles, apiToken); + updateDataverseFeaturedItemsResponse.then().assertThat() + .body("message", equalTo("Can't find dataverse with identifier='thisDataverseDoesNotExist'")) + .statusCode(NOT_FOUND.getStatusCode()); + } + + @Test + public void testDeleteFeaturedItems() { + Response createUserResponse = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + // Create test featured items + + List ids = Arrays.asList(0L, 0L, 0L); + List contents = Arrays.asList("Content 1", "Content 2", "Content 3"); + List orders = Arrays.asList(0, 1, 2); + List keepFiles = Arrays.asList(false, false, false); + List pathsToFiles = Arrays.asList("src/test/resources/images/coffeeshop.png", null, null); + + Response updateDataverseFeaturedItemsResponse = UtilIT.updateDataverseFeaturedItems(dataverseAlias, ids, contents, orders, keepFiles, pathsToFiles, apiToken); + updateDataverseFeaturedItemsResponse.then().assertThat() + .body("data.size()", equalTo(3)) + .statusCode(OK.getStatusCode()); + + // Check that the featured items are successfully deleted when calling the delete endpoint + + Response deleteDataverseFeaturedItemsResponse = UtilIT.deleteDataverseFeaturedItems(dataverseAlias, apiToken); + deleteDataverseFeaturedItemsResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response listFeaturedItemsResponse = UtilIT.listDataverseFeaturedItems(dataverseAlias, apiToken); + listFeaturedItemsResponse.then() + .body("data.size()", equalTo(0)) + .assertThat().statusCode(OK.getStatusCode()); + + // Should return unauthorized error when user has no permissions + + Response createRandomUser = 
UtilIT.createRandomUser(); + String randomUserApiToken = UtilIT.getApiTokenFromResponse(createRandomUser); + deleteDataverseFeaturedItemsResponse = UtilIT.deleteDataverseFeaturedItems(dataverseAlias, randomUserApiToken); + deleteDataverseFeaturedItemsResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); + + // Should return not found error when dataverse does not exist + + deleteDataverseFeaturedItemsResponse = UtilIT.deleteDataverseFeaturedItems("thisDataverseDoesNotExist", apiToken); + deleteDataverseFeaturedItemsResponse.then().assertThat() + .body("message", equalTo("Can't find dataverse with identifier='thisDataverseDoesNotExist'")) + .statusCode(NOT_FOUND.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java index 22abf6fa2e3..1956e0eb8df 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java @@ -11,11 +11,11 @@ import java.nio.file.Paths; import jakarta.json.Json; import jakarta.json.JsonArray; -import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonReader; import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import static jakarta.ws.rs.core.Response.Status.CREATED; +import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; import static jakarta.ws.rs.core.Response.Status.OK; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; @@ -37,6 +37,108 @@ public void testGetExternalTools() { getExternalTools.prettyPrint(); } + @Test + public void testExternalToolsNonAdminEndpoint() { + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + createUser.then().assertThat() + .statusCode(OK.getStatusCode()); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + UtilIT.setSuperuserStatus(username, true); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.prettyPrint(); + createDataverseResponse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDataset.prettyPrint(); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = JsonPath.from(createDataset.getBody().asString()).getInt("data.id"); + String datasetPid = JsonPath.from(createDataset.getBody().asString()).getString("data.persistentId"); + + String toolManifest = """ +{ + "displayName": "Dataset Configurator", + "description": "Slices! Dices! 
More info.", + "types": [ + "configure" + ], + "scope": "dataset", + "toolUrl": "https://datasetconfigurator.com", + "toolParameters": { + "queryParameters": [ + { + "datasetPid": "{datasetPid}" + }, + { + "localeCode": "{localeCode}" + } + ] + } + } +"""; + + Response addExternalTool = UtilIT.addExternalTool(JsonUtil.getJsonObject(toolManifest), apiToken); + addExternalTool.prettyPrint(); + addExternalTool.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.displayName", CoreMatchers.equalTo("Dataset Configurator")); + + Long toolId = JsonPath.from(addExternalTool.getBody().asString()).getLong("data.id"); + Response getExternalToolsByDatasetId = UtilIT.getExternalToolForDatasetById(datasetId.toString(), "configure", apiToken, toolId.toString()); + getExternalToolsByDatasetId.prettyPrint(); + getExternalToolsByDatasetId.then().assertThat() + .body("data.displayName", CoreMatchers.equalTo("Dataset Configurator")) + .body("data.scope", CoreMatchers.equalTo("dataset")) + .body("data.types[0]", CoreMatchers.equalTo("configure")) + .body("data.toolUrlWithQueryParams", CoreMatchers.equalTo("https://datasetconfigurator.com?datasetPid=" + datasetPid)) + .statusCode(OK.getStatusCode()); + + Response getExternalTools = UtilIT.getExternalTools(apiToken); + getExternalTools.prettyPrint(); + getExternalTools.then().assertThat() + .statusCode(OK.getStatusCode()); + Response getExternalTool = UtilIT.getExternalTool(toolId, apiToken); + getExternalTool.prettyPrint(); + getExternalTool.then().assertThat() + .statusCode(OK.getStatusCode()); + + // non superuser can only view tools + UtilIT.setSuperuserStatus(username, false); + getExternalTools = UtilIT.getExternalTools(apiToken); + getExternalTools.then().assertThat() + .statusCode(OK.getStatusCode()); + getExternalToolsByDatasetId = UtilIT.getExternalToolForDatasetById(datasetId.toString(), "configure", apiToken, toolId.toString()); + getExternalToolsByDatasetId.prettyPrint(); + getExternalToolsByDatasetId.then().assertThat() + .statusCode(OK.getStatusCode()); + + //Add by non-superuser will fail + addExternalTool = UtilIT.addExternalTool(JsonUtil.getJsonObject(toolManifest), apiToken); + addExternalTool.then().assertThat() + .statusCode(FORBIDDEN.getStatusCode()) + .body("message", CoreMatchers.equalTo("Superusers only.")); + + //Delete by non-superuser will fail + Response deleteExternalTool = UtilIT.deleteExternalTool(toolId, apiToken); + deleteExternalTool.then().assertThat() + .statusCode(FORBIDDEN.getStatusCode()) + .body("message", CoreMatchers.equalTo("Superusers only.")); + + //Delete the tool added by this test... 
+ UtilIT.setSuperuserStatus(username, true); + deleteExternalTool = UtilIT.deleteExternalTool(toolId, apiToken); + deleteExternalTool.prettyPrint(); + deleteExternalTool.then().assertThat() + .statusCode(OK.getStatusCode()); + } + @Test public void testFileLevelTool1() { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 340eab161bb..f84c5ad1a20 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java @@ -268,7 +268,7 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte assertEquals("inActive", clientStatus, "Unexpected client status: "+clientStatus); // b) Confirm that it has actually succeeded: - assertEquals("SUCCESS", responseJsonPath.getString("data.lastResult"), "Last harvest not reported a success (took "+i+" seconds)"); + assertTrue(responseJsonPath.getString("data.lastResult").contains("Completed"), "Last harvest not reported a success (took "+i+" seconds)"); String harvestTimeStamp = responseJsonPath.getString("data.lastHarvest"); assertNotNull(harvestTimeStamp); @@ -288,6 +288,8 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte // Let's give the asynchronous indexing an extra sec. to finish: Thread.sleep(1000L); + // Requires the index-harvested-metadata-source Flag feature to be enabled to search on the nickName + // Otherwise, the search must be performed with metadataSource:Harvested Response searchHarvestedDatasets = UtilIT.search("metadataSource:" + nickName, normalUserAPIKey); searchHarvestedDatasets.then().assertThat().statusCode(OK.getStatusCode()); searchHarvestedDatasets.prettyPrint(); @@ -299,6 +301,11 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte } // verify count after collecting global ids assertEquals(expectedNumberOfSetsHarvested, jsonPath.getInt("data.total_count")); + + // ensure the publisher name is present in the harvested dataset citation + Response harvestedDataverse = given().get(ARCHIVE_URL + "/api/dataverses/1"); + String harvestedDataverseName = harvestedDataverse.getBody().jsonPath().getString("data.name"); + assertTrue(jsonPath.getString("data.items[0].citation").contains(harvestedDataverseName)); // Fail if it hasn't completed in maxWait seconds assertTrue(i < maxWait); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java index 3b0b56740eb..c0312a5d46c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java @@ -27,7 +27,7 @@ public static void setUpClass() { void testListMetadataBlocks() { // No optional params enabled Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks(false, false); - int expectedDefaultNumberOfMetadataBlocks = 6; + int expectedDefaultNumberOfMetadataBlocks = 7; listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].fields", equalTo(null)) @@ -44,8 +44,7 @@ void testListMetadataBlocks() { // returnDatasetFieldTypes=true listMetadataBlocksResponse = UtilIT.listMetadataBlocks(false, true); - int expectedNumberOfMetadataFields = 35; - listMetadataBlocksResponse.prettyPrint(); + int expectedNumberOfMetadataFields = 35; // 80 - 45 child duplicates; listMetadataBlocksResponse.then().assertThat() 
.statusCode(OK.getStatusCode()) .body("data[0].fields", not(equalTo(null))) @@ -57,7 +56,7 @@ void testListMetadataBlocks() { // onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true listMetadataBlocksResponse = UtilIT.listMetadataBlocks(true, true); listMetadataBlocksResponse.prettyPrint(); - expectedNumberOfMetadataFields = 10; + expectedNumberOfMetadataFields = 10; // 28 - 18 child duplicates listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].fields", not(equalTo(null))) @@ -107,5 +106,34 @@ void testDatasetWithAllDefaultMetadata() { createDataset.then().assertThat() .body("status", CoreMatchers.equalTo("OK")); } + + @Test + void testDatasetWithAdditionalDefaultMetadata() { + // given + Response createUser = UtilIT.createRandomUser(); + assumeTrue(createUser.statusCode() < 300, + "code=" + createUser.statusCode() + + ", response=" + createUser.prettyPrint()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + assumeFalse(apiToken == null || apiToken.isBlank()); + + Response createCollection = UtilIT.createRandomDataverse(apiToken); + assumeTrue(createCollection.statusCode() < 300, + "code=" + createCollection.statusCode() + + ", response=" + createCollection.prettyPrint()); + String dataverseAlias = UtilIT.getAliasFromResponse(createCollection); + assumeFalse(dataverseAlias == null || dataverseAlias.isBlank()); + + // when + String pathToJsonFile = "scripts/api/data/dataset-create-new-additional-default-fields.json"; + Response createDataset = UtilIT.createDatasetViaNativeApi(dataverseAlias, pathToJsonFile, apiToken); + + // then + assertEquals(CREATED.getStatusCode(), createDataset.statusCode(), + "code=" + createDataset.statusCode() + + ", response=" + createDataset.prettyPrint()); + createDataset.then().assertThat() + .body("status", CoreMatchers.equalTo("OK")); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java index d15fda3a1a1..7e0a4714b1f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java @@ -4,6 +4,7 @@ import io.restassured.RestAssured; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; +import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; import java.util.logging.Logger; import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -69,7 +70,15 @@ public void testCreateDeleteRoles() { body = addBuiltinRoleResponse.getBody().asString(); status = JsonPath.from(body).getString("status"); assertEquals("OK", status); - + + Response createNoPermsUser = UtilIT.createRandomUser(); + createNoPermsUser.prettyPrint(); + String noPermsapiToken = UtilIT.getApiTokenFromResponse(createNoPermsUser); + + Response noPermsResponse = UtilIT.viewDataverseRole("testRole", noPermsapiToken); + noPermsResponse.prettyPrint(); + noPermsResponse.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); + Response viewDataverseRoleResponse = UtilIT.viewDataverseRole("testRole", apiToken); viewDataverseRoleResponse.prettyPrint(); body = viewDataverseRoleResponse.getBody().asString(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java index 74150ca120a..c57e50e4acb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java @@ -15,11 +15,17 
@@ import io.restassured.path.json.JsonPath; import io.restassured.response.Response; import io.restassured.specification.RequestSpecification; +import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import static jakarta.ws.rs.core.Response.Status.OK; import java.io.ByteArrayInputStream; +import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.lang3.math.NumberUtils; import static org.hamcrest.CoreMatchers.equalTo; @@ -27,6 +33,8 @@ import org.junit.jupiter.api.Assertions; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -377,6 +385,195 @@ public void testDirectUpload() { String contentsOfDownloadedFile = downloadFile.getBody().asString(); assertEquals(contentsOfFile, contentsOfDownloadedFile); + Response getFileData = UtilIT.getFileData(fileId, apiToken); + getFileData.prettyPrint(); + getFileData.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.label", equalTo("file1.txt")) + .body("data.dataFile.filename", equalTo("file1.txt")) + .body("data.dataFile.contentType", equalTo("text/plain")) + .body("data.dataFile.filesize", equalTo(6)); + + Response deleteFile = UtilIT.deleteFileApi(Integer.parseInt(fileId), apiToken); + deleteFile.prettyPrint(); + deleteFile.then().assertThat().statusCode(200); + + AmazonS3Exception expectedException = null; + try { + s3localstack.getObjectAsString(BUCKET_NAME, keyInS3); + } catch (AmazonS3Exception ex) { + expectedException = ex; + } + assertNotNull(expectedException); + // 404 because the file has been successfully deleted + assertEquals(404, expectedException.getStatusCode()); + + } + + @Test + public void testDirectUploadDetectStataFile() { + String driverId = "localstack1"; + String driverLabel = "LocalStack"; + Response createSuperuser = UtilIT.createRandomUser(); + createSuperuser.then().assertThat().statusCode(200); + String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser); + String superusername = UtilIT.getUsernameFromResponse(createSuperuser); + UtilIT.makeSuperUser(superusername).then().assertThat().statusCode(200); + Response storageDrivers = UtilIT.listStorageDrivers(superuserApiToken); + storageDrivers.prettyPrint(); + // TODO where is "Local/local" coming from?
+ String drivers = """ +{ + "status": "OK", + "data": { + "LocalStack": "localstack1", + "MinIO": "minio1", + "Local": "local", + "Filesystem": "file1" + } +}"""; + + //create user who will make a dataverse/dataset + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(200); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.prettyPrint(); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response originalStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken); + originalStorageDriver.prettyPrint(); + originalStorageDriver.then().assertThat() + .body("data.message", equalTo("undefined")) + .statusCode(200); + + Response setStorageDriverToS3 = UtilIT.setStorageDriver(dataverseAlias, driverLabel, superuserApiToken); + setStorageDriverToS3.prettyPrint(); + setStorageDriverToS3.then().assertThat() + .statusCode(200); + + Response updatedStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken); + updatedStorageDriver.prettyPrint(); + updatedStorageDriver.then().assertThat() + .statusCode(200); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.prettyPrint(); + createDatasetResponse.then().assertThat().statusCode(201); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + String datasetPid = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + String datasetStorageIdentifier = datasetPid.substring(4); + + Response getDatasetMetadata = UtilIT.nativeGet(datasetId, apiToken); + getDatasetMetadata.prettyPrint(); + getDatasetMetadata.then().assertThat().statusCode(200); + + long size = 1000000000l; + Response getUploadUrls = UtilIT.getUploadUrls(datasetPid, size, apiToken); + getUploadUrls.prettyPrint(); + getUploadUrls.then().assertThat().statusCode(200); + + String url = JsonPath.from(getUploadUrls.asString()).getString("data.url"); + String partSize = JsonPath.from(getUploadUrls.asString()).getString("data.partSize"); + String storageIdentifier = JsonPath.from(getUploadUrls.asString()).getString("data.storageIdentifier"); + System.out.println("url: " + url); + System.out.println("partSize: " + partSize); + System.out.println("storageIdentifier: " + storageIdentifier); + + System.out.println("uploading file via direct upload"); + String decodedUrl = null; + try { + decodedUrl = URLDecoder.decode(url, StandardCharsets.UTF_8.name()); + } catch (UnsupportedEncodingException ex) { + } + + // change to localhost because LocalStack is running in a container locally + String localhostUrl = decodedUrl.replace("http://localstack", "http://localhost"); + + Path stataFilePath = Paths.get("scripts/search/data/tabular/stata14-auto-withstrls.dta"); + InputStream inputStream = null; + try { + inputStream = java.nio.file.Files.newInputStream(stataFilePath); + } catch (IOException ex) { + Logger.getLogger(S3AccessIT.class.getName()).log(Level.SEVERE, null, ex); + } + Response uploadFileDirect = UtilIT.uploadFileDirect(localhostUrl, inputStream); + uploadFileDirect.prettyPrint(); + /* + Direct upload to MinIO is failing with errors like this: + + SignatureDoesNotMatch + The request signature we calculated does not match the signature you provided. 
Check your key and signing method. + 10.5072/FK2/KGFCEJ/18b8c06688c-21b8320a3ee5 + mybucket + /mybucket/10.5072/FK2/KGFCEJ/18b8c06688c-21b8320a3ee5 + 1793915CCC5BC95C + dd9025bab4ad464b049177c95eb6ebf374d3b3fd1af9251148b658df7ac2e3e8 + + */ + uploadFileDirect.then().assertThat().statusCode(200); + + // TODO: Use MD5 or whatever Dataverse is configured for and + // actually calculate it. + // + // Note that we falsely set mimeType=application/octet-stream so that + // later we can test file detection. The ".dta" file extension is + // necessary for file detection to work. + String jsonData = """ +{ + "description": "My description.", + "directoryLabel": "data/subdir1", + "categories": [ + "Data" + ], + "restrict": "false", + "storageIdentifier": "%s", + "fileName": "stata14-auto-withstrls.dta", + "mimeType": "application/octet-stream", + "checksum": { + "@type": "SHA-1", + "@value": "123456" + } +} +""".formatted(storageIdentifier); + + // "There was an error when trying to add the new file. File size must be explicitly specified when creating DataFiles with Direct Upload" + Response addRemoteFile = UtilIT.addRemoteFile(datasetId.toString(), jsonData, apiToken); + addRemoteFile.prettyPrint(); + addRemoteFile.then().assertThat() + .statusCode(200); + + String fileId = JsonPath.from(addRemoteFile.asString()).getString("data.files[0].dataFile.id"); + Response getfileMetadata = UtilIT.getFileData(fileId, apiToken); + getfileMetadata.prettyPrint(); + getfileMetadata.then().assertThat().statusCode(200); + + String keyInDataverse = storageIdentifier.split(":")[2]; + Assertions.assertEquals(driverId + "://" + BUCKET_NAME + ":" + keyInDataverse, storageIdentifier); + + String keyInS3 = datasetStorageIdentifier + "/" + keyInDataverse; + // UtilIT.MAXIMUM_INGEST_LOCK_DURATION is 3 but not long enough. + assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION + 3), "Failed test if Ingest Lock exceeds max duration " + keyInS3); + + Response getFileData1 = UtilIT.getFileData(fileId, apiToken); + getFileData1.prettyPrint(); + getFileData1.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.dataFile.originalFileName", equalTo("stata14-auto-withstrls.dta")) + .body("data.dataFile.originalFileFormat", equalTo("application/x-stata-14")) + .body("data.dataFile.filename", equalTo("stata14-auto-withstrls.tab")) + .body("data.dataFile.contentType", equalTo("text/tab-separated-values")); + + Response redetectDryRun = UtilIT.redetectFileType(fileId, false, apiToken); + redetectDryRun.prettyPrint(); + redetectDryRun.then().assertThat() + // Tabular files can't be redetected. 
See discussion in + // https://github.com/IQSS/dataverse/issues/9429 + // and the change in https://github.com/IQSS/dataverse/pull/9768 + .statusCode(BAD_REQUEST.getStatusCode()); + Response deleteFile = UtilIT.deleteFileApi(Integer.parseInt(fileId), apiToken); deleteFile.prettyPrint(); deleteFile.then().assertThat().statusCode(200); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java index c97762526b0..504e5e707c9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java @@ -1303,6 +1303,327 @@ public void testGeospatialSearchInvalid() { } + @Test + public void testRangeQueries() { + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.prettyPrint(); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + // Using the "astrophysics" block because it contains all field types relevant for range queries + // (int, float and date) + Response setMetadataBlocks = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); + setMetadataBlocks.prettyPrint(); + setMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + + JsonObjectBuilder datasetJson = Json.createObjectBuilder() + .add("datasetVersion", Json.createObjectBuilder() + .add("metadataBlocks", Json.createObjectBuilder() + .add("citation", Json.createObjectBuilder() + .add("fields", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("typeName", "title") + .add("value", "Test Astrophysics Dataset") + .add("typeClass", "primitive") + .add("multiple", false) + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("authorName", + Json.createObjectBuilder() + .add("value", "Simpson, Homer") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "authorName")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "author") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("datasetContactEmail", + Json.createObjectBuilder() + .add("value", "hsimpson@mailinator.com") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "datasetContactEmail")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "datasetContact") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("dsDescriptionValue", + Json.createObjectBuilder() + .add("value", "This is a test dataset.") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "dsDescriptionValue")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "dsDescription") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add("Other") + ) + .add("typeClass", "controlledVocabulary") + .add("multiple", true) + .add("typeName", "subject") + ) + ) + ) + .add("astrophysics", Json.createObjectBuilder() + .add("fields", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("typeName", "coverage.Temporal") 
+ .add("typeClass", "compound") + .add("multiple", true) + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("coverage.Temporal.StartTime", + Json.createObjectBuilder() + .add("value", "2015-01-01") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "coverage.Temporal.StartTime") + ) + ) + ) + ) + .add(Json.createObjectBuilder() + .add("typeName", "coverage.ObjectCount") + .add("typeClass", "primitive") + .add("multiple", false) + .add("value", "9000") + ) + .add(Json.createObjectBuilder() + .add("typeName", "coverage.SkyFraction") + .add("typeClass", "primitive") + .add("multiple", false) + .add("value", "0.002") + ) + ) + ) + )); + + Response createDatasetResponse = UtilIT.createDataset(dataverseAlias, datasetJson, apiToken); + createDatasetResponse.prettyPrint(); + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); + String datasetPid = JsonPath.from(createDatasetResponse.getBody().asString()).getString("data.persistentId"); + + // Integer range query: Hit + Response search1 = UtilIT.search("id:dataset_" + datasetId + "_draft AND coverage.ObjectCount:[1000 TO 10000]", apiToken, "&show_entity_ids=true"); + search1.prettyPrint(); + search1.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count", CoreMatchers.is(1)) + .body("data.count_in_response", CoreMatchers.is(1)) + .body("data.items[0].entity_id", CoreMatchers.is(datasetId)); + + // Integer range query: Miss + Response search2 = UtilIT.search("id:dataset_" + datasetId + "_draft AND coverage.ObjectCount:[* TO 1000]", apiToken); + search2.prettyPrint(); + search2.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count", CoreMatchers.is(0)) + .body("data.count_in_response", CoreMatchers.is(0)); + + // Float range query: Hit + Response search3 = UtilIT.search("id:dataset_" + datasetId + "_draft AND coverage.SkyFraction:[0 TO 0.5]", apiToken, "&show_entity_ids=true"); + search3.prettyPrint(); + search3.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count", CoreMatchers.is(1)) + .body("data.count_in_response", CoreMatchers.is(1)) + .body("data.items[0].entity_id", CoreMatchers.is(datasetId)); + + // Float range query: Miss + Response search4 = UtilIT.search("id:dataset_" + datasetId + "_draft AND coverage.SkyFraction:[0.5 TO 1]", apiToken); + search4.prettyPrint(); + search4.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count", CoreMatchers.is(0)) + .body("data.count_in_response", CoreMatchers.is(0)); + + // Date range query: Hit + Response search5 = UtilIT.search("id:dataset_" + datasetId + "_draft AND coverage.Temporal.StartTime:2015", apiToken, "&show_entity_ids=true"); + search5.prettyPrint(); + search5.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count", CoreMatchers.is(1)) + .body("data.count_in_response", CoreMatchers.is(1)) + .body("data.items[0].entity_id", CoreMatchers.is(datasetId)); + + // Date range query: Miss + Response search6 = UtilIT.search("id:dataset_" + datasetId + "_draft AND coverage.Temporal.StartTime:[2020 TO *]", apiToken); + search6.prettyPrint(); + search6.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count", CoreMatchers.is(0)) + .body("data.count_in_response", CoreMatchers.is(0)); + + // Combining all three range queries: Hit + Response search7 = UtilIT.search("id:dataset_" + datasetId + "_draft AND coverage.ObjectCount:[1000 TO 10000] AND coverage.SkyFraction:[0 TO 0.5] 
AND coverage.Temporal.StartTime:2015", apiToken, "&show_entity_ids=true"); + search7.prettyPrint(); + search7.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count", CoreMatchers.is(1)) + .body("data.count_in_response", CoreMatchers.is(1)) + .body("data.items[0].entity_id", CoreMatchers.is(datasetId)); + + // Combining all three range queries: Miss + Response search8 = UtilIT.search("id:dataset_" + datasetId + "_draft AND coverage.ObjectCount:[* TO 1000] AND coverage.SkyFraction:[0.5 TO 1] AND coverage.Temporal.StartTime:[2020 TO *]", apiToken); + search8.prettyPrint(); + search8.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count", CoreMatchers.is(0)) + .body("data.count_in_response", CoreMatchers.is(0)); + + } + + @Test + public void testSearchWithInvalidDateField() { + + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.prettyPrint(); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response setMetadataBlocks = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation"), apiToken); + setMetadataBlocks.prettyPrint(); + setMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode()); + + // Adding a dataset with a date in the "timePeriodCoveredStart" field that doesn't match Solr's date format + // (ISO-8601 format, e.g. YYYY-MM-DDThh:mm:ssZ, YYYYY-MM-DD, YYYY-MM, YYYY) + // (See: https://solr.apache.org/guide/solr/latest/indexing-guide/date-formatting-math.html) + // So the date currently cannot be indexed + JsonObjectBuilder datasetJson = Json.createObjectBuilder() + .add("datasetVersion", Json.createObjectBuilder() + .add("metadataBlocks", Json.createObjectBuilder() + .add("citation", Json.createObjectBuilder() + .add("fields", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("typeName", "title") + .add("value", "Test Dataset") + .add("typeClass", "primitive") + .add("multiple", false) + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("authorName", + Json.createObjectBuilder() + .add("value", "Simpson, Homer") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "authorName")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "author") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("datasetContactEmail", + Json.createObjectBuilder() + .add("value", "hsimpson@mailinator.com") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "datasetContactEmail")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "datasetContact") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("dsDescriptionValue", + Json.createObjectBuilder() + .add("value", "This is a test dataset.") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "dsDescriptionValue")) + ) + ) + .add("typeClass", "compound") + .add("multiple", true) + .add("typeName", "dsDescription") + ) + .add(Json.createObjectBuilder() + .add("value", Json.createArrayBuilder() + .add("Other") + ) + 
.add("typeClass", "controlledVocabulary") + .add("multiple", true) + .add("typeName", "subject") + ) + .add(Json.createObjectBuilder() + .add("typeName", "timePeriodCovered") + .add("typeClass", "compound") + .add("multiple", true) + .add("value", Json.createArrayBuilder() + .add(Json.createObjectBuilder() + .add("timePeriodCoveredStart", + Json.createObjectBuilder() + .add("value", "15-01-01") + .add("typeClass", "primitive") + .add("multiple", false) + .add("typeName", "timePeriodCoveredStart") + ) + ) + ) + ) + ) + ) + )); + + Response createDatasetResponse = UtilIT.createDataset(dataverseAlias, datasetJson, apiToken); + createDatasetResponse.prettyPrint(); + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); + String datasetPid = JsonPath.from(createDatasetResponse.getBody().asString()).getString("data.persistentId"); + + // When querying on the date field: miss (because the date field was skipped during indexing) + Response search1 = UtilIT.search("id:dataset_" + datasetId + "_draft AND timePeriodCoveredStart:[2000 TO 2020]", apiToken); + search1.prettyPrint(); + search1.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count", CoreMatchers.is(0)) + .body("data.count_in_response", CoreMatchers.is(0)); + + // When querying not on the date field: the dataset can be found (only the date field was skipped during indexing, not the entire dataset) + Response search2 = UtilIT.search("id:dataset_" + datasetId + "_draft", apiToken, "&show_entity_ids=true"); + search2.prettyPrint(); + search2.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count", CoreMatchers.is(1)) + .body("data.count_in_response", CoreMatchers.is(1)) + .body("data.items[0].entity_id", CoreMatchers.is(datasetId)); + + } + @AfterEach public void tearDownDataverse() { File treesThumb = new File("scripts/search/data/binary/trees.png.thumb48"); @@ -1423,18 +1744,21 @@ public void testSearchFilesAndUrlImages() throws InterruptedException { } @Test - public void testShowTypeCounts() { + public void testShowTypeCounts() throws InterruptedException { //Create 1 user and 1 Dataverse/Collection Response createUser = UtilIT.createRandomUser(); String username = UtilIT.getUsernameFromResponse(createUser); String apiToken = UtilIT.getApiTokenFromResponse(createUser); String affiliation = "testAffiliation"; - // test total_count_per_object_type is not included because the results are empty + // test total_count_per_object_type is included with zero counts for each type Response searchResp = UtilIT.search(username, apiToken, "&show_type_counts=true"); searchResp.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.total_count_per_object_type", CoreMatchers.equalTo(null)); + .body("data.total_count_per_object_type.Dataverses", CoreMatchers.is(0)) + .body("data.total_count_per_object_type.Datasets", CoreMatchers.is(0)) + .body("data.total_count_per_object_type.Files", CoreMatchers.is(0)); + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken, affiliation); assertEquals(201, createDataverseResponse.getStatusCode()); @@ -1461,6 +1785,7 @@ public void testShowTypeCounts() { // This call forces a wait for dataset indexing to finish and gives time for file uploads to complete UtilIT.search("id:dataset_" + datasetId, apiToken); + UtilIT.sleepForReindex(datasetId, apiToken, 3); } // Test Search without show_type_counts @@ -1476,10 +1801,41 @@ public void testShowTypeCounts() { // Test Search with show_type_counts = TRUE searchResp = 
UtilIT.search(dataverseAlias, apiToken, "&show_type_counts=true"); searchResp.prettyPrint(); + searchResp.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.total_count_per_object_type.Dataverses", CoreMatchers.is(1)) .body("data.total_count_per_object_type.Datasets", CoreMatchers.is(3)) .body("data.total_count_per_object_type.Files", CoreMatchers.is(6)); + + + + // go through the same exercise with only a collection to verify that Dataasets and Files + // are there with a count of 0 + + createDataverseResponse = UtilIT.createRandomDataverse(apiToken, affiliation); + assertEquals(201, createDataverseResponse.getStatusCode()); + dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + sleep(4000); //make sure new dataverse gets indexed + + // Test Search without show_type_counts + searchResp = UtilIT.search(dataverseAlias, apiToken); + searchResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count_per_object_type", CoreMatchers.equalTo(null)); + // Test Search with show_type_counts = FALSE + searchResp = UtilIT.search(dataverseAlias, apiToken, "&show_type_counts=false"); + searchResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count_per_object_type", CoreMatchers.equalTo(null)); + // Test Search with show_type_counts = TRUE + searchResp = UtilIT.search(dataverseAlias, apiToken, "&show_type_counts=true"); + searchResp.prettyPrint(); + searchResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.total_count_per_object_type.Dataverses", CoreMatchers.is(1)) + .body("data.total_count_per_object_type.Datasets", CoreMatchers.is(0)) + .body("data.total_count_per_object_type.Files", CoreMatchers.is(0)); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SendFeedbackApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SendFeedbackApiIT.java new file mode 100644 index 00000000000..000118a370f --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/api/SendFeedbackApiIT.java @@ -0,0 +1,243 @@ +package edu.harvard.iq.dataverse.api; + +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; +import io.restassured.RestAssured; +import io.restassured.path.json.JsonPath; +import io.restassured.response.Response; +import jakarta.json.Json; +import jakarta.json.JsonObjectBuilder; +import org.hamcrest.CoreMatchers; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import java.text.MessageFormat; + +import static jakarta.ws.rs.core.Response.Status.*; +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class SendFeedbackApiIT { + + @BeforeAll + public static void setUpClass() { + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + } + + @AfterEach + public void reset() { + UtilIT.deleteSetting(SettingsServiceBean.Key.RateLimitingCapacityByTierAndAction); + } + + @Test + public void testBadJson() { + Response response = UtilIT.sendFeedback("{'notValidJson'", null); + response.prettyPrint(); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", CoreMatchers.startsWith("Invalid JSON; error message:")); + } + + @Test + public void testSupportRequest() { + JsonObjectBuilder job = Json.createObjectBuilder(); + job.add("fromEmail", "from@mailinator.com"); + job.add("subject", "Help!"); + job.add("body", "I need help."); + + Response response = UtilIT.sendFeedback(job, null); + response.prettyPrint(); + 
response.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].fromEmail", CoreMatchers.equalTo("from@mailinator.com")); + } + + @Test + public void testSendFeedbackOnRootDataverse() { + JsonObjectBuilder job = Json.createObjectBuilder(); + long rootDataverseId = 1; + job.add("targetId", rootDataverseId); + job.add("fromEmail", "from@mailinator.com"); + job.add("toEmail", "to@mailinator.com"); + job.add("subject", "collaboration"); + job.add("body", "Are you interested writing a grant based on this research?"); + + Response response = UtilIT.sendFeedback(job, null); + response.prettyPrint(); + response.then().assertThat() + .statusCode(OK.getStatusCode()); + + job = Json.createObjectBuilder(); + job.add("identifier", "root"); + job.add("fromEmail", "from@mailinator.com"); + job.add("toEmail", "to@mailinator.com"); + job.add("subject", "collaboration"); + job.add("body", "Are you interested writing a grant based on this research?"); + + response = UtilIT.sendFeedback(job, null); + response.prettyPrint(); + response.then().assertThat() + .statusCode(OK.getStatusCode()); + } + + @Test + public void testSendFeedbackOnDataset() { + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + createUser.then().assertThat() + .statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + String fromEmail = UtilIT.getEmailFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + String pathToJsonFile = "scripts/api/data/dataset-create-new-all-default-fields.json"; + Response createDataset = UtilIT.createDatasetViaNativeApi(dataverseAlias, pathToJsonFile, apiToken); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + long datasetId = JsonPath.from(createDataset.body().asString()).getLong("data.id"); + String persistentId = JsonPath.from(createDataset.body().asString()).getString("data.persistentId"); + Response response; + String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(String.valueOf(datasetId), pathToFile, apiToken); + uploadResponse.prettyPrint(); + long fileId = JsonPath.from(uploadResponse.body().asString()).getLong("data.files[0].dataFile.id"); + + // Test with body text length to long (length of body after sanitizing/removing html = 67) + UtilIT.setSetting(SettingsServiceBean.Key.ContactFeedbackMessageSizeLimit, "60"); + response = UtilIT.sendFeedback(buildJsonEmail(0, persistentId, null), apiToken); + response.prettyPrint(); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", CoreMatchers.equalTo(MessageFormat.format(BundleUtil.getStringFromBundle("sendfeedback.body.error.exceedsLength"), 67, 60))); + // reset to unlimited + UtilIT.setSetting(SettingsServiceBean.Key.ContactFeedbackMessageSizeLimit, "0"); + + // Test with no body/body length =0 + response = UtilIT.sendFeedback(Json.createObjectBuilder().add("targetId", datasetId).add("subject", "collaboration").add("body", ""), apiToken); + response.prettyPrint(); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", CoreMatchers.equalTo(BundleUtil.getStringFromBundle("sendfeedback.body.error.isEmpty"))); + + // Test with missing subject + response = 
UtilIT.sendFeedback(Json.createObjectBuilder().add("targetId", datasetId).add("body", ""), apiToken); + response.prettyPrint(); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", CoreMatchers.equalTo(BundleUtil.getStringFromBundle("sendfeedback.body.error.missingRequiredFields"))); + + // Test send feedback on DataFile + // Test don't send fromEmail. Let it get it from the requesting user + response = UtilIT.sendFeedback(buildJsonEmail(fileId, null, null), apiToken); + response.prettyPrint(); + response.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].fromEmail", CoreMatchers.equalTo(fromEmail)); + + // Test guest calling with no token + fromEmail = "testEmail@example.com"; + response = UtilIT.sendFeedback(buildJsonEmail(datasetId, null, fromEmail), null); + response.prettyPrint(); + response.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].fromEmail", CoreMatchers.equalTo(fromEmail)); + validateEmail(response.body().asString()); + + // Test guest calling with no token and missing email + response = UtilIT.sendFeedback(buildJsonEmail(datasetId, null, null), null); + response.prettyPrint(); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", CoreMatchers.equalTo(BundleUtil.getStringFromBundle("sendfeedback.fromEmail.error.missing"))); + + // Test with invalid email - also tests that fromEmail trumps the users email if it is included in the Json + response = UtilIT.sendFeedback(buildJsonEmail(datasetId, null, "BADEmail"), apiToken); + response.prettyPrint(); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", CoreMatchers.equalTo(MessageFormat.format(BundleUtil.getStringFromBundle("sendfeedback.fromEmail.error.invalid"), "BADEmail"))); + + // Test with bad identifier + response = UtilIT.sendFeedback(buildJsonEmail(0, "BadIdentifier", null), apiToken); + response.prettyPrint(); + response.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", CoreMatchers.equalTo(BundleUtil.getStringFromBundle("sendfeedback.request.error.targetNotFound"))); + } + + private JsonObjectBuilder buildJsonEmail(long targetId, String identifier, String fromEmail) { + JsonObjectBuilder job = Json.createObjectBuilder(); + if (targetId > 0) { + job.add("targetId", targetId); + } + if (identifier != null) { + job.add("identifier", identifier); + } + job.add("subject", "collaboration"); + job.add("body", "Are you interested writing a grant based on this research? {\", ''", + "'', ''", + // make sure we do not destroy the tags + "'\"Galactic', '\"Galactic'", + // make sure we do not destroy the tags + "'\"Galactic', '\"Galactic'", + "'

… hellohello</ … '",
+ "' … hello … ', ' … hello … '",
+ // make sure we keep text as it is when it is not html
+ "'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.', 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.'",
+ // Should add noopener noreferrer attributes to tags and keep classes
+ "' … A title … Lorem ipsumtestlink … ', ' … A title … Lorem ipsumtestlink …
'", + "NULL, NULL" + }, nullValues = {"NULL"}) + public void testSanitizeAdvancedHTML(String unsafe, String safe) { + String sanitizedOutput = MarkupChecker.sanitizeAdvancedHTML(unsafe); + + // Normalize both the expected and actual content by removing whitespaces + + String normalizedSafe = null; + if (safe != null) { + normalizedSafe = safe.replaceAll("\\s+", "").trim(); + } + + String normalizedOutput = null; + if (sanitizedOutput != null) { + normalizedOutput = sanitizedOutput.replaceAll("\\s+", "").trim(); + } + + assertEquals(normalizedSafe, normalizedOutput); + } + /** * Test of stripAllTags method, of class MarkupChecker. */ @@ -37,6 +76,7 @@ public void testSanitizeBasicHTML(String unsafe, String safe) { "'', ''", "NULL, NULL", "Johnson & Johnson <>, Johnson & Johnson <>", + "

… Johnson & Johnson …
, Johnson & Johnson", "Johnson && Johnson <&>&, Johnson && Johnson <&>&" }, nullValues = {"NULL"}) public void testStripAllTags(String unsafe, String safe) { diff --git a/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java index 89f04e0cd5a..248c91d3934 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java @@ -133,7 +133,7 @@ public void testGuestUserGettingRateLimited() { } String key = RateLimitUtil.generateCacheKey(guestUser, action.getClass().getSimpleName()); assertTrue(cache.rateLimitCache.containsKey(key)); - assertTrue(rateLimited && cnt > 1 && cnt <= 30, "rateLimited:"+rateLimited + " cnt:"+cnt); + assertTrue(rateLimited && cnt > 1 && cnt <= 31, "rateLimited:"+rateLimited + " cnt:"+cnt); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java index 5ddcc190993..0049dd0d8f1 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/cache/RateLimitUtilTest.java @@ -148,5 +148,7 @@ private void resetRateLimitUtil(SystemConfig config, boolean enable) { doReturn(enable ? "100,200" : "").when(config).getRateLimitingDefaultCapacityTiers(); RateLimitUtil.rateLimitMap.clear(); RateLimitUtil.rateLimits.clear(); + // first call after setting changes returns RESET_CACHE so ignore it + RateLimitUtil.getCapacity(config, GuestUser.get(), "GetPrivateUrlCommand"); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java index 7ec8e0b25f3..1987307637c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java @@ -25,6 +25,7 @@ import jakarta.json.JsonString; import edu.harvard.iq.dataverse.util.BundleUtil; +import org.assertj.core.util.Lists; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.BeforeEach; @@ -268,6 +269,54 @@ public void testDatasetContactWithPrivacy() { } + @Test + public void testDatasetFieldTypesWithChildren() { + MetadataBlock block = new MetadataBlock(); + block.setId(0L); + block.setName("citation"); + long id = 0L; + // create datasetFieldTypes + List datasetFieldTypes = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + DatasetFieldType dft = new DatasetFieldType(); + dft.setId(id++); + dft.setDisplayOrder(i); + dft.setMetadataBlock(block); + dft.setFieldType(FieldType.TEXT); + dft.setName("subType" + dft.getId()); + dft.setTitle(dft.getName()); + dft.setChildDatasetFieldTypes(Lists.emptyList()); + datasetFieldTypes.add(dft); + } + // add DatasetFieldType as children to another DatasetFieldType to test the suppression of duplicate data + // adding 3 and 4 as children of 2 + datasetFieldTypes.get(3).setParentDatasetFieldType(datasetFieldTypes.get(2)); + datasetFieldTypes.get(4).setParentDatasetFieldType(datasetFieldTypes.get(2)); + datasetFieldTypes.get(2).setChildDatasetFieldTypes(List.of(datasetFieldTypes.get(3), datasetFieldTypes.get(4))); + // adding 6 as child of 9 + datasetFieldTypes.get(6).setParentDatasetFieldType(datasetFieldTypes.get(9)); + datasetFieldTypes.get(9).setChildDatasetFieldTypes(List.of(datasetFieldTypes.get(6))); + + 
block.setDatasetFieldTypes(datasetFieldTypes); + + DatasetFieldServiceBean nullDFServiceBean = null; + JsonPrinter.injectSettingsService(new MockSettingsSvc(), nullDFServiceBean); + + JsonObject jsonObject = JsonPrinter.json(block).build(); + assertNotNull(jsonObject); + + System.out.println("json: " + JsonUtil.prettyPrint(jsonObject.toString())); + assertEquals("subType2 subType3", jsonObject.getJsonObject("fields").getJsonObject("subType2") + .getJsonObject("childFields").getJsonObject("subType3").getString("displayName")); + assertEquals("subType2 subType4", jsonObject.getJsonObject("fields").getJsonObject("subType2") + .getJsonObject("childFields").getJsonObject("subType4").getString("displayName")); + assertEquals("subType9 subType6", jsonObject.getJsonObject("fields").getJsonObject("subType9") + .getJsonObject("childFields").getJsonObject("subType6").getString("displayName")); + assertNull(jsonObject.getJsonObject("fields").getJsonObject("subType3")); + assertNull(jsonObject.getJsonObject("fields").getJsonObject("subType4")); + assertNull(jsonObject.getJsonObject("fields").getJsonObject("subType6")); + } + @Test public void testDataversePrinter() { Dataverse dataverse = new Dataverse(); diff --git a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java index c4ee4547ed7..e69793c4d33 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java @@ -183,7 +183,7 @@ public void testZippedTwoShapefiles() throws IOException{ msgt("(2) testZippedTwoShapefiles"); // Create files and put them in a .zip - List file_names = Arrays.asList("shape1.shp", "shape1.shx", "shape1.dbf", "shape1.prj", "shape1.fbn", "shape1.fbx", // 1st shapefile + List file_names = Arrays.asList("shape1.shp", "shape1.shx", "shape1.DBF", "shape1.prj", "shape1.fbn", "shape1.fbx", // 1st shapefile "shape2.shp", "shape2.shx", "shape2.dbf", "shape2.prj", // 2nd shapefile "shape2.txt", "shape2.pdf", "shape2", // single files, same basename as 2nd shapefile "README.MD", "shp_dictionary.xls", "notes" ); //, "prj"); // single files @@ -211,7 +211,7 @@ public void testZippedTwoShapefiles() throws IOException{ assertTrue(file_groups.containsKey("shape2"), "verify key existance of 'shape2'"); // Verify the values - assertEquals(file_groups.get("shape1"), Arrays.asList("shp", "shx", "dbf", "prj", "fbn", "fbx"), "verify value of key 'shape1'"); + assertEquals(file_groups.get("shape1"), Arrays.asList("shp", "shx", "DBF", "prj", "fbn", "fbx"), "verify value of key 'shape1'"); assertEquals(file_groups.get("shape2"), Arrays.asList("shp", "shx", "dbf", "prj", "txt", "pdf", ShapefileHandler.BLANK_EXTENSION), "verify value of key 'shape2'"); // Rezip/Reorder the files @@ -240,7 +240,7 @@ public void testZippedShapefileWithExtraFiles() throws IOException{ msgt("(3) testZippedShapefileWithExtraFiles"); // Create files and put them in a .zip - List file_names = Arrays.asList("shape1.shp", "shape1.shx", "shape1.dbf", "shape1.prj", "shape1.pdf", "shape1.cpg", "shape1." + SHP_XML_EXTENSION, "README.md", "shape_notes.txt"); + List file_names = Arrays.asList("shape1.shp", "shape1.shx", "shape1.dbf", "shape1.prj", "shape1.pdf", "shape1.cpg", "shape1." 
+ SHP_XML_EXTENSION, "README.md", "shape_notes.txt"); File zipfile_obj = createAndZipFiles(file_names, "shape-plus.zip"); // Pass the .zip to the ShapefileHandler diff --git a/src/test/resources/json/export-formats.json b/src/test/resources/json/export-formats.json index b4dc0168629..65fc746ee23 100644 --- a/src/test/resources/json/export-formats.json +++ b/src/test/resources/json/export-formats.json @@ -36,7 +36,7 @@ }, "schema.org": { "displayName": "Schema.org JSON-LD", - "mediaType": "application/json", + "mediaType": "application/ld+json", "isHarvestable": false, "isVisibleInUserInterface": true }, diff --git a/tests/integration-tests.txt b/tests/integration-tests.txt index e1dad7a75b1..73e028b4493 100644 --- a/tests/integration-tests.txt +++ b/tests/integration-tests.txt @@ -1 +1 @@ -DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT,LogoutIT,DataRetrieverApiIT,ProvIT,S3AccessIT,OpenApiIT,InfoIT,DatasetFieldsIT,SavedSearchIT,DatasetTypesIT \ No newline at end of file +DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT,LogoutIT,DataRetrieverApiIT,ProvIT,S3AccessIT,OpenApiIT,InfoIT,DatasetFieldsIT,SavedSearchIT,DatasetTypesIT,DataverseFeaturedItemsIT,SendFeedbackApiIT
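The ShapefileHandlerTest hunks above now mix upper- and lower-case extensions ("shape1.DBF") and expect the grouping of zipped shapefile components to keep each extension's original case. Below is a minimal, standalone sketch of that grouping idea; it is not the actual ShapefileHandler implementation, and the class, method, and variable names are made up for illustration.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Illustrative only: groups file names from a zip listing by basename, keeping each
// extension in its original case (e.g. "shape1.DBF" stays "DBF"), which is the
// behaviour the updated testZippedTwoShapefiles asserts on.
public class BasenameGrouping {

    static Map<String, List<String>> groupByBasename(List<String> fileNames) {
        Map<String, List<String>> groups = new LinkedHashMap<>();
        for (String name : fileNames) {
            int dot = name.lastIndexOf('.');
            // files with no extension get an empty-string extension,
            // mirroring the blank-extension case covered by the test
            String base = dot > 0 ? name.substring(0, dot) : name;
            String ext = dot > 0 ? name.substring(dot + 1) : "";
            groups.computeIfAbsent(base, k -> new ArrayList<>()).add(ext);
        }
        return groups;
    }

    public static void main(String[] args) {
        List<String> names = List.of("shape1.shp", "shape1.shx", "shape1.DBF", "shape1.prj",
                "shape2.shp", "shape2.shx", "shape2.dbf", "shape2.prj", "shape2");
        // prints {shape1=[shp, shx, DBF, prj], shape2=[shp, shx, dbf, prj, ]}
        System.out.println(groupByBasename(names));
    }
}

Running the main method prints the two groups with the DBF extension preserved in upper case, which is the same shape of result the updated assertions in the test check for.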