diff --git a/.gitignore b/.gitignore
index ec135fb3618..4d08cfb2257 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,6 +34,7 @@ oauth-credentials.md
/src/main/webapp/oauth2/newAccount.html
scripts/api/setup-all.sh*
+scripts/api/setup-all.*.log
# ctags generated tag file
tags
diff --git a/conf/solr/7.7.2/schema_dv_mdb_copies.xml b/conf/solr/7.7.2/schema_dv_mdb_copies.xml
index 0208fdf3910..080cc71ef50 100644
--- a/conf/solr/7.7.2/schema_dv_mdb_copies.xml
+++ b/conf/solr/7.7.2/schema_dv_mdb_copies.xml
@@ -133,9 +133,13 @@
+    <copyField source="studyOtherDesignType" dest="_text_" maxChars="3000"/>
+    <copyField source="studyOtherFactorType" dest="_text_" maxChars="3000"/>
+    <copyField source="studyAssayOtherTechnologyType" dest="_text_" maxChars="3000"/>
+    <copyField source="studyAssayOtherPlatform" dest="_text_" maxChars="3000"/>
@@ -154,4 +158,4 @@
-
\ No newline at end of file
+
diff --git a/conf/solr/7.7.2/schema_dv_mdb_fields.xml b/conf/solr/7.7.2/schema_dv_mdb_fields.xml
index 6caa7c6de69..3f844c6183c 100644
--- a/conf/solr/7.7.2/schema_dv_mdb_fields.xml
+++ b/conf/solr/7.7.2/schema_dv_mdb_fields.xml
@@ -133,9 +133,13 @@
+    <field name="studyOtherDesignType" type="text_en" multiValued="true" stored="true" indexed="true"/>
+    <field name="studyOtherFactorType" type="text_en" multiValued="true" stored="true" indexed="true"/>
+    <field name="studyAssayOtherTechnologyType" type="text_en" multiValued="true" stored="true" indexed="true"/>
+    <field name="studyAssayOtherPlatform" type="text_en" multiValued="true" stored="true" indexed="true"/>
@@ -154,4 +158,4 @@
-
\ No newline at end of file
+
diff --git a/doc/release-notes/5.1-release-notes.md b/doc/release-notes/5.1-release-notes.md
new file mode 100644
index 00000000000..3d106b2df7b
--- /dev/null
+++ b/doc/release-notes/5.1-release-notes.md
@@ -0,0 +1,99 @@
+# Dataverse 5.1
+
+This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project.
+
+## Release Highlights
+
+### Large File Upload for Installations Using AWS S3
+
+The added support for multipart upload through the API and UI (Issue #6763) will allow files larger than 5 GB to be uploaded to Dataverse when an installation is running on AWS S3. Previously, only non-AWS S3 storage configurations would allow uploads larger than 5 GB.
+
+### Dataset-Specific Stores
+
+In previous releases, configuration options were added that allow each dataverse to have a specific store enabled. This release adds even more granularity, with the ability to set a dataset-level store.
+
+## Major Use Cases
+
+Newly-supported use cases in this release include:
+
+- Users can now upload files larger than 5 GB on installations running AWS S3 (Issue #6763, PR #6995)
+- Administrators will now be able to specify a store at the dataset level in addition to the Dataverse level (Issue #6872, PR #7272)
+- Users will have their dataset's directory structure retained when uploading a dataset with shapefiles (Issue #6873, PR #7279)
+- Users will now be able to download zip files through the experimental Zipper service when the set of downloaded files have duplicate names (Issue [#80](https://github.com/IQSS/dataverse.harvard.edu/issues/80), PR #7276)
+- Users will now be able to download zip files with the proper file structure through the experimental Zipper service (Issue #7255, PR #7258)
+- Administrators will be able to use new APIs to keep the Solr index and the DB in sync, allowing easier resolution of an issue that would occasionally cause search results to not load (Issue #4225, PR #7211)
+
+## Notes for Dataverse Installation Administrators
+
+### New API for setting a Dataset-level Store
+
+- This release adds a new API for setting a dataset-specific store. Learn more in the Managing Dataverses and Datasets section of the [Admin Guide](http://guides.dataverse.org/en/5.1/admin/dataverses-datasets.html).
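+
+For example, as shown in the Admin Guide, a superuser can assign a store to a dataset with:
+
+`curl -H "X-Dataverse-key: $API_TOKEN" -X PUT -d $storageDriverLabel http://$SERVER/api/datasets/$dataset-id/storageDriver`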
+
+### Multipart Upload Storage Monitoring, Recommended Use for Multipart Upload
+
+Charges may be incurred for storage reserved for multipart uploads that are not completed or cancelled. Administrators may want to do periodic manual or automated checks for open multipart uploads. Learn more in the Big Data Support section of the [Developers Guide](http://guides.dataverse.org/en/5.1/developer/big-data-support.html).
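+
+As one way to check (this assumes the AWS CLI is installed and configured; replace my-bucket with your store's bucket name), open multipart uploads can be listed, and abandoned ones aborted:
+
+`aws s3api list-multipart-uploads --bucket my-bucket`
+
+`aws s3api abort-multipart-upload --bucket my-bucket --key <key> --upload-id <upload-id>`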
+
+While multipart uploads can support much larger files, and can have advantages in terms of robust transfer and speed, they are more complex than single part direct uploads. Administrators should consider taking advantage of the options to limit use of multipart uploads to specific users by using multiple stores and configuring access to stores with high file size limits to specific Dataverses (added in 4.20) or Datasets (added in this release).
+
+### New APIs for keeping Solr records in sync
+
+This release adds new APIs to keep the Solr index and the DB in sync, allowing easier resolution of an issue that would occasionally cause search results to not load. Learn more in the Solr section of the [Admin Guide](http://guides.dataverse.org/en/5.1/admin/solr-search-index.html).
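+
+For example, to report objects that are out of sync and then remove orphaned Solr documents:
+
+`curl http://localhost:8080/api/admin/index/status`
+
+`curl http://localhost:8080/api/admin/index/clear-orphans`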
+
+### Documentation for Purging the Ingest Queue
+
+At times, it may be necessary to cancel long-running ingest jobs in the interest of system stability. The Troubleshooting section of the [Admin Guide](http://guides.dataverse.org/en/5.1/admin/troubleshooting.html) now has specific steps.
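+
+For example, the ingest queue can be purged with the following command, which prompts for the password and for confirmation:
+
+`/usr/local/payara5/mq/bin/imqcmd -u admin purge dst -t q -n DataverseIngest`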
+
+### Biomedical Metadata Block Updated
+
+The Life Science Metadata block (biomedical.tsv) was updated. "Other Design Type", "Other Factor Type", "Other Technology Type", "Other Technology Platform" boxes were added. See the "Additional Upgrade Steps" below if you use this in your installation.
+
+## Notes for Tool Developers and Integrators
+
+### Spaces in File Names
+
+Dataverse Installations using S3 storage will no longer replace spaces in file names of downloaded files with the + character. If your tool or integration has any special handling around this, you may need to make further adjustments to maintain backwards compatibility while also supporting Dataverse installations on 5.1+.
+
+## Complete List of Changes
+
+For the complete list of code changes in this release, see the [5.1 Milestone](https://github.com/IQSS/dataverse/milestone/90?closed=1) on GitHub.
+
+For help with upgrading, installing, or general questions please post to the [Dataverse Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org.
+
+## Installation
+
+If this is a new installation, please see our [Installation Guide](http://guides.dataverse.org/en/5.1/installation/).
+
+## Upgrade Instructions
+
+0. These instructions assume that you've already successfully upgraded from Dataverse 4.x to Dataverse 5 following the instructions in the [Dataverse 5 Release Notes](https://github.com/IQSS/dataverse/releases/tag/v5.0).
+
+1. Undeploy the previous version.
+
+- `/payara/bin/asadmin list-applications`
+- `/payara/bin/asadmin undeploy dataverse`
+
+2. Stop Payara, remove the generated directory, and start Payara again.
+
+- `service payara stop`
+- remove the generated directory: `rm -rf payara/payara/domains/domain1/generated`
+- `service payara start`
+
+3. Deploy this version.
+
+- `/payara/bin/asadmin deploy dataverse-5.1.war`
+
+4. Restart Payara.
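+
+For example, if your service script supports it:
+
+`service payara restart`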
+
+### Additional Upgrade Steps
+
+1. Update Biomedical Metadata Block (if used), Reload Solr, ReExportAll
+
+ `wget https://github.com/IQSS/dataverse/releases/download/v5.1/biomedical.tsv`
+ `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @biomedical.tsv -H "Content-type: text/tab-separated-values"`
+
+- Copy schema_dv_mdb_fields.xml and schema_dv_mdb_copies.xml to your Solr server, for example into the /usr/local/solr/solr-7.7.2/server/solr/collection1/conf/ directory
+- Restart Solr, or tell Solr to reload its configuration:
+
+ `curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"`
+
+- Run ReExportAll to update JSON exports
+
diff --git a/doc/release-notes/5.1.1-release-notes.md b/doc/release-notes/5.1.1-release-notes.md
new file mode 100644
index 00000000000..f5243aebc8f
--- /dev/null
+++ b/doc/release-notes/5.1.1-release-notes.md
@@ -0,0 +1,59 @@
+# Dataverse 5.1.1
+
+This minor release adds important scaling improvements for installations running on AWS S3. It is recommended that 5.1.1 be used in production instead of 5.1.
+
+## Release Highlights
+
+### Connection Pool Size Configuration Option, Connection Optimizations
+
+Dataverse 5.1 improved the efficiency of making S3 connections through use of an HTTP connection pool. This release adds optimizations around closing streams and channels that may hold S3 HTTP connections open and exhaust the connection pool. In parallel, this release increases the default pool size from 50 to 256 and makes the pool size configurable, so that a larger pool can be used if needed.
+
+## Major Use Cases
+
+Newly-supported use cases in this release include:
+
+- Administrators of installations using S3 will be able to define the connection pool size, allowing better resource scaling for larger installations (Issue #7309, PR #7313)
+
+## Notes for Dataverse Installation Administrators
+
+### 5.1.1 vs. 5.1 for Production Use
+
+As mentioned above, we encourage 5.1.1 instead of 5.1 for production use.
+
+### New JVM Option for Connection Pool Size
+
+Larger installations may want to increase the number of open S3 connections allowed (default is 256). For example, to set the value to 4096:
+
+`./asadmin create-jvm-options "-Ddataverse.files.<id>.connection-pool-size=4096"`
+
+The JVM Options section of the [Configuration Guide](http://guides.dataverse.org/en/5.1.1/installation/config.html) has more information.
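+
+To verify the setting afterwards, the configured JVM options can be listed with:
+
+`./asadmin list-jvm-options`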
+
+## Complete List of Changes
+
+For the complete list of code changes in this release, see the [5.1.1 Milestone](https://github.com/IQSS/dataverse/milestone/91?closed=1) on GitHub.
+
+For help with upgrading, installing, or general questions please post to the [Dataverse Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org.
+
+## Installation
+
+If this is a new installation, please see our [Installation Guide](http://guides.dataverse.org/en/5.1.1/installation/).
+
+## Upgrade Instructions
+
+0. These instructions assume that you've already successfully upgraded to Dataverse 5.1 following the instructions in the [Dataverse 5.1 Release Notes](https://github.com/IQSS/dataverse/releases/tag/v5.1).
+
+1. Undeploy the previous version.
+
+- `/payara/bin/asadmin list-applications`
+- `/payara/bin/asadmin undeploy dataverse`
+
+2. Stop Payara, remove the generated directory, and start Payara again.
+
+- `service payara stop`
+- remove the generated directory: `rm -rf payara/payara/domains/domain1/generated`
+- `service payara start`
+
+3. Deploy this version.
+
+- `/payara/bin/asadmin deploy dataverse-5.1.1.war`
+
+4. Restart Payara.
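+
+For example, if your service script supports it:
+
+`service payara restart`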
diff --git a/doc/release-notes/6763-multipart-uploads.md b/doc/release-notes/6763-multipart-uploads.md
deleted file mode 100644
index ecec3efd9dc..00000000000
--- a/doc/release-notes/6763-multipart-uploads.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# Large Data Support (continued)
-
-Direct S3 uploads now support multi-part uploading of large files (> 1GB by default) via the user interface and the API (which is used in the [Dataverse Uploader](https://github.com/GlobalDataverseCommunityConsortium/dataverse-uploader)). This allows uploads larger than 5 GB when using Amazon AWS S3 stores.
\ No newline at end of file
diff --git a/doc/release-notes/7140-google-cloud.md b/doc/release-notes/7140-google-cloud.md
new file mode 100644
index 00000000000..62aef73acd0
--- /dev/null
+++ b/doc/release-notes/7140-google-cloud.md
@@ -0,0 +1,12 @@
+## Google Cloud Archiver
+
+Dataverse Bags can now be sent to a bucket in Google Cloud, including buckets in the 'Coldline' storage class, which provides less expensive but slower access.
+
+## Use Cases
+
+- As an Administrator I can set up a regular export to Google Cloud so that my users' data is preserved.
+
+## New Settings
+
+:GoogleCloudProject - the name of the project managing the bucket.
+:GoogleCloudBucket - the name of the bucket to use.
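+
+For example, as shown in the Configuration Guide:
+
+`curl http://localhost:8080/api/admin/settings/:GoogleCloudProject -X PUT -d "qdr-project"`
+
+`curl http://localhost:8080/api/admin/settings/:GoogleCloudBucket -X PUT -d "qdr-archive"`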
\ No newline at end of file
diff --git a/doc/release-notes/7184-spaces-in-filenames.md b/doc/release-notes/7184-spaces-in-filenames.md
deleted file mode 100644
index 1a5b41068ce..00000000000
--- a/doc/release-notes/7184-spaces-in-filenames.md
+++ /dev/null
@@ -1,7 +0,0 @@
-## Notes for Tool Developers and Integrators
-
-### Filenames
-
-Dataverse Installations using S3 storage will no longer replace spaces in file names with the + character. If your tool or integration has any special handling around this character change, you can remove it.
-
-(review this note if this is in the same release as the fix for #7188)
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/admin/dataverses-datasets.rst b/doc/sphinx-guides/source/admin/dataverses-datasets.rst
index 6349088beea..9c122c25abc 100644
--- a/doc/sphinx-guides/source/admin/dataverses-datasets.rst
+++ b/doc/sphinx-guides/source/admin/dataverses-datasets.rst
@@ -59,6 +59,8 @@ The available drivers can be listed with::
curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/admin/dataverse/storageDrivers
+(Individual datasets can be configured to use specific file stores as well. See the "Datasets" section below.)
+
Datasets
--------
@@ -130,3 +132,23 @@ Diagnose Constraint Violations Issues in Datasets
To identify invalid data values in specific datasets (if, for example, an attempt to edit a dataset results in a ConstraintViolationException in the server log), or to check all the datasets in the Dataverse for constraint violations, see :ref:`Dataset Validation ` in the :doc:`/api/native-api` section of the User Guide.
+Configure a Dataset to store all new files in a specific file store
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Configure a dataset to use a specific file store (this API can only be used by a superuser) ::
+
+ curl -H "X-Dataverse-key: $API_TOKEN" -X PUT -d $storageDriverLabel http://$SERVER/api/datasets/$dataset-id/storageDriver
+
+The current driver can be seen using::
+
+ curl http://$SERVER/api/datasets/$dataset-id/storageDriver
+
+It can be reset to the default store as follows (only a superuser can do this) ::
+
+ curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE http://$SERVER/api/datasets/$dataset-id/storageDriver
+
+The available drivers can be listed with::
+
+ curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/admin/dataverse/storageDrivers
+
+
diff --git a/doc/sphinx-guides/source/admin/solr-search-index.rst b/doc/sphinx-guides/source/admin/solr-search-index.rst
index 07e51b4564f..d37b7eedb26 100644
--- a/doc/sphinx-guides/source/admin/solr-search-index.rst
+++ b/doc/sphinx-guides/source/admin/solr-search-index.rst
@@ -14,6 +14,18 @@ There are two ways to perform a full reindex of the Dataverse search index. Star
Clear and Reindex
+++++++++++++++++
+
+Index and Database Consistency
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Get a list of all database objects that are missing in Solr, and Solr documents that are missing in the database:
+
+``curl http://localhost:8080/api/admin/index/status``
+
+Remove all Solr documents that are orphaned (i.e., not associated with objects in the database):
+
+``curl http://localhost:8080/api/admin/index/clear-orphans``
+
Clearing Data from Solr
~~~~~~~~~~~~~~~~~~~~~~~
@@ -81,4 +93,4 @@ If you suspect something isn't indexed properly in solr, you may bypass the Data
``curl "http://localhost:8983/solr/collection1/select?q=dsPersistentId:doi:10.15139/S3/HFV0AO"``
-to see the JSON you were hopefully expecting to see passed along to Dataverse.
\ No newline at end of file
+to see the JSON you were hopefully expecting to see passed along to Dataverse.
diff --git a/doc/sphinx-guides/source/admin/troubleshooting.rst b/doc/sphinx-guides/source/admin/troubleshooting.rst
index 0c752924b30..ec24de245b6 100644
--- a/doc/sphinx-guides/source/admin/troubleshooting.rst
+++ b/doc/sphinx-guides/source/admin/troubleshooting.rst
@@ -43,6 +43,26 @@ A User Needs Their Account to Be Converted From Institutional (Shibboleth), ORCI
See :ref:`converting-shibboleth-users-to-local` and :ref:`converting-oauth-users-to-local`.
+.. _troubleshooting-ingest:
+
+Ingest
+------
+
+Long-Running Ingest Jobs Have Exhausted System Resources
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Ingest is both CPU- and memory-intensive, and depending on your system resources and the size and format of tabular data files uploaded, may render Dataverse unresponsive or nearly inoperable. It is possible to cancel these jobs by purging the ingest queue.
+
+``/usr/local/payara5/mq/bin/imqcmd -u admin query dst -t q -n DataverseIngest`` will query the DataverseIngest destination. The password, unless you have changed it, matches the username.
+
+``/usr/local/payara5/mq/bin/imqcmd -u admin purge dst -t q -n DataverseIngest`` will purge the DataverseIngest queue, and prompt for your confirmation.
+
+Finally, list destinations to verify that the purge was successful:
+
+``/usr/local/payara5/mq/bin/imqcmd -u admin list dst``
+
+If you are still running Glassfish, substitute glassfish4 for payara5 above. If you have installed Dataverse in some other location, adjust the above paths accordingly.
+
.. _troubleshooting-payara:
Payara
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 03e8f5f3f39..3240ee9ebe0 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -989,16 +989,16 @@ Note that the dataset citation date field type must be a date field.
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org
- export ID=24
- export DATASET_FIELD_TYPE_NAME=:dateOfDeposit
+ export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB
+ export DATASET_FIELD_TYPE_NAME=dateOfDeposit
- curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/$ID/citationdate --data "$DATASET_FIELD_TYPE_NAME"
+ curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/citationdate?persistentId=$PERSISTENT_IDENTIFIER" --data "$DATASET_FIELD_TYPE_NAME"
The fully expanded example above (without environment variables) looks like this:
.. code-block:: bash
- curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/24/citationdate --data ":dateOfDeposit"
+ curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/citationdate?persistentId=doi:10.5072/FK2/J8SJZB" --data "dateOfDeposit"
Revert Citation Date Field Type to Default for Dataset
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1009,15 +1009,15 @@ Restores the default citation date field type, ``:publicationDate``, for a given
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org
- export ID=24
+ export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB
- curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/citationdate
+ curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/:persistentId/citationdate?persistentId=$PERSISTENT_IDENTIFIER"
The fully expanded example above (without environment variables) looks like this:
.. code-block:: bash
- curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/citationdate
+ curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/:persistentId/citationdate?persistentId=doi:10.5072/FK2/J8SJZB"
.. _list-roles-on-a-dataset-api:
@@ -1654,6 +1654,11 @@ The fully expanded example above (without environment variables) looks like this
Calling the destroy endpoint is permanent and irreversible. It will remove the dataset and its datafiles, then re-index the parent dataverse in Solr. This endpoint requires the API token of a superuser.
+Configure a Dataset to Use a Specific File Store
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``/api/datasets/$dataset-id/storageDriver`` can be used to check, configure or reset the designated file store (storage driver) for a dataset. Please see the :doc:`/admin/dataverses-datasets` section of the guide for more information on this API.
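+
+For example, the current file store for a dataset can be checked with::
+
+    curl http://$SERVER/api/datasets/$dataset-id/storageDriver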
+
Files
-----
diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py
index 17c68d38468..2cba6ba5491 100755
--- a/doc/sphinx-guides/source/conf.py
+++ b/doc/sphinx-guides/source/conf.py
@@ -65,9 +65,9 @@
# built documents.
#
# The short X.Y version.
-version = '5.0'
+version = '5.1.1'
# The full version, including alpha/beta/rc tags.
-release = '5.0'
+release = '5.1.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 34da299528f..de8fbad3687 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -516,6 +516,9 @@ By default, your store will use the [default] profile in you .aws configuration
``./asadmin create-jvm-options "-Ddataverse.files.<id>.profile="``
+Larger installations may want to increase the number of open S3 connections allowed (default is 256). For example:
+
+``./asadmin create-jvm-options "-Ddataverse.files.<id>.connection-pool-size=4096"``
In case you would like to configure Dataverse to use a custom S3 service instead of Amazon S3 services, please
add the options for the custom URL and region as documented below. Please read above if your desired combination has
@@ -541,6 +544,7 @@ dataverse.files..custom-endpoint-region > Only used when
dataverse.files.<id>.path-style-access ``true``/``false`` Use path style buckets instead of subdomains. Optional. ``false``
dataverse.files.<id>.payload-signing ``true``/``false`` Enable payload signing. Optional ``false``
dataverse.files.<id>.chunked-encoding ``true``/``false`` Disable chunked encoding. Optional ``true``
+dataverse.files.<id>.connection-pool-size > The maximum number of open connections to the S3 server ``256``
=========================================== ================== ========================================================================= =============
Reported Working S3-Compatible Storage
@@ -772,6 +776,8 @@ For Google Analytics, the example script at :download:`analytics-code.html `_ serialized `OAI-ORE `_ map file, which is also available as a metadata export format in the Dataverse web interface.
-At present, the DPNSubmitToArchiveCommand and LocalSubmitToArchiveCommand are the only implementations extending the AbstractSubmitToArchiveCommand and using the configurable mechanisms discussed below.
+At present, the DPNSubmitToArchiveCommand, LocalSubmitToArchiveCommand, and GoogleCloudSubmitToArchiveCommand are the only implementations extending the AbstractSubmitToArchiveCommand and using the configurable mechanisms discussed below.
.. _Duracloud Configuration:
@@ -827,10 +833,41 @@ ArchiverClassName - the fully qualified class to be used for archiving. For exam
\:ArchiverSettings - the archiver class can access required settings including existing Dataverse settings and dynamically defined ones specific to the class. This setting is a comma-separated list of those settings. For example\:
-``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":BagItLocalPathâ€``
+``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":BagItLocalPath"``
:BagItLocalPath is the file path that you've set in :ArchiverSettings.
+.. _Google Cloud Configuration:
+
+Google Cloud Configuration
+++++++++++++++++++++++++++
+
+The Google Cloud Archiver can send Dataverse Bags to a bucket in Google's cloud, including those in the 'Coldline' storage class (cheaper, with slower access).
+
+``curl http://localhost:8080/api/admin/settings/:ArchiverClassName -X PUT -d "edu.harvard.iq.dataverse.engine.command.impl.GoogleCloudSubmitToArchiveCommand"``
+
+``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":GoogleCloudBucket, :GoogleCloudProject"``
+
+The Google Cloud Archiver defines two custom settings, both of which are required. The credentials for your account, in the form of a JSON key file, must also be obtained and stored locally (see below).
+
+In order to use the Google Cloud Archiver, you must have a Google account. You will need to create a project and bucket within that account and provide those values in the settings:
+
+\:GoogleCloudBucket - the name of the bucket to use. For example:
+
+``curl http://localhost:8080/api/admin/settings/:GoogleCloudBucket -X PUT -d "qdr-archive"``
+
+\:GoogleCloudProject - the name of the project managing the bucket. For example:
+
+``curl http://localhost:8080/api/admin/settings/:GoogleCloudProject -X PUT -d "qdr-project"``
+
+The Google Cloud Archiver also requires a key file that must be renamed to 'googlecloudkey.json' and placed in the directory identified by your 'dataverse.files.directory' JVM option. This file can be created in the Google Cloud Console. (One method: Navigate to your Project 'Settings'/'Service Accounts', create an account, give this account the 'Cloud Storage'/'Storage Admin' role, and once it's created, use the 'Actions' menu to 'Create Key', selecting the 'JSON' format option. Use this as the 'googlecloudkey.json' file.)
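+
+(As an alternative to the console workflow above, and assuming the gcloud CLI is authenticated for your project, a key for an existing service account can be created with ``gcloud iam service-accounts keys create googlecloudkey.json --iam-account=<service account email>``.)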
+
+For example:
+
+``cp <your key file> /usr/local/payara5/glassfish/domains/domain1/files/googlecloudkey.json``
+
+.. _Archiving API Call:
+
API Call
++++++++
@@ -2120,3 +2157,40 @@ To enable redirects to the zipper installed on the same server as the main Datav
To enable redirects to the zipper on a different server:
``curl -X PUT -d 'https://zipper.example.edu/cgi-bin/zipdownload' http://localhost:8080/api/admin/settings/:CustomZipDownloadServiceUrl``
+
+:ArchiverClassName
+++++++++++++++++++
+
+Dataverse can export archival "Bag" files to an extensible set of storage systems (see :ref:`BagIt Export` above for details about this and for further explanation of the other archiving related settings below).
+This setting specifies which storage system to use by identifying the particular Java class that should be run. Current options include DuraCloudSubmitToArchiveCommand, LocalSubmitToArchiveCommand, and GoogleCloudSubmitToArchiveCommand.
+
+``curl -X PUT -d 'edu.harvard.iq.dataverse.engine.command.impl.LocalSubmitToArchiveCommand' http://localhost:8080/api/admin/settings/:ArchiverClassName``
+
+:ArchiverSettings
++++++++++++++++++
+
+Each Archiver class may have its own custom settings. Along with setting which Archiver class to use, one must use this setting to identify which setting values should be sent to it when it is invoked. The value should be a comma-separated list of setting names.
+For example, the LocalSubmitToArchiveCommand only uses the :BagItLocalPath setting. To allow the class to use that setting, this setting must be set as:
+
+``curl -X PUT -d ':BagItLocalPath' http://localhost:8080/api/admin/settings/:ArchiverSettings``
+
+:DuraCloudHost
+++++++++++++++
+:DuraCloudPort
+++++++++++++++
+:DuraCloudContext
++++++++++++++++++
+
+These three settings define the host, port, and context used by the DuraCloudSubmitToArchiveCommand. :DuraCloudHost is required. The other settings have default values as noted in the :ref:`Duracloud Configuration` section above.
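+
+For example, with a placeholder hostname:
+
+``curl -X PUT -d 'test.duracloud.org' http://localhost:8080/api/admin/settings/:DuraCloudHost``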
+
+:BagItLocalPath
++++++++++++++++
+
+This is the local file system path to be used with the LocalSubmitToArchiveCommand class. It is recommended to use an absolute path. See the :ref:`Local Path Configuration` section above.
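+
+For example, with a hypothetical local path:
+
+``curl -X PUT -d '/usr/local/payara5/glassfish/domains/domain1/files/bags' http://localhost:8080/api/admin/settings/:BagItLocalPath``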
+
+:GoogleCloudBucket
+++++++++++++++++++
+:GoogleCloudProject
++++++++++++++++++++
+
+These are the bucket and project names to be used with the GoogleCloudSubmitToArchiveCommand class. Further information is in the :ref:`Google Cloud Configuration` section above.
diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst
index a9f389fde54..0874d04f8ed 100755
--- a/doc/sphinx-guides/source/versions.rst
+++ b/doc/sphinx-guides/source/versions.rst
@@ -6,8 +6,10 @@ Dataverse Documentation Versions
This list provides a way to refer to the documentation for previous versions of Dataverse. In order to learn more about the updates delivered from one version to another, visit the `Releases <https://github.com/IQSS/dataverse/releases>`__ page in our GitHub repo.
-- 5.0
+- 5.1.1
+- `5.1 `__
+- `5.0 `__
- `4.20 `__
- `4.19 `__
- `4.18.1 `__
diff --git a/pom.xml b/pom.xml
index 6c9fa99dbc9..792941ed548 100644
--- a/pom.xml
+++ b/pom.xml
@@ -7,7 +7,7 @@
-->
    <groupId>edu.harvard.iq</groupId>
    <artifactId>dataverse</artifactId>
-    <version>5.0</version>
+    <version>5.1.1</version>
    <packaging>war</packaging>
    <name>dataverse</name>
@@ -57,7 +57,7 @@
-
@@ -127,6 +127,13 @@
                <artifactId>httpclient</artifactId>
                <version>${httpcomponents.client.version}</version>
            </dependency>
+            <dependency>
+                <groupId>com.google.cloud</groupId>
+                <artifactId>google-cloud-bom</artifactId>
+                <version>0.115.0-alpha</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
            <dependency>
                <groupId>org.testcontainers</groupId>
                <artifactId>testcontainers-bom</artifactId>
@@ -137,7 +144,7 @@
@@ -440,11 +447,6 @@
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.6.1</version>
        </dependency>
-        <dependency>
-            <groupId>axis</groupId>
-            <artifactId>axis</artifactId>
-            <version>1.4</version>
-        </dependency>
        <dependency>
            <groupId>io.searchbox</groupId>
            <artifactId>jest</artifactId>
@@ -573,7 +575,7 @@
        <dependency>
            <groupId>org.apache.tika</groupId>
            <artifactId>tika-parsers</artifactId>
-            <version>1.22</version>
+            <version>1.24.1</version>
        </dependency>
@@ -581,6 +583,11 @@
            <artifactId>opennlp-tools</artifactId>
            <version>1.9.1</version>
        </dependency>
+        <dependency>
+            <groupId>com.google.cloud</groupId>
+            <artifactId>google-cloud-storage</artifactId>
+            <version>1.97.0</version>
+        </dependency>
diff --git a/scripts/api/data/dataset-create-new-all-default-fields.json b/scripts/api/data/dataset-create-new-all-default-fields.json
index ba801b9bae8..7a82cd4bb75 100644
--- a/scripts/api/data/dataset-create-new-all-default-fields.json
+++ b/scripts/api/data/dataset-create-new-all-default-fields.json
@@ -181,7 +181,7 @@
"typeName": "dsDescriptionValue",
"multiple": false,
"typeClass": "primitive",
- "value": "DescriptionText 1"
+ "value": "DescriptionText1"
},
"dsDescriptionDate": {
"typeName": "dsDescriptionDate",
@@ -264,6 +264,53 @@
}
]
},
+ {
+ "typeName": "topicClassification",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "topicClassValue": {
+ "typeName": "topicClassValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Topic Classification Term1"
+ },
+ "topicClassVocab": {
+ "typeName": "topicClassVocab",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Topic Classification Vocab1"
+ },
+ "topicClassVocabURI": {
+ "typeName": "topicClassVocabURI",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "https://TopicClassificationURL1.com"
+ }
+ },
+ {
+ "topicClassValue": {
+ "typeName": "topicClassValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Topic Classification Term2"
+ },
+ "topicClassVocab": {
+ "typeName": "topicClassVocab",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Topic Classification Vocab2"
+ },
+ "topicClassVocabURI": {
+ "typeName": "topicClassVocabURI",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "https://TopicClassificationURL2.com"
+ }
+ }
+ ]
+ },
{
"typeName": "publication",
"multiple": true,
@@ -329,6 +376,15 @@
"typeClass": "primitive",
"value": "Notes1"
},
+ {
+ "typeName": "language",
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "value": [
+ "Abkhaz",
+ "Afar"
+ ]
+ },
{
"typeName": "producer",
"multiple": true,
diff --git a/scripts/api/data/metadatablocks/biomedical.tsv b/scripts/api/data/metadatablocks/biomedical.tsv
index f45c5849845..28d59130c34 100644
--- a/scripts/api/data/metadatablocks/biomedical.tsv
+++ b/scripts/api/data/metadatablocks/biomedical.tsv
@@ -1,295 +1,299 @@
-#metadataBlock name dataverseAlias displayName
- biomedical Life Sciences Metadata
-#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id
- studyDesignType Design Type Design types that are based on the overall experimental design. text 0 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
- studyFactorType Factor Type Factors used in the Dataset. text 1 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
- studyAssayOrganism Organism The taxonomic name of the organism used in the Dataset or from which the starting biological material derives. text 2 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
- studyAssayOtherOrganism Other Organism If Other was selected in Organism, list any other organisms that were used in this Dataset. Terms from the NCBI Taxonomy are recommended. text 3 TRUE FALSE TRUE TRUE FALSE FALSE biomedical
- studyAssayMeasurementType Measurement Type A term to qualify the endpoint, or what is being measured (e.g. gene expression profiling; protein identification). text 4 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
- studyAssayOtherMeasurmentType Other Measurement Type If Other was selected in Measurement Type, list any other measurement types that were used. Terms from NCBO Bioportal are recommended. text 5 TRUE FALSE TRUE TRUE FALSE FALSE biomedical
- studyAssayTechnologyType Technology Type A term to identify the technology used to perform the measurement (e.g. DNA microarray; mass spectrometry). text 6 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
- studyAssayPlatform Technology Platform The manufacturer and name of the technology platform used in the assay (e.g. Bruker AVANCE). text 7 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
- studyAssayCellType Cell Type The name of the cell line from which the source or sample derives. text 8 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
-#controlledVocabulary DatasetField Value identifier displayOrder
- studyDesignType Case Control EFO_0001427 0
- studyDesignType Cross Sectional EFO_0001428 1
- studyDesignType Cohort Study OCRE100078 2
- studyDesignType Nested Case Control Design NCI_C48202 3
- studyDesignType Not Specified OTHER_DESIGN 4
- studyDesignType Parallel Group Design OBI_0500006 5
- studyDesignType Perturbation Design OBI_0001033 6
- studyDesignType Randomized Controlled Trial MESH_D016449 7
- studyDesignType Technological Design TECH_DESIGN 8
- studyFactorType Age EFO_0000246 0
- studyFactorType Biomarkers BIOMARKERS 1
- studyFactorType Cell Surface Markers CELL_SURFACE_M 2
- studyFactorType Cell Type/Cell Line EFO_0000324;EFO_0000322 3
- studyFactorType Developmental Stage EFO_0000399 4
- studyFactorType Disease State OBI_0001293 5
- studyFactorType Drug Susceptibility IDO_0000469 6
- studyFactorType Extract Molecule FBcv_0010001 7
- studyFactorType Genetic Characteristics OBI_0001404 8
- studyFactorType Immunoprecipitation Antibody OBI_0000690 9
- studyFactorType Organism OBI_0100026 10
- studyFactorType Other OTHER_FACTOR 11
- studyFactorType Passages PASSAGES_FACTOR 12
- studyFactorType Platform OBI_0000050 13
- studyFactorType Sex EFO_0000695 14
- studyFactorType Strain EFO_0005135 15
- studyFactorType Time Point EFO_0000724 16
- studyFactorType Tissue Type BTO_0001384 17
- studyFactorType Treatment Compound EFO_0000369 18
- studyFactorType Treatment Type EFO_0000727 19
- studyAssayMeasurementType cell counting ERO_0001899 0
- studyAssayMeasurementType cell sorting CHMO_0001085 1
- studyAssayMeasurementType clinical chemistry analysis OBI_0000520 2
- studyAssayMeasurementType copy number variation profiling OBI_0000537 3
- studyAssayMeasurementType DNA methylation profiling OBI_0000634 4
- studyAssayMeasurementType DNA methylation profiling (Bisulfite-Seq) OBI_0000748 5
- studyAssayMeasurementType DNA methylation profiling (MeDIP-Seq) _OBI_0000634 6
- studyAssayMeasurementType drug susceptibility _IDO_0000469 7
- studyAssayMeasurementType environmental gene survey ENV_GENE_SURVEY 8
- studyAssayMeasurementType genome sequencing ERO_0001183 9
- studyAssayMeasurementType hematology OBI_0000630 10
- studyAssayMeasurementType histology OBI_0600020 11
- studyAssayMeasurementType Histone Modification (ChIP-Seq) OBI_0002017 12
- studyAssayMeasurementType loss of heterozygosity profiling SO_0001786 13
- studyAssayMeasurementType metabolite profiling OBI_0000366 14
- studyAssayMeasurementType metagenome sequencing METAGENOME_SEQ 15
- studyAssayMeasurementType protein expression profiling OBI_0000615 16
- studyAssayMeasurementType protein identification ERO_0000346 17
- studyAssayMeasurementType protein-DNA binding site identification PROTEIN_DNA_BINDING 18
- studyAssayMeasurementType protein-protein interaction detection OBI_0000288 19
- studyAssayMeasurementType protein-RNA binding (RIP-Seq) PROTEIN_RNA_BINDING 20
- studyAssayMeasurementType SNP analysis OBI_0000435 21
- studyAssayMeasurementType targeted sequencing TARGETED_SEQ 22
- studyAssayMeasurementType transcription factor binding (ChIP-Seq) OBI_0002018 23
- studyAssayMeasurementType transcription factor binding site identification OBI_0000291 24
- studyAssayMeasurementType transcription profiling OBI_0000424 25
- studyAssayMeasurementType transcription profiling EFO_0001032 26
- studyAssayMeasurementType transcription profiling (Microarray) TRANSCRIPTION_PROF 27
- studyAssayMeasurementType transcription profiling (RNA-Seq) OBI_0001271 28
- studyAssayMeasurementType TRAP translational profiling TRAP_TRANS_PROF 29
- studyAssayMeasurementType Other OTHER_MEASUREMENT 30
- studyAssayOrganism Arabidopsis thaliana NCBITaxon_3702 0
- studyAssayOrganism Bos taurus NCBITaxon_9913 1
- studyAssayOrganism Caenorhabditis elegans NCBITaxon_6239 2
- studyAssayOrganism Chlamydomonas reinhardtii NCBITaxon_3055 3
- studyAssayOrganism Danio rerio (zebrafish) NCBITaxon_7955 4
- studyAssayOrganism Dictyostelium discoideum NCBITaxon_44689 5
- studyAssayOrganism Drosophila melanogaster NCBITaxon_7227 6
- studyAssayOrganism Escherichia coli NCBITaxon_562 7
- studyAssayOrganism Hepatitis C virus NCBITaxon_11103 8
- studyAssayOrganism Homo sapiens NCBITaxon_9606 9
- studyAssayOrganism Mus musculus NCBITaxon_10090 10
- studyAssayOrganism Mycobacterium africanum NCBITaxon_33894 11
- studyAssayOrganism Mycobacterium canetti NCBITaxon_78331 12
- studyAssayOrganism Mycobacterium tuberculosis NCBITaxon_1773 13
- studyAssayOrganism Mycoplasma pneumoniae NCBITaxon_2104 14
- studyAssayOrganism Oryza sativa NCBITaxon_4530 15
- studyAssayOrganism Plasmodium falciparum NCBITaxon_5833 16
- studyAssayOrganism Pneumocystis carinii NCBITaxon_4754 17
- studyAssayOrganism Rattus norvegicus NCBITaxon_10116 18
- studyAssayOrganism Saccharomyces cerevisiae (brewer's yeast) NCBITaxon_4932 19
- studyAssayOrganism Schizosaccharomyces pombe NCBITaxon_4896 20
- studyAssayOrganism Takifugu rubripes NCBITaxon_31033 21
- studyAssayOrganism Xenopus laevis NCBITaxon_8355 22
- studyAssayOrganism Zea mays NCBITaxon_4577 23
- studyAssayOrganism Other OTHER_TAXONOMY 24
- studyAssayTechnologyType culture based drug susceptibility testing, single concentration CULTURE_DRUG_TEST_SINGLE 0
- studyAssayTechnologyType culture based drug susceptibility testing, two concentrations CULTURE_DRUG_TEST_TWO 1
- studyAssayTechnologyType culture based drug susceptibility testing, three or more concentrations (minimium inhibitory concentration measurement) CULTURE_DRUG_TEST_THREE 2
- studyAssayTechnologyType DNA microarray OBI_0400148 3
- studyAssayTechnologyType flow cytometry OBI_0000916 4
- studyAssayTechnologyType gel electrophoresis OBI_0600053 5
- studyAssayTechnologyType mass spectrometry OBI_0000470 6
- studyAssayTechnologyType NMR spectroscopy OBI_0000623 7
- studyAssayTechnologyType nucleotide sequencing OBI_0000626 8
- studyAssayTechnologyType protein microarray OBI_0400149 9
- studyAssayTechnologyType real time PCR OBI_0000893 10
- studyAssayTechnologyType no technology required NO_TECHNOLOGY 11
- studyAssayTechnologyType Other OTHER_TECHNOLOGY 12
- studyAssayPlatform 210-MS GC Ion Trap (Varian) 210_MS_GC 0
- studyAssayPlatform 220-MS GC Ion Trap (Varian) 220_MS_GC 1
- studyAssayPlatform 225-MS GC Ion Trap (Varian) 225_MS_GC 2
- studyAssayPlatform 240-MS GC Ion Trap (Varian) 240_MS_GC 3
- studyAssayPlatform 300-MS quadrupole GC/MS (Varian) 300_MS_GCMS 4
- studyAssayPlatform 320-MS LC/MS (Varian) 320_MS_LCMS 5
- studyAssayPlatform 325-MS LC/MS (Varian) 325_MS_LCMS 6
- studyAssayPlatform 320-MS GC/MS (Varian) 500_MS_GCMS 7
- studyAssayPlatform 500-MS LC/MS (Varian) 500_MS_LCMS 8
- studyAssayPlatform 800D (Jeol) 800D 9
- studyAssayPlatform 910-MS TQ-FT (Varian) 910_MS_TQFT 10
- studyAssayPlatform 920-MS TQ-FT (Varian) 920_MS_TQFT 11
- studyAssayPlatform 3100 Mass Detector (Waters) 3100_MASS_D 12
- studyAssayPlatform 6110 Quadrupole LC/MS (Agilent) 6110_QUAD_LCMS 13
- studyAssayPlatform 6120 Quadrupole LC/MS (Agilent) 6120_QUAD_LCMS 14
- studyAssayPlatform 6130 Quadrupole LC/MS (Agilent) 6130_QUAD_LCMS 15
- studyAssayPlatform 6140 Quadrupole LC/MS (Agilent) 6140_QUAD_LCMS 16
- studyAssayPlatform 6310 Ion Trap LC/MS (Agilent) 6310_ION_LCMS 17
- studyAssayPlatform 6320 Ion Trap LC/MS (Agilent) 6320_ION_LCMS 18
- studyAssayPlatform 6330 Ion Trap LC/MS (Agilent) 6330_ION_LCMS 19
- studyAssayPlatform 6340 Ion Trap LC/MS (Agilent) 6340_ION_LCMS 20
- studyAssayPlatform 6410 Triple Quadrupole LC/MS (Agilent) 6410_TRIPLE_LCMS 21
- studyAssayPlatform 6430 Triple Quadrupole LC/MS (Agilent) 6430_TRIPLE_LCMS 22
- studyAssayPlatform 6460 Triple Quadrupole LC/MS (Agilent) 6460_TRIPLE_LCMS 23
- studyAssayPlatform 6490 Triple Quadrupole LC/MS (Agilent) 6490_TRIPLE_LCMS 24
- studyAssayPlatform 6530 Q-TOF LC/MS (Agilent) 6530_Q_TOF_LCMS 25
- studyAssayPlatform 6540 Q-TOF LC/MS (Agilent) 6540_Q_TOF_LCMS 26
- studyAssayPlatform 6210 TOF LC/MS (Agilent) 6210_Q_TOF_LCMS 27
- studyAssayPlatform 6220 TOF LC/MS (Agilent) 6220_Q_TOF_LCMS 28
- studyAssayPlatform 6230 TOF LC/MS (Agilent) 6230_Q_TOF_LCMS 29
- studyAssayPlatform 7000B Triple Quadrupole GC/MS (Agilent) 700B_TRIPLE_GCMS 30
- studyAssayPlatform AccuTO DART (Jeol) ACCUTO_DART 31
- studyAssayPlatform AccuTOF GC (Jeol) ACCUTOF_GC 32
- studyAssayPlatform AccuTOF LC (Jeol) ACCUTOF_LC 33
- studyAssayPlatform ACQUITY SQD (Waters) ACQUITY_SQD 34
- studyAssayPlatform ACQUITY TQD (Waters) ACQUITY_TQD 35
- studyAssayPlatform Agilent AGILENT 36
- studyAssayPlatform Agilent 5975E GC/MSD (Agilent) AGILENT_ 5975E_GCMSD 37
- studyAssayPlatform Agilent 5975T LTM GC/MSD (Agilent) AGILENT_5975T_LTM_GCMSD 38
- studyAssayPlatform 5975C Series GC/MSD (Agilent) 5975C_GCMSD 39
- studyAssayPlatform Affymetrix AFFYMETRIX 40
- studyAssayPlatform amaZon ETD ESI Ion Trap (Bruker) AMAZON_ETD_ESI 41
- studyAssayPlatform amaZon X ESI Ion Trap (Bruker) AMAZON_X_ESI 42
- studyAssayPlatform apex-ultra hybrid Qq-FTMS (Bruker) APEX_ULTRA_QQ_FTMS 43
- studyAssayPlatform API 2000 (AB Sciex) API_2000 44
- studyAssayPlatform API 3200 (AB Sciex) API_3200 45
- studyAssayPlatform API 3200 QTRAP (AB Sciex) API_3200_QTRAP 46
- studyAssayPlatform API 4000 (AB Sciex) API_4000 47
- studyAssayPlatform API 4000 QTRAP (AB Sciex) API_4000_QTRAP 48
- studyAssayPlatform API 5000 (AB Sciex) API_5000 49
- studyAssayPlatform API 5500 (AB Sciex) API_5500 50
- studyAssayPlatform API 5500 QTRAP (AB Sciex) API_5500_QTRAP 51
- studyAssayPlatform Applied Biosystems Group (ABI) APPLIED_BIOSYSTEMS 52
- studyAssayPlatform AQI Biosciences AQI_BIOSCIENCES 53
- studyAssayPlatform Atmospheric Pressure GC (Waters) ATMOS_GC 54
- studyAssayPlatform autoflex III MALDI-TOF MS (Bruker) AUTOFLEX_III_MALDI_TOF_MS 55
- studyAssayPlatform autoflex speed(Bruker) AUTOFLEX_SPEED 56
- studyAssayPlatform AutoSpec Premier (Waters) AUTOSPEC_PREMIER 57
- studyAssayPlatform AXIMA Mega TOF (Shimadzu) AXIMA_MEGA_TOF 58
- studyAssayPlatform AXIMA Performance MALDI TOF/TOF (Shimadzu) AXIMA_PERF_MALDI_TOF 59
- studyAssayPlatform A-10 Analyzer (Apogee) A_10_ANALYZER 60
- studyAssayPlatform A-40-MiniFCM (Apogee) A_40_MINIFCM 61
- studyAssayPlatform Bactiflow (Chemunex SA) BACTIFLOW 62
- studyAssayPlatform Base4innovation BASE4INNOVATION 63
- studyAssayPlatform BD BACTEC MGIT 320 BD_BACTEC_MGIT_320 64
- studyAssayPlatform BD BACTEC MGIT 960 BD_BACTEC_MGIT_960 65
- studyAssayPlatform BD Radiometric BACTEC 460TB BD_RADIO_BACTEC_460TB 66
- studyAssayPlatform BioNanomatrix BIONANOMATRIX 67
- studyAssayPlatform Cell Lab Quanta SC (Becman Coulter) CELL_LAB_QUANTA_SC 68
- studyAssayPlatform Clarus 560 D GC/MS (PerkinElmer) CLARUS_560_D_GCMS 69
- studyAssayPlatform Clarus 560 S GC/MS (PerkinElmer) CLARUS_560_S_GCMS 70
- studyAssayPlatform Clarus 600 GC/MS (PerkinElmer) CLARUS_600_GCMS 71
- studyAssayPlatform Complete Genomics COMPLETE_GENOMICS 72
- studyAssayPlatform Cyan (Dako Cytomation) CYAN 73
- studyAssayPlatform CyFlow ML (Partec) CYFLOW_ML 74
- studyAssayPlatform Cyow SL (Partec) CYFLOW_SL 75
- studyAssayPlatform CyFlow SL3 (Partec) CYFLOW_SL3 76
- studyAssayPlatform CytoBuoy (Cyto Buoy Inc) CYTOBUOY 77
- studyAssayPlatform CytoSence (Cyto Buoy Inc) CYTOSENCE 78
- studyAssayPlatform CytoSub (Cyto Buoy Inc) CYTOSUB 79
- studyAssayPlatform Danaher DANAHER 80
- studyAssayPlatform DFS (Thermo Scientific) DFS 81
- studyAssayPlatform Exactive(Thermo Scientific) EXACTIVE 82
- studyAssayPlatform FACS Canto (Becton Dickinson) FACS_CANTO 83
- studyAssayPlatform FACS Canto2 (Becton Dickinson) FACS_CANTO2 84
- studyAssayPlatform FACS Scan (Becton Dickinson) FACS_SCAN 85
- studyAssayPlatform FC 500 (Becman Coulter) FC_500 86
- studyAssayPlatform GCmate II GC/MS (Jeol) GCMATE_II 87
- studyAssayPlatform GCMS-QP2010 Plus (Shimadzu) GCMS_QP2010_PLUS 88
- studyAssayPlatform GCMS-QP2010S Plus (Shimadzu) GCMS_QP2010S_PLUS 89
- studyAssayPlatform GCT Premier (Waters) GCT_PREMIER 90
- studyAssayPlatform GENEQ GENEQ 91
- studyAssayPlatform Genome Corp. GENOME_CORP 92
- studyAssayPlatform GenoVoxx GENOVOXX 93
- studyAssayPlatform GnuBio GNUBIO 94
- studyAssayPlatform Guava EasyCyte Mini (Millipore) GUAVA_EASYCYTE_MINI 95
- studyAssayPlatform Guava EasyCyte Plus (Millipore) GUAVA_EASYCYTE_PLUS 96
- studyAssayPlatform Guava Personal Cell Analysis (Millipore) GUAVA_PERSONAL_CELL 97
- studyAssayPlatform Guava Personal Cell Analysis-96 (Millipore) GUAVA_PERSONAL_CELL_96 98
- studyAssayPlatform Helicos BioSciences HELICOS_BIO 99
- studyAssayPlatform Illumina ILLUMINA 100
- studyAssayPlatform Indirect proportion method on LJ medium INDIRECT_LJ_MEDIUM 101
- studyAssayPlatform Indirect proportion method on Middlebrook Agar 7H9 INDIRECT_AGAR_7H9 102
- studyAssayPlatform Indirect proportion method on Middlebrook Agar 7H10 INDIRECT_AGAR_7H10 103
- studyAssayPlatform Indirect proportion method on Middlebrook Agar 7H11 INDIRECT_AGAR_7H11 104
- studyAssayPlatform inFlux Analyzer (Cytopeia) INFLUX_ANALYZER 105
- studyAssayPlatform Intelligent Bio-Systems INTELLIGENT_BIOSYSTEMS 106
- studyAssayPlatform ITQ 700 (Thermo Scientific) ITQ_700 107
- studyAssayPlatform ITQ 900 (Thermo Scientific) ITQ_900 108
- studyAssayPlatform ITQ 1100 (Thermo Scientific) ITQ_1100 109
- studyAssayPlatform JMS-53000 SpiralTOF (Jeol) JMS_53000_SPIRAL 110
- studyAssayPlatform LaserGen LASERGEN 111
- studyAssayPlatform LCMS-2020 (Shimadzu) LCMS_2020 112
- studyAssayPlatform LCMS-2010EV (Shimadzu) LCMS_2010EV 113
- studyAssayPlatform LCMS-IT-TOF (Shimadzu) LCMS_IT_TOF 114
- studyAssayPlatform Li-Cor LI_COR 115
- studyAssayPlatform Life Tech LIFE_TECH 116
- studyAssayPlatform LightSpeed Genomics LIGHTSPEED_GENOMICS 117
- studyAssayPlatform LCT Premier XE (Waters) LCT_PREMIER_XE 118
- studyAssayPlatform LCQ Deca XP MAX (Thermo Scientific) LCQ_DECA_XP_MAX 119
- studyAssayPlatform LCQ Fleet (Thermo Scientific) LCQ_FLEET 120
- studyAssayPlatform LXQ (Thermo Scientific) LXQ_THERMO 121
- studyAssayPlatform LTQ Classic (Thermo Scientific) LTQ_CLASSIC 122
- studyAssayPlatform LTQ XL (Thermo Scientific) LTQ_XL 123
- studyAssayPlatform LTQ Velos (Thermo Scientific) LTQ_VELOS 124
- studyAssayPlatform LTQ Orbitrap Classic (Thermo Scientific) LTQ_ORBITRAP_CLASSIC 125
- studyAssayPlatform LTQ Orbitrap XL (Thermo Scientific) LTQ_ORBITRAP_XL 126
- studyAssayPlatform LTQ Orbitrap Discovery (Thermo Scientific) LTQ_ORBITRAP_DISCOVERY 127
- studyAssayPlatform LTQ Orbitrap Velos (Thermo Scientific) LTQ_ORBITRAP_VELOS 128
- studyAssayPlatform Luminex 100 (Luminex) LUMINEX_100 129
- studyAssayPlatform Luminex 200 (Luminex) LUMINEX_200 130
- studyAssayPlatform MACS Quant (Miltenyi) MACS_QUANT 131
- studyAssayPlatform MALDI SYNAPT G2 HDMS (Waters) MALDI_SYNAPT_G2_HDMS 132
- studyAssayPlatform MALDI SYNAPT G2 MS (Waters) MALDI_SYNAPT_G2_MS 133
- studyAssayPlatform MALDI SYNAPT HDMS (Waters) MALDI_SYNAPT_HDMS 134
- studyAssayPlatform MALDI SYNAPT MS (Waters) MALDI_SYNAPT_MS 135
- studyAssayPlatform MALDI micro MX (Waters) MALDI_MICROMX 136
- studyAssayPlatform maXis (Bruker) MAXIS 137
- studyAssayPlatform maXis G4 (Bruker) MAXISG4 138
- studyAssayPlatform microflex LT MALDI-TOF MS (Bruker) MICROFLEX_LT_MALDI_TOF_MS 139
- studyAssayPlatform microflex LRF MALDI-TOF MS (Bruker) MICROFLEX_LRF_MALDI_TOF_MS 140
- studyAssayPlatform microflex III MALDI-TOF MS (Bruker) MICROFLEX_III_TOF_MS 141
- studyAssayPlatform micrOTOF II ESI TOF (Bruker) MICROTOF_II_ESI_TOF 142
- studyAssayPlatform micrOTOF-Q II ESI-Qq-TOF (Bruker) MICROTOF_Q_II_ESI_QQ_TOF 143
- studyAssayPlatform microplate Alamar Blue (resazurin) colorimetric method MICROPLATE_ALAMAR_BLUE_COLORIMETRIC 144
- studyAssayPlatform Mstation (Jeol) MSTATION 145
- studyAssayPlatform MSQ Plus (Thermo Scientific) MSQ_PLUS 146
- studyAssayPlatform NABsys NABSYS 147
- studyAssayPlatform Nanophotonics Biosciences NANOPHOTONICS_BIOSCIENCES 148
- studyAssayPlatform Network Biosystems NETWORK_BIOSYSTEMS 149
- studyAssayPlatform Nimblegen NIMBLEGEN 150
- studyAssayPlatform Oxford Nanopore Technologies OXFORD_NANOPORE_TECHNOLOGIES 151
- studyAssayPlatform Pacific Biosciences PACIFIC_BIOSCIENCES 152
- studyAssayPlatform Population Genetics Technologies POPULATION_GENETICS_TECHNOLOGIES 153
- studyAssayPlatform Q1000GC UltraQuad (Jeol) Q1000GC_ULTRAQUAD 154
- studyAssayPlatform Quattro micro API (Waters) QUATTRO_MICRO_API 155
- studyAssayPlatform Quattro micro GC (Waters) QUATTRO_MICRO_GC 156
- studyAssayPlatform Quattro Premier XE (Waters) QUATTRO_PREMIER_XE 157
- studyAssayPlatform QSTAR (AB Sciex) QSTAR 158
- studyAssayPlatform Reveo REVEO 159
- studyAssayPlatform Roche ROCHE 160
- studyAssayPlatform Seirad SEIRAD 161
- studyAssayPlatform solariX hybrid Qq-FTMS (Bruker) SOLARIX_HYBRID_QQ_FTMS 162
- studyAssayPlatform Somacount (Bently Instruments) SOMACOUNT 163
- studyAssayPlatform SomaScope (Bently Instruments) SOMASCOPE 164
- studyAssayPlatform SYNAPT G2 HDMS (Waters) SYNAPT_G2_HDMS 165
- studyAssayPlatform SYNAPT G2 MS (Waters) SYNAPT_G2_MS 166
- studyAssayPlatform SYNAPT HDMS (Waters) SYNAPT_HDMS 167
- studyAssayPlatform SYNAPT MS (Waters) SYNAPT_MS 168
- studyAssayPlatform TripleTOF 5600 (AB Sciex) TRIPLETOF_5600 169
- studyAssayPlatform TSQ Quantum Ultra (Thermo Scientific) TSQ_QUANTUM_ULTRA 170
- studyAssayPlatform TSQ Quantum Access (Thermo Scientific) TSQ_QUANTUM_ACCESS 171
- studyAssayPlatform TSQ Quantum Access MAX (Thermo Scientific) TSQ_QUANTUM_ACCESS_MAX 172
- studyAssayPlatform TSQ Quantum Discovery MAX (Thermo Scientific) TSQ_QUANTUM_DISCOVERY_MAX 173
- studyAssayPlatform TSQ Quantum GC (Thermo Scientific) TSQ_QUANTUM_GC 174
- studyAssayPlatform TSQ Quantum XLS (Thermo Scientific) TSQ_QUANTUM_XLS 175
- studyAssayPlatform TSQ Vantage (Thermo Scientific) TSQ_VANTAGE 176
- studyAssayPlatform ultrafleXtreme MALDI-TOF MS (Bruker) ULTRAFLEXTREME_MALDI_TOF_MS 177
- studyAssayPlatform VisiGen Biotechnologies VISIGEN_BIO 178
- studyAssayPlatform Xevo G2 QTOF (Waters) XEVO_G2_QTOF 179
- studyAssayPlatform Xevo QTof MS (Waters) XEVO_QTOF_MS 180
- studyAssayPlatform Xevo TQ MS (Waters) XEVO_TQ_MS 181
- studyAssayPlatform Xevo TQ-S (Waters) XEVO_TQ_S 182
+#metadataBlock name dataverseAlias displayName
+ biomedical Life Sciences Metadata
+#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id
+ studyDesignType Design Type Design types that are based on the overall experimental design. text 0 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
+ studyOtherDesignType Other Design Type If Other was selected in Design Type, list any other design types that were used in this Dataset. text 1 TRUE FALSE TRUE TRUE FALSE FALSE biomedical
+ studyFactorType Factor Type Factors used in the Dataset. text 2 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
+ studyOtherFactorType Other Factor Type If Other was selected in Factor Type, list any other factor types that were used in this Dataset. text 3 TRUE FALSE TRUE TRUE FALSE FALSE biomedical
+ studyAssayOrganism Organism The taxonomic name of the organism used in the Dataset or from which the starting biological material derives. text 4 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
+ studyAssayOtherOrganism Other Organism If Other was selected in Organism, list any other organisms that were used in this Dataset. Terms from the NCBI Taxonomy are recommended. text 5 TRUE FALSE TRUE TRUE FALSE FALSE biomedical
+ studyAssayMeasurementType Measurement Type A term to qualify the endpoint, or what is being measured (e.g. gene expression profiling; protein identification). text 6 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
+ studyAssayOtherMeasurmentType Other Measurement Type If Other was selected in Measurement Type, list any other measurement types that were used. Terms from NCBO Bioportal are recommended. text 7 TRUE FALSE TRUE TRUE FALSE FALSE biomedical
+ studyAssayTechnologyType Technology Type A term to identify the technology used to perform the measurement (e.g. DNA microarray; mass spectrometry). text 8 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
+ studyAssayOtherTechnologyType Other Technology Type If Other was selected in Technology Type, list any other technology types that were used in this Dataset. text 9 TRUE FALSE TRUE TRUE FALSE FALSE biomedical
+ studyAssayPlatform Technology Platform The manufacturer and name of the technology platform used in the assay (e.g. Bruker AVANCE). text 10 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
+ studyAssayOtherPlatform Other Technology Platform If Other was selected in Technology Platform, list any other technology platforms that were used in this Dataset. text 11 TRUE FALSE TRUE TRUE FALSE FALSE biomedical
+ studyAssayCellType Cell Type The name of the cell line from which the source or sample derives. text 12 TRUE TRUE TRUE TRUE FALSE FALSE biomedical
+#controlledVocabulary DatasetField Value identifier displayOrder
+ studyDesignType Case Control EFO_0001427 0
+ studyDesignType Cross Sectional EFO_0001428 1
+ studyDesignType Cohort Study OCRE100078 2
+ studyDesignType Nested Case Control Design NCI_C48202 3
+ studyDesignType Not Specified NOT_SPECIFIED 4
+ studyDesignType Parallel Group Design OBI_0500006 5
+ studyDesignType Perturbation Design OBI_0001033 6
+ studyDesignType Randomized Controlled Trial MESH_D016449 7
+ studyDesignType Technological Design TECH_DESIGN 8
+ studyDesignType Other OTHER_DESIGN 9
+ studyFactorType Age EFO_0000246 0
+ studyFactorType Biomarkers BIOMARKERS 1
+ studyFactorType Cell Surface Markers CELL_SURFACE_M 2
+ studyFactorType Cell Type/Cell Line EFO_0000324;EFO_0000322 3
+ studyFactorType Developmental Stage EFO_0000399 4
+ studyFactorType Disease State OBI_0001293 5
+ studyFactorType Drug Susceptibility IDO_0000469 6
+ studyFactorType Extract Molecule FBcv_0010001 7
+ studyFactorType Genetic Characteristics OBI_0001404 8
+ studyFactorType Immunoprecipitation Antibody OBI_0000690 9
+ studyFactorType Organism OBI_0100026 10
+ studyFactorType Passages PASSAGES_FACTOR 11
+ studyFactorType Platform OBI_0000050 12
+ studyFactorType Sex EFO_0000695 13
+ studyFactorType Strain EFO_0005135 14
+ studyFactorType Time Point EFO_0000724 15
+ studyFactorType Tissue Type BTO_0001384 16
+ studyFactorType Treatment Compound EFO_0000369 17
+ studyFactorType Treatment Type EFO_0000727 18
+ studyFactorType Other OTHER_FACTOR 19
+ studyAssayMeasurementType cell counting ERO_0001899 0
+ studyAssayMeasurementType cell sorting CHMO_0001085 1
+ studyAssayMeasurementType clinical chemistry analysis OBI_0000520 2
+ studyAssayMeasurementType copy number variation profiling OBI_0000537 3
+ studyAssayMeasurementType DNA methylation profiling OBI_0000634 4
+ studyAssayMeasurementType DNA methylation profiling (Bisulfite-Seq) OBI_0000748 5
+ studyAssayMeasurementType DNA methylation profiling (MeDIP-Seq) _OBI_0000634 6
+ studyAssayMeasurementType drug susceptibility _IDO_0000469 7
+ studyAssayMeasurementType environmental gene survey ENV_GENE_SURVEY 8
+ studyAssayMeasurementType genome sequencing ERO_0001183 9
+ studyAssayMeasurementType hematology OBI_0000630 10
+ studyAssayMeasurementType histology OBI_0600020 11
+ studyAssayMeasurementType Histone Modification (ChIP-Seq) OBI_0002017 12
+ studyAssayMeasurementType loss of heterozygosity profiling SO_0001786 13
+ studyAssayMeasurementType metabolite profiling OBI_0000366 14
+ studyAssayMeasurementType metagenome sequencing METAGENOME_SEQ 15
+ studyAssayMeasurementType protein expression profiling OBI_0000615 16
+ studyAssayMeasurementType protein identification ERO_0000346 17
+ studyAssayMeasurementType protein-DNA binding site identification PROTEIN_DNA_BINDING 18
+ studyAssayMeasurementType protein-protein interaction detection OBI_0000288 19
+ studyAssayMeasurementType protein-RNA binding (RIP-Seq) PROTEIN_RNA_BINDING 20
+ studyAssayMeasurementType SNP analysis OBI_0000435 21
+ studyAssayMeasurementType targeted sequencing TARGETED_SEQ 22
+ studyAssayMeasurementType transcription factor binding (ChIP-Seq) OBI_0002018 23
+ studyAssayMeasurementType transcription factor binding site identification OBI_0000291 24
+ studyAssayMeasurementType transcription profiling OBI_0000424 25
+ studyAssayMeasurementType transcription profiling EFO_0001032 26
+ studyAssayMeasurementType transcription profiling (Microarray) TRANSCRIPTION_PROF 27
+ studyAssayMeasurementType transcription profiling (RNA-Seq) OBI_0001271 28
+ studyAssayMeasurementType TRAP translational profiling TRAP_TRANS_PROF 29
+ studyAssayMeasurementType Other OTHER_MEASUREMENT 30
+ studyAssayOrganism Arabidopsis thaliana NCBITaxon_3702 0
+ studyAssayOrganism Bos taurus NCBITaxon_9913 1
+ studyAssayOrganism Caenorhabditis elegans NCBITaxon_6239 2
+ studyAssayOrganism Chlamydomonas reinhardtii NCBITaxon_3055 3
+ studyAssayOrganism Danio rerio (zebrafish) NCBITaxon_7955 4
+ studyAssayOrganism Dictyostelium discoideum NCBITaxon_44689 5
+ studyAssayOrganism Drosophila melanogaster NCBITaxon_7227 6
+ studyAssayOrganism Escherichia coli NCBITaxon_562 7
+ studyAssayOrganism Hepatitis C virus NCBITaxon_11103 8
+ studyAssayOrganism Homo sapiens NCBITaxon_9606 9
+ studyAssayOrganism Mus musculus NCBITaxon_10090 10
+ studyAssayOrganism Mycobacterium africanum NCBITaxon_33894 11
+ studyAssayOrganism Mycobacterium canetti NCBITaxon_78331 12
+ studyAssayOrganism Mycobacterium tuberculosis NCBITaxon_1773 13
+ studyAssayOrganism Mycoplasma pneumoniae NCBITaxon_2104 14
+ studyAssayOrganism Oryza sativa NCBITaxon_4530 15
+ studyAssayOrganism Plasmodium falciparum NCBITaxon_5833 16
+ studyAssayOrganism Pneumocystis carinii NCBITaxon_4754 17
+ studyAssayOrganism Rattus norvegicus NCBITaxon_10116 18
+ studyAssayOrganism Saccharomyces cerevisiae (brewer's yeast) NCBITaxon_4932 19
+ studyAssayOrganism Schizosaccharomyces pombe NCBITaxon_4896 20
+ studyAssayOrganism Takifugu rubripes NCBITaxon_31033 21
+ studyAssayOrganism Xenopus laevis NCBITaxon_8355 22
+ studyAssayOrganism Zea mays NCBITaxon_4577 23
+ studyAssayOrganism Other OTHER_TAXONOMY 24
+ studyAssayTechnologyType culture based drug susceptibility testing, single concentration CULTURE_DRUG_TEST_SINGLE 0
+ studyAssayTechnologyType culture based drug susceptibility testing, two concentrations CULTURE_DRUG_TEST_TWO 1
+ studyAssayTechnologyType culture based drug susceptibility testing, three or more concentrations (minimium inhibitory concentration measurement) CULTURE_DRUG_TEST_THREE 2
+ studyAssayTechnologyType DNA microarray OBI_0400148 3
+ studyAssayTechnologyType flow cytometry OBI_0000916 4
+ studyAssayTechnologyType gel electrophoresis OBI_0600053 5
+ studyAssayTechnologyType mass spectrometry OBI_0000470 6
+ studyAssayTechnologyType NMR spectroscopy OBI_0000623 7
+ studyAssayTechnologyType nucleotide sequencing OBI_0000626 8
+ studyAssayTechnologyType protein microarray OBI_0400149 9
+ studyAssayTechnologyType real time PCR OBI_0000893 10
+ studyAssayTechnologyType no technology required NO_TECHNOLOGY 11
+ studyAssayTechnologyType Other OTHER_TECHNOLOGY 12
+ studyAssayPlatform 210-MS GC Ion Trap (Varian) 210_MS_GC 0
+ studyAssayPlatform 220-MS GC Ion Trap (Varian) 220_MS_GC 1
+ studyAssayPlatform 225-MS GC Ion Trap (Varian) 225_MS_GC 2
+ studyAssayPlatform 240-MS GC Ion Trap (Varian) 240_MS_GC 3
+ studyAssayPlatform 300-MS quadrupole GC/MS (Varian) 300_MS_GCMS 4
+ studyAssayPlatform 320-MS LC/MS (Varian) 320_MS_LCMS 5
+ studyAssayPlatform 325-MS LC/MS (Varian) 325_MS_LCMS 6
+ studyAssayPlatform 320-MS GC/MS (Varian) 500_MS_GCMS 7
+ studyAssayPlatform 500-MS LC/MS (Varian) 500_MS_LCMS 8
+ studyAssayPlatform 800D (Jeol) 800D 9
+ studyAssayPlatform 910-MS TQ-FT (Varian) 910_MS_TQFT 10
+ studyAssayPlatform 920-MS TQ-FT (Varian) 920_MS_TQFT 11
+ studyAssayPlatform 3100 Mass Detector (Waters) 3100_MASS_D 12
+ studyAssayPlatform 6110 Quadrupole LC/MS (Agilent) 6110_QUAD_LCMS 13
+ studyAssayPlatform 6120 Quadrupole LC/MS (Agilent) 6120_QUAD_LCMS 14
+ studyAssayPlatform 6130 Quadrupole LC/MS (Agilent) 6130_QUAD_LCMS 15
+ studyAssayPlatform 6140 Quadrupole LC/MS (Agilent) 6140_QUAD_LCMS 16
+ studyAssayPlatform 6310 Ion Trap LC/MS (Agilent) 6310_ION_LCMS 17
+ studyAssayPlatform 6320 Ion Trap LC/MS (Agilent) 6320_ION_LCMS 18
+ studyAssayPlatform 6330 Ion Trap LC/MS (Agilent) 6330_ION_LCMS 19
+ studyAssayPlatform 6340 Ion Trap LC/MS (Agilent) 6340_ION_LCMS 20
+ studyAssayPlatform 6410 Triple Quadrupole LC/MS (Agilent) 6410_TRIPLE_LCMS 21
+ studyAssayPlatform 6430 Triple Quadrupole LC/MS (Agilent) 6430_TRIPLE_LCMS 22
+ studyAssayPlatform 6460 Triple Quadrupole LC/MS (Agilent) 6460_TRIPLE_LCMS 23
+ studyAssayPlatform 6490 Triple Quadrupole LC/MS (Agilent) 6490_TRIPLE_LCMS 24
+ studyAssayPlatform 6530 Q-TOF LC/MS (Agilent) 6530_Q_TOF_LCMS 25
+ studyAssayPlatform 6540 Q-TOF LC/MS (Agilent) 6540_Q_TOF_LCMS 26
+ studyAssayPlatform 6210 TOF LC/MS (Agilent) 6210_Q_TOF_LCMS 27
+ studyAssayPlatform 6220 TOF LC/MS (Agilent) 6220_Q_TOF_LCMS 28
+ studyAssayPlatform 6230 TOF LC/MS (Agilent) 6230_Q_TOF_LCMS 29
+ studyAssayPlatform 7000B Triple Quadrupole GC/MS (Agilent) 700B_TRIPLE_GCMS 30
+ studyAssayPlatform AccuTO DART (Jeol) ACCUTO_DART 31
+ studyAssayPlatform AccuTOF GC (Jeol) ACCUTOF_GC 32
+ studyAssayPlatform AccuTOF LC (Jeol) ACCUTOF_LC 33
+ studyAssayPlatform ACQUITY SQD (Waters) ACQUITY_SQD 34
+ studyAssayPlatform ACQUITY TQD (Waters) ACQUITY_TQD 35
+ studyAssayPlatform Agilent AGILENT 36
+ studyAssayPlatform Agilent 5975E GC/MSD (Agilent) AGILENT_ 5975E_GCMSD 37
+ studyAssayPlatform Agilent 5975T LTM GC/MSD (Agilent) AGILENT_5975T_LTM_GCMSD 38
+ studyAssayPlatform 5975C Series GC/MSD (Agilent) 5975C_GCMSD 39
+ studyAssayPlatform Affymetrix AFFYMETRIX 40
+ studyAssayPlatform amaZon ETD ESI Ion Trap (Bruker) AMAZON_ETD_ESI 41
+ studyAssayPlatform amaZon X ESI Ion Trap (Bruker) AMAZON_X_ESI 42
+ studyAssayPlatform apex-ultra hybrid Qq-FTMS (Bruker) APEX_ULTRA_QQ_FTMS 43
+ studyAssayPlatform API 2000 (AB Sciex) API_2000 44
+ studyAssayPlatform API 3200 (AB Sciex) API_3200 45
+ studyAssayPlatform API 3200 QTRAP (AB Sciex) API_3200_QTRAP 46
+ studyAssayPlatform API 4000 (AB Sciex) API_4000 47
+ studyAssayPlatform API 4000 QTRAP (AB Sciex) API_4000_QTRAP 48
+ studyAssayPlatform API 5000 (AB Sciex) API_5000 49
+ studyAssayPlatform API 5500 (AB Sciex) API_5500 50
+ studyAssayPlatform API 5500 QTRAP (AB Sciex) API_5500_QTRAP 51
+ studyAssayPlatform Applied Biosystems Group (ABI) APPLIED_BIOSYSTEMS 52
+ studyAssayPlatform AQI Biosciences AQI_BIOSCIENCES 53
+ studyAssayPlatform Atmospheric Pressure GC (Waters) ATMOS_GC 54
+ studyAssayPlatform autoflex III MALDI-TOF MS (Bruker) AUTOFLEX_III_MALDI_TOF_MS 55
+ studyAssayPlatform autoflex speed(Bruker) AUTOFLEX_SPEED 56
+ studyAssayPlatform AutoSpec Premier (Waters) AUTOSPEC_PREMIER 57
+ studyAssayPlatform AXIMA Mega TOF (Shimadzu) AXIMA_MEGA_TOF 58
+ studyAssayPlatform AXIMA Performance MALDI TOF/TOF (Shimadzu) AXIMA_PERF_MALDI_TOF 59
+ studyAssayPlatform A-10 Analyzer (Apogee) A_10_ANALYZER 60
+ studyAssayPlatform A-40-MiniFCM (Apogee) A_40_MINIFCM 61
+ studyAssayPlatform Bactiflow (Chemunex SA) BACTIFLOW 62
+ studyAssayPlatform Base4innovation BASE4INNOVATION 63
+ studyAssayPlatform BD BACTEC MGIT 320 BD_BACTEC_MGIT_320 64
+ studyAssayPlatform BD BACTEC MGIT 960 BD_BACTEC_MGIT_960 65
+ studyAssayPlatform BD Radiometric BACTEC 460TB BD_RADIO_BACTEC_460TB 66
+ studyAssayPlatform BioNanomatrix BIONANOMATRIX 67
+ studyAssayPlatform Cell Lab Quanta SC (Becman Coulter) CELL_LAB_QUANTA_SC 68
+ studyAssayPlatform Clarus 560 D GC/MS (PerkinElmer) CLARUS_560_D_GCMS 69
+ studyAssayPlatform Clarus 560 S GC/MS (PerkinElmer) CLARUS_560_S_GCMS 70
+ studyAssayPlatform Clarus 600 GC/MS (PerkinElmer) CLARUS_600_GCMS 71
+ studyAssayPlatform Complete Genomics COMPLETE_GENOMICS 72
+ studyAssayPlatform Cyan (Dako Cytomation) CYAN 73
+ studyAssayPlatform CyFlow ML (Partec) CYFLOW_ML 74
+ studyAssayPlatform Cyow SL (Partec) CYFLOW_SL 75
+ studyAssayPlatform CyFlow SL3 (Partec) CYFLOW_SL3 76
+ studyAssayPlatform CytoBuoy (Cyto Buoy Inc) CYTOBUOY 77
+ studyAssayPlatform CytoSence (Cyto Buoy Inc) CYTOSENCE 78
+ studyAssayPlatform CytoSub (Cyto Buoy Inc) CYTOSUB 79
+ studyAssayPlatform Danaher DANAHER 80
+ studyAssayPlatform DFS (Thermo Scientific) DFS 81
+ studyAssayPlatform Exactive(Thermo Scientific) EXACTIVE 82
+ studyAssayPlatform FACS Canto (Becton Dickinson) FACS_CANTO 83
+ studyAssayPlatform FACS Canto2 (Becton Dickinson) FACS_CANTO2 84
+ studyAssayPlatform FACS Scan (Becton Dickinson) FACS_SCAN 85
+ studyAssayPlatform FC 500 (Becman Coulter) FC_500 86
+ studyAssayPlatform GCmate II GC/MS (Jeol) GCMATE_II 87
+ studyAssayPlatform GCMS-QP2010 Plus (Shimadzu) GCMS_QP2010_PLUS 88
+ studyAssayPlatform GCMS-QP2010S Plus (Shimadzu) GCMS_QP2010S_PLUS 89
+ studyAssayPlatform GCT Premier (Waters) GCT_PREMIER 90
+ studyAssayPlatform GENEQ GENEQ 91
+ studyAssayPlatform Genome Corp. GENOME_CORP 92
+ studyAssayPlatform GenoVoxx GENOVOXX 93
+ studyAssayPlatform GnuBio GNUBIO 94
+ studyAssayPlatform Guava EasyCyte Mini (Millipore) GUAVA_EASYCYTE_MINI 95
+ studyAssayPlatform Guava EasyCyte Plus (Millipore) GUAVA_EASYCYTE_PLUS 96
+ studyAssayPlatform Guava Personal Cell Analysis (Millipore) GUAVA_PERSONAL_CELL 97
+ studyAssayPlatform Guava Personal Cell Analysis-96 (Millipore) GUAVA_PERSONAL_CELL_96 98
+ studyAssayPlatform Helicos BioSciences HELICOS_BIO 99
+ studyAssayPlatform Illumina ILLUMINA 100
+ studyAssayPlatform Indirect proportion method on LJ medium INDIRECT_LJ_MEDIUM 101
+ studyAssayPlatform Indirect proportion method on Middlebrook Agar 7H9 INDIRECT_AGAR_7H9 102
+ studyAssayPlatform Indirect proportion method on Middlebrook Agar 7H10 INDIRECT_AGAR_7H10 103
+ studyAssayPlatform Indirect proportion method on Middlebrook Agar 7H11 INDIRECT_AGAR_7H11 104
+ studyAssayPlatform inFlux Analyzer (Cytopeia) INFLUX_ANALYZER 105
+ studyAssayPlatform Intelligent Bio-Systems INTELLIGENT_BIOSYSTEMS 106
+ studyAssayPlatform ITQ 700 (Thermo Scientific) ITQ_700 107
+ studyAssayPlatform ITQ 900 (Thermo Scientific) ITQ_900 108
+ studyAssayPlatform ITQ 1100 (Thermo Scientific) ITQ_1100 109
+ studyAssayPlatform JMS-53000 SpiralTOF (Jeol) JMS_53000_SPIRAL 110
+ studyAssayPlatform LaserGen LASERGEN 111
+ studyAssayPlatform LCMS-2020 (Shimadzu) LCMS_2020 112
+ studyAssayPlatform LCMS-2010EV (Shimadzu) LCMS_2010EV 113
+ studyAssayPlatform LCMS-IT-TOF (Shimadzu) LCMS_IT_TOF 114
+ studyAssayPlatform Li-Cor LI_COR 115
+ studyAssayPlatform Life Tech LIFE_TECH 116
+ studyAssayPlatform LightSpeed Genomics LIGHTSPEED_GENOMICS 117
+ studyAssayPlatform LCT Premier XE (Waters) LCT_PREMIER_XE 118
+ studyAssayPlatform LCQ Deca XP MAX (Thermo Scientific) LCQ_DECA_XP_MAX 119
+ studyAssayPlatform LCQ Fleet (Thermo Scientific) LCQ_FLEET 120
+ studyAssayPlatform LXQ (Thermo Scientific) LXQ_THERMO 121
+ studyAssayPlatform LTQ Classic (Thermo Scientific) LTQ_CLASSIC 122
+ studyAssayPlatform LTQ XL (Thermo Scientific) LTQ_XL 123
+ studyAssayPlatform LTQ Velos (Thermo Scientific) LTQ_VELOS 124
+ studyAssayPlatform LTQ Orbitrap Classic (Thermo Scientific) LTQ_ORBITRAP_CLASSIC 125
+ studyAssayPlatform LTQ Orbitrap XL (Thermo Scientific) LTQ_ORBITRAP_XL 126
+ studyAssayPlatform LTQ Orbitrap Discovery (Thermo Scientific) LTQ_ORBITRAP_DISCOVERY 127
+ studyAssayPlatform LTQ Orbitrap Velos (Thermo Scientific) LTQ_ORBITRAP_VELOS 128
+ studyAssayPlatform Luminex 100 (Luminex) LUMINEX_100 129
+ studyAssayPlatform Luminex 200 (Luminex) LUMINEX_200 130
+ studyAssayPlatform MACS Quant (Miltenyi) MACS_QUANT 131
+ studyAssayPlatform MALDI SYNAPT G2 HDMS (Waters) MALDI_SYNAPT_G2_HDMS 132
+ studyAssayPlatform MALDI SYNAPT G2 MS (Waters) MALDI_SYNAPT_G2_MS 133
+ studyAssayPlatform MALDI SYNAPT HDMS (Waters) MALDI_SYNAPT_HDMS 134
+ studyAssayPlatform MALDI SYNAPT MS (Waters) MALDI_SYNAPT_MS 135
+ studyAssayPlatform MALDI micro MX (Waters) MALDI_MICROMX 136
+ studyAssayPlatform maXis (Bruker) MAXIS 137
+ studyAssayPlatform maXis G4 (Bruker) MAXISG4 138
+ studyAssayPlatform microflex LT MALDI-TOF MS (Bruker) MICROFLEX_LT_MALDI_TOF_MS 139
+ studyAssayPlatform microflex LRF MALDI-TOF MS (Bruker) MICROFLEX_LRF_MALDI_TOF_MS 140
+ studyAssayPlatform microflex III MALDI-TOF MS (Bruker) MICROFLEX_III_TOF_MS 141
+ studyAssayPlatform micrOTOF II ESI TOF (Bruker) MICROTOF_II_ESI_TOF 142
+ studyAssayPlatform micrOTOF-Q II ESI-Qq-TOF (Bruker) MICROTOF_Q_II_ESI_QQ_TOF 143
+ studyAssayPlatform microplate Alamar Blue (resazurin) colorimetric method MICROPLATE_ALAMAR_BLUE_COLORIMETRIC 144
+ studyAssayPlatform Mstation (Jeol) MSTATION 145
+ studyAssayPlatform MSQ Plus (Thermo Scientific) MSQ_PLUS 146
+ studyAssayPlatform NABsys NABSYS 147
+ studyAssayPlatform Nanophotonics Biosciences NANOPHOTONICS_BIOSCIENCES 148
+ studyAssayPlatform Network Biosystems NETWORK_BIOSYSTEMS 149
+ studyAssayPlatform Nimblegen NIMBLEGEN 150
+ studyAssayPlatform Oxford Nanopore Technologies OXFORD_NANOPORE_TECHNOLOGIES 151
+ studyAssayPlatform Pacific Biosciences PACIFIC_BIOSCIENCES 152
+ studyAssayPlatform Population Genetics Technologies POPULATION_GENETICS_TECHNOLOGIES 153
+ studyAssayPlatform Q1000GC UltraQuad (Jeol) Q1000GC_ULTRAQUAD 154
+ studyAssayPlatform Quattro micro API (Waters) QUATTRO_MICRO_API 155
+ studyAssayPlatform Quattro micro GC (Waters) QUATTRO_MICRO_GC 156
+ studyAssayPlatform Quattro Premier XE (Waters) QUATTRO_PREMIER_XE 157
+ studyAssayPlatform QSTAR (AB Sciex) QSTAR 158
+ studyAssayPlatform Reveo REVEO 159
+ studyAssayPlatform Roche ROCHE 160
+ studyAssayPlatform Seirad SEIRAD 161
+ studyAssayPlatform solariX hybrid Qq-FTMS (Bruker) SOLARIX_HYBRID_QQ_FTMS 162
+ studyAssayPlatform Somacount (Bently Instruments) SOMACOUNT 163
+ studyAssayPlatform SomaScope (Bently Instruments) SOMASCOPE 164
+ studyAssayPlatform SYNAPT G2 HDMS (Waters) SYNAPT_G2_HDMS 165
+ studyAssayPlatform SYNAPT G2 MS (Waters) SYNAPT_G2_MS 166
+ studyAssayPlatform SYNAPT HDMS (Waters) SYNAPT_HDMS 167
+ studyAssayPlatform SYNAPT MS (Waters) SYNAPT_MS 168
+ studyAssayPlatform TripleTOF 5600 (AB Sciex) TRIPLETOF_5600 169
+ studyAssayPlatform TSQ Quantum Ultra (Thermo Scientific) TSQ_QUANTUM_ULTRA 170
+ studyAssayPlatform TSQ Quantum Access (Thermo Scientific) TSQ_QUANTUM_ACCESS 171
+ studyAssayPlatform TSQ Quantum Access MAX (Thermo Scientific) TSQ_QUANTUM_ACCESS_MAX 172
+ studyAssayPlatform TSQ Quantum Discovery MAX (Thermo Scientific) TSQ_QUANTUM_DISCOVERY_MAX 173
+ studyAssayPlatform TSQ Quantum GC (Thermo Scientific) TSQ_QUANTUM_GC 174
+ studyAssayPlatform TSQ Quantum XLS (Thermo Scientific) TSQ_QUANTUM_XLS 175
+ studyAssayPlatform TSQ Vantage (Thermo Scientific) TSQ_VANTAGE 176
+ studyAssayPlatform ultrafleXtreme MALDI-TOF MS (Bruker) ULTRAFLEXTREME_MALDI_TOF_MS 177
+ studyAssayPlatform VisiGen Biotechnologies VISIGEN_BIO 178
+ studyAssayPlatform Xevo G2 QTOF (Waters) XEVO_G2_QTOF 179
+ studyAssayPlatform Xevo QTof MS (Waters) XEVO_QTOF_MS 180
+ studyAssayPlatform Xevo TQ MS (Waters) XEVO_TQ_MS 181
+ studyAssayPlatform Xevo TQ-S (Waters) XEVO_TQ_S 182
studyAssayPlatform Other OTHER_PLATFORM 183
\ No newline at end of file
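The rows above follow the column layout declared in the #controlledVocabulary header (DatasetField, Value, identifier, displayOrder), with an empty leading cell so values align under the block's columns. Below is a minimal sketch of reading one such tab-separated row; the actual loading mechanism Dataverse uses for these TSV blocks is not part of this diff, and the class name is hypothetical:

// Minimal sketch: mapping one tab-separated #controlledVocabulary row to its
// declared columns. How Dataverse actually loads these blocks is not shown here.
class ControlledVocabRowSketch {
    public static void main(String[] args) {
        String tsvLine = "\tstudyDesignType\tCase Control\tEFO_0001427\t0";
        String[] cols = tsvLine.split("\t");
        // cols[0] is the empty leading cell before the first tab
        System.out.println("DatasetField: " + cols[1]);
        System.out.println("Value:        " + cols[2]);
        System.out.println("identifier:   " + cols[3]);
        System.out.println("displayOrder: " + Integer.parseInt(cols[4]));
    }
}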
diff --git a/scripts/search/data/shape/shapefile.zip b/scripts/search/data/shape/shapefile.zip
new file mode 100644
index 00000000000..c4da60f0b80
Binary files /dev/null and b/scripts/search/data/shape/shapefile.zip differ
diff --git a/scripts/zipdownload/src/main/java/edu/harvard/iq/dataverse/custom/service/download/ZipDownloadService.java b/scripts/zipdownload/src/main/java/edu/harvard/iq/dataverse/custom/service/download/ZipDownloadService.java
index cda7cbb9505..4b66ee770d5 100644
--- a/scripts/zipdownload/src/main/java/edu/harvard/iq/dataverse/custom/service/download/ZipDownloadService.java
+++ b/scripts/zipdownload/src/main/java/edu/harvard/iq/dataverse/custom/service/download/ZipDownloadService.java
@@ -127,6 +127,7 @@ public void processFiles() {
}
         Set<String> zippedFolders = new HashSet<>();
+        Set<String> fileNamesList = new HashSet<>();
for (String [] fileEntry : jobFiles) {
String storageLocation = fileEntry[0];
@@ -144,13 +145,15 @@ public void processFiles() {
InputStream inputStream = this.directAccessUtil.openDirectAccess(storageLocation);
- // (potential?) TODO: String zipEntryName = checkZipEntryName(fileName);
+ String zipEntryName = checkZipEntryName(fileName, fileNamesList);
// this may not be needed anymore - some extra sanitizing of the file
// name we used to have to do - since all the values in a current Dataverse
- // database may already be santized enough.
+            // database may already be sanitized enough.
+            // (Edit: yes, this is still needed - some datasets contain multiple
+            // files with the same name; this method takes care of that)
if (inputStream != null && this.zipOutputStream != null) {
- ZipEntry entry = new ZipEntry(fileName);
+ ZipEntry entry = new ZipEntry(zipEntryName);
byte[] bytes = new byte[2 * 8192];
int read = 0;
@@ -158,8 +161,8 @@ public void processFiles() {
try {
// Does this file have a folder name?
- if (hasFolder(fileName)) {
- addFolderToZipStream(getFolderName(fileName), zippedFolders);
+ if (hasFolder(zipEntryName)) {
+ addFolderToZipStream(getFolderName(zipEntryName), zippedFolders);
}
this.zipOutputStream.putNextEntry(entry);
@@ -168,7 +171,6 @@ public void processFiles() {
this.zipOutputStream.write(bytes, 0, read);
readSize += read;
}
- inputStream.close();
this.zipOutputStream.closeEntry();
/*if (fileSize == readSize) {
@@ -178,6 +180,12 @@ public void processFiles() {
}*/
} catch (IOException ioex) {
System.err.println("Failed to compress "+storageLocation);
+ } finally {
+ try {
+ inputStream.close();
+ } catch (IOException ioexIgnore) {
+ System.err.println("Warning: IO exception trying to close input stream - "+storageLocation);
+ }
}
} else {
System.err.println("Failed to access "+storageLocation);
@@ -237,4 +245,21 @@ private void addFolderToZipStream(String folderName, Set<String> zippedFolders)
}
}
}
+
+    // Check for duplicate entry names and deduplicate by appending a numeric suffix:
+    private String checkZipEntryName(String originalName, Set<String> fileNames) {
+ String name = originalName;
+ int fileSuffix = 1;
+ int extensionIndex = originalName.lastIndexOf(".");
+
+ while (fileNames.contains(name)) {
+ if (extensionIndex != -1) {
+ name = originalName.substring(0, extensionIndex) + "_" + fileSuffix++ + originalName.substring(extensionIndex);
+ } else {
+ name = originalName + "_" + fileSuffix++;
+ }
+ }
+ fileNames.add(name);
+ return name;
+ }
}
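For context, the duplicate-name handling added above can be exercised on its own. Below is a minimal standalone sketch (the class name and sample values are hypothetical) that mirrors the checkZipEntryName logic from this patch and shows how repeated names pick up numeric suffixes before the extension:

import java.util.HashSet;
import java.util.Set;

public class ZipEntryNameDemo {

    // Mirrors checkZipEntryName above: append _1, _2, ... before the
    // extension until the name is unique within the current zip job.
    private static String checkZipEntryName(String originalName, Set<String> fileNames) {
        String name = originalName;
        int fileSuffix = 1;
        int extensionIndex = originalName.lastIndexOf(".");

        while (fileNames.contains(name)) {
            if (extensionIndex != -1) {
                name = originalName.substring(0, extensionIndex) + "_" + fileSuffix++ + originalName.substring(extensionIndex);
            } else {
                name = originalName + "_" + fileSuffix++;
            }
        }
        fileNames.add(name);
        return name;
    }

    public static void main(String[] args) {
        Set<String> seen = new HashSet<>();
        System.out.println(checkZipEntryName("data.csv", seen)); // data.csv
        System.out.println(checkZipEntryName("data.csv", seen)); // data_1.csv
        System.out.println(checkZipEntryName("data.csv", seen)); // data_2.csv
        System.out.println(checkZipEntryName("README", seen));   // README
        System.out.println(checkZipEntryName("README", seen));   // README_1
    }
}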
diff --git a/src/main/java/ValidationMessages.properties b/src/main/java/ValidationMessages.properties
index 9c4f69252cf..4dfce141f41 100644
--- a/src/main/java/ValidationMessages.properties
+++ b/src/main/java/ValidationMessages.properties
@@ -18,6 +18,7 @@ dataverse.aliasLength=Alias must be at most 60 characters.
dataverse.aliasNotnumber=Alias should not be a number
dataverse.nameIllegalCharacters=Found an illegal character(s). Valid characters are a-Z, 0-9, '_', and '-'.
dataverse.category=Please select a category for your dataverse.
+dataverse.contact=Please enter a valid email address.
contenttype.slash=Content-Type must contain a slash
setspec.notNumber=Setspec should not be a number
setspec.maxLength=Setspec must be at most 30 characters.
@@ -47,3 +48,4 @@ password.current=Please enter your current password.
password.validate=Password reset page default email message.
guestbook.name=Enter a name for the guestbook
+
diff --git a/src/main/java/ValidationMessages_fr.properties b/src/main/java/ValidationMessages_fr.properties
deleted file mode 100644
index 40c43b00969..00000000000
--- a/src/main/java/ValidationMessages_fr.properties
+++ /dev/null
@@ -1,49 +0,0 @@
-user.firstName=Veuillez entrer votre prénom.
-user.lastName=Veuillez entrer votre nom de famille.
-user.invalidEmail=Veuillez entrer une adresse courriel valide.
-user.enterUsername=Veuillez entrer un nom d'utilisateur.
-user.usernameLength=Le nom d'utilisateur doit comporter entre 2 et 60 caractères.
-user.illegalCharacters=Caractère(s) non valide(s) utilisé(s). Les caractères valides sont a-Z, 0-9, '_', '-' et '.'.
-
-user.enterNickname=Veuillez entrer un pseudonyme.
-user.nicknameLength=Le pseudonyme ne peut excéder 30 caractères.
-user.nicknameNotnumber=Le pseudonyme ne devrait pas être un nombre
-
-dataset.templatename=Veuillez ajouter un nom pour le modèle d'ensemble de données.
-dataset.nameLength=Le nom ne peut excéder 255 caractères.
-
-dataverse.name=Veuillez entrer un nom.
-dataverse.alias=Veuillez entrer un alias.
-dataverse.aliasLength=L'alias ne peut excéder 60 caractères.
-dataverse.aliasNotnumber=L'alias ne devrait pas être un nombre.
-dataverse.nameIllegalCharacters=Caractère(s) non valide(s) utilisé(s). Les caractères valides sont a-Z, 0-9, '_', '-'.
-dataverse.category=Veuillez sélectionner une catégorie pour votre dataverse.
-contenttype.slash=Le type de contenu doit contenir une barre oblique.
-setspec.notNumber=Le nom (Setspec) ne devrait pas être un nombre.
-setspec.maxLength=Le nom (Setspec) ne peut excéder 30 caractères.
-
-role.name=Un rôle doit avoir un nom.
-desc.maxLength=Le description ne peut excéder 255 caractères.
-alias.maxLength=L'alias ne peut excéder 26 caractères.
-alias.illegalCharacters=L'alias ne peut être vide. Les caractères valides sont a-Z, 0-9, '_', '-'.
-
-custom.response=Veuillez entrer la réponse.
-custom.questiontext=Veuillez entrer le texte de la question.
-filename.illegalCharacters=Le nom du fichier ne peut contenir aucun des caractères suivants\u00A0: \ / : * ? " < > | ; # .
-directoryname.illegalCharacters=Le nom du répertoire ne peut pas être suivi ni précédé d'un caractère séparateur de fichiers.
-filename.blank=Veuillez spécifier un nom de fichier.
-
-
-map.layername=Veuillez spécifier un nom de couche cartographique.
-map.layerlink=Veuillez spécifier un lien de couche cartographique.
-map.link=Veuillez spécifier un lien de carte intégrée.
-map.imagelink=Veuillez spécifier un lien d'image de carte.
-map.username=Veuillez spécifier un nom d'utilisateur WorldMap.
-
-oauth.username=Veuillez entrer votre nom d'utilisateur.
-
-password.retype=Le nouveau mot de passe est vide\u00A0: veuillez le retaper à nouveau.
-password.current=Veuillez entrer votre mot de passe actuel.
-password.validate=Page de réinitialisation du mot de passe par défaut.
-
-guestbook.name=Saisir un nom pour le registre de visiteurs.
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
index ca9e55e2f92..ba503a18d22 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
@@ -118,20 +118,11 @@ public String reserveIdentifier(String identifier, Map<String, String> metadata,
} else {
rc.setUrl(target);
}
- try {
- DataCiteRESTfullClient client = getClient();
- retString = client.postMetadata(xmlMetadata);
- } catch (UnsupportedEncodingException ex) {
- Logger.getLogger(DOIDataCiteRegisterService.class.getName()).log(Level.SEVERE, null, ex);
- }
- } else {
- try {
- DataCiteRESTfullClient client = getClient();
- retString = client.postMetadata(xmlMetadata);
- } catch (UnsupportedEncodingException ex) {
- Logger.getLogger(DOIDataCiteRegisterService.class.getName()).log(Level.SEVERE, null, ex);
- }
}
+
+ DataCiteRESTfullClient client = getClient();
+ retString = client.postMetadata(xmlMetadata);
+
return retString;
}
@@ -149,22 +140,12 @@ public String registerIdentifier(String identifier, Map<String, String> metadata
} else {
rc.setUrl(target);
}
- try {
- DataCiteRESTfullClient client = getClient();
- retString = client.postMetadata(xmlMetadata);
- client.postUrl(identifier.substring(identifier.indexOf(":") + 1), target);
- } catch (UnsupportedEncodingException ex) {
- Logger.getLogger(DOIDataCiteRegisterService.class.getName()).log(Level.SEVERE, null, ex);
- }
- } else {
- try {
- DataCiteRESTfullClient client = getClient();
- retString = client.postMetadata(xmlMetadata);
- client.postUrl(identifier.substring(identifier.indexOf(":") + 1), target);
- } catch (UnsupportedEncodingException ex) {
- Logger.getLogger(DOIDataCiteRegisterService.class.getName()).log(Level.SEVERE, null, ex);
- }
}
+
+ DataCiteRESTfullClient client = getClient();
+ retString = client.postMetadata(xmlMetadata);
+ client.postUrl(identifier.substring(identifier.indexOf(":") + 1), target);
+
return retString;
}
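The refactoring above removes two identical try/catch blocks that were duplicated across both branches of the if/else and hoists the shared postMetadata call out of the conditional, so failures now propagate to the caller instead of being logged and swallowed (which could leave retString null). A minimal sketch of the pattern, with hypothetical names rather than the actual Dataverse API:

import java.io.IOException;

// Illustrative only - not the actual Dataverse API.
class RegisterRefactorSketch {

    private String post() throws IOException {
        return "OK"; // stand-in for client.postMetadata(xmlMetadata)
    }

    // Before: the same try/catch duplicated in both branches; failures
    // were logged and swallowed, so callers could receive null.
    String before(boolean hasUrl) {
        String result = null;
        if (hasUrl) {
            try { result = post(); } catch (IOException ex) { /* logged */ }
        } else {
            try { result = post(); } catch (IOException ex) { /* logged */ }
        }
        return result;
    }

    // After: branch only on what actually differs (setting the URL),
    // make the shared call once, and let the exception reach the caller.
    String after(boolean hasUrl) throws IOException {
        // branch-specific setup (e.g. rc.setUrl(...)) elided
        return post();
    }
}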
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCiteRESTfullClient.java b/src/main/java/edu/harvard/iq/dataverse/DataCiteRESTfullClient.java
index 913dc4d0034..491f19ab36c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataCiteRESTfullClient.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataCiteRESTfullClient.java
@@ -9,8 +9,6 @@
import java.io.Closeable;
import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -102,24 +100,19 @@ public String getUrl(String doi) {
* @param url
* @return
*/
- public String postUrl(String doi, String url) throws UnsupportedEncodingException {
+ public String postUrl(String doi, String url) throws IOException {
HttpPost httpPost = new HttpPost(this.url + "/doi");
httpPost.setHeader("Content-Type", "text/plain;charset=UTF-8");
httpPost.setEntity(new StringEntity("doi=" + doi + "\nurl=" + url, "utf-8"));
- try {
- HttpResponse response = httpClient.execute(httpPost,context);
- String data = EntityUtils.toString(response.getEntity(), encoding);
- if (response.getStatusLine().getStatusCode() != 201) {
- String errMsg = "Response code: " + response.getStatusLine().getStatusCode() + ", " + data;
- logger.log(Level.SEVERE,errMsg);
- throw new RuntimeException(errMsg);
- }
- return data;
- } catch (IOException ioe) {
- logger.log(Level.SEVERE,"IOException when post url");
- throw new RuntimeException("IOException when post url", ioe);
+ HttpResponse response = httpClient.execute(httpPost, context);
+ String data = EntityUtils.toString(response.getEntity(), encoding);
+ if (response.getStatusLine().getStatusCode() != 201) {
+ String errMsg = "Response from postUrl: " + response.getStatusLine().getStatusCode() + ", " + data;
+ logger.log(Level.SEVERE, errMsg);
+ throw new IOException(errMsg);
}
+ return data;
}
/**
@@ -135,7 +128,7 @@ public String getMetadata(String doi) {
HttpResponse response = httpClient.execute(httpGet,context);
String data = EntityUtils.toString(response.getEntity(), encoding);
if (response.getStatusLine().getStatusCode() != 200) {
- String errMsg = "Response code: " + response.getStatusLine().getStatusCode() + ", " + data;
+ String errMsg = "Response from getMetadata: " + response.getStatusLine().getStatusCode() + ", " + data;
logger.log(Level.SEVERE, errMsg);
throw new RuntimeException(errMsg);
}
@@ -152,21 +145,16 @@ public String getMetadata(String doi) {
* @param doi
* @return boolean true if identifier already exists on DataCite site
*/
- public boolean testDOIExists(String doi) {
- HttpGet httpGet = new HttpGet(this.url + "/metadata/" + doi);
- httpGet.setHeader("Accept", "application/xml");
- try {
- HttpResponse response = httpClient.execute(httpGet,context);
- if (response.getStatusLine().getStatusCode() != 200) {
- EntityUtils.consumeQuietly(response.getEntity());
- return false;
- }
+ public boolean testDOIExists(String doi) throws IOException {
+ HttpGet httpGet = new HttpGet(this.url + "/metadata/" + doi);
+ httpGet.setHeader("Accept", "application/xml");
+ HttpResponse response = httpClient.execute(httpGet, context);
+ if (response.getStatusLine().getStatusCode() != 200) {
EntityUtils.consumeQuietly(response.getEntity());
- return true;
- } catch (IOException ioe) {
- logger.log(Level.SEVERE, "IOException when get metadata");
- throw new RuntimeException("IOException when get metadata", ioe);
- }
+ return false;
+ }
+ EntityUtils.consumeQuietly(response.getEntity());
+ return true;
}
/**
@@ -182,7 +170,7 @@ public String postMetadata(String metadata) throws IOException {
HttpResponse response = httpClient.execute(httpPost, context);
String data = EntityUtils.toString(response.getEntity(), encoding);
if (response.getStatusLine().getStatusCode() != 201) {
- String errMsg = "Response code: " + response.getStatusLine().getStatusCode() + ", " + data;
+ String errMsg = "Response from postMetadata: " + response.getStatusLine().getStatusCode() + ", " + data;
logger.log(Level.SEVERE, errMsg);
throw new IOException(errMsg);
}
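The same policy change applies here: postUrl and testDOIExists now declare throws IOException instead of catching the exception and rethrowing it wrapped in a RuntimeException, so the compiler forces callers to deal with a DataCite outage explicitly. A minimal sketch of the resulting contract, using hypothetical names rather than the real client:

import java.io.IOException;

// Illustrative sketch of the exception-handling change; names are hypothetical.
class TestDoiExistsSketch {

    interface MetadataFetch {
        int statusCode() throws IOException; // stand-in for the HTTP GET
    }

    // After the change: 200 -> true, other statuses -> false, and an
    // IOException propagates as a checked exception the caller must handle,
    // rather than surfacing as an unchecked RuntimeException as before.
    static boolean doiExists(MetadataFetch fetch) throws IOException {
        return fetch.statusCode() == 200;
    }

    public static void main(String[] args) throws IOException {
        System.out.println(doiExists(() -> 200)); // true: DOI registered
        System.out.println(doiExists(() -> 404)); // false: DOI not registered
        // doiExists(() -> { throw new IOException("DataCite unreachable"); });
        // -> the caller sees the failure explicitly and decides how to recover
    }
}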
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index 4d04ee1889d..1aadcc9851e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -580,7 +580,7 @@ public void findFileMetadataOptimizedExperimental(Dataset owner, DatasetVersion
int i = 0;
- List