diff --git a/doc/release-notes/9955-Signposting-updates.md b/doc/release-notes/9955-Signposting-updates.md new file mode 100644 index 00000000000..bf0c7bc646b --- /dev/null +++ b/doc/release-notes/9955-Signposting-updates.md @@ -0,0 +1 @@ +This release fixes two issues (#9952, #9953) where the Signposting output did not match the Signposting specification. \ No newline at end of file diff --git a/doc/release-notes/9983-unique-constraints.md b/doc/release-notes/9983-unique-constraints.md new file mode 100644 index 00000000000..bb3ed200c62 --- /dev/null +++ b/doc/release-notes/9983-unique-constraints.md @@ -0,0 +1,9 @@ +This release adds two missing database constraints that will ensure that the externalvocabularyvalue table only has one entry for each uri and that the oaiset table only has one set for each spec. (In the very unlikely case that your existing database has duplicate entries now, install would fail. This can be checked by running + +SELECT uri, count(*) FROM externalvocabularyvalue group by uri; + +and + +SELECT spec, count(*) FROM oaiset group by spec; + +and then removing any duplicate rows (where count>1).) 
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 4d9466703e4..7d633dc267b 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -2171,11 +2171,11 @@ Signposting involves the addition of a `Link ;rel="cite-as", ;rel="describedby";type="application/vnd.citationstyles.csl+json",;rel="describedby";type="application/json+ld", ;rel="type",;rel="type", https://demo.dataverse.org/api/datasets/:persistentId/versions/1.0/customlicense?persistentId=doi:10.5072/FK2/YD5QDG;rel="license", ; rel="linkset";type="application/linkset+json"`` +``Link: ;rel="cite-as", ;rel="describedby";type="application/vnd.citationstyles.csl+json",;rel="describedby";type="application/ld+json", ;rel="type",;rel="type", ;rel="license", ; rel="linkset";type="application/linkset+json"`` The URL for linkset information is discoverable under the ``rel="linkset";type="application/linkset+json`` entry in the "Link" header, such as in the example above. -The reponse includes a JSON object conforming to the `Signposting `__ specification. +The response includes a JSON object conforming to the `Signposting `__ specification. As part of this conformance, unlike most Dataverse API responses, the output is not wrapped in a ``{"status":"OK","data":{`` object. Signposting is not supported for draft dataset versions. .. 
code-block:: bash diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index 89f8c11d076..70d023aee2c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -19,6 +19,8 @@ import javax.ejb.EJB; import javax.ejb.Stateless; +import javax.ejb.TransactionAttribute; +import javax.ejb.TransactionAttributeType; import javax.inject.Named; import javax.json.Json; import javax.json.JsonArray; @@ -34,6 +36,7 @@ import javax.persistence.NoResultException; import javax.persistence.NonUniqueResultException; import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceException; import javax.persistence.TypedQuery; import org.apache.commons.codec.digest.DigestUtils; @@ -59,28 +62,28 @@ public class DatasetFieldServiceBean implements java.io.Serializable { @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; - + private static final Logger logger = Logger.getLogger(DatasetFieldServiceBean.class.getCanonicalName()); @EJB SettingsServiceBean settingsService; private static final String NAME_QUERY = "SELECT dsfType from DatasetFieldType dsfType where dsfType.name= :fieldName"; - + /* * External vocabulary support: These fields cache information from the CVocConf * setting which controls how Dataverse connects specific metadata block fields * to third-party Javascripts and external vocabulary services to allow users to * input values from a vocabulary(ies) those services manage. 
*/ - + //Configuration json keyed by the id of the 'parent' DatasetFieldType Map cvocMap = null; - + //Configuration json keyed by the id of the child DatasetFieldType specified as the 'term-uri-field' //Note that for primitive fields, the prent and term-uri-field are the same and these maps have the same entry Map cvocMapByTermUri = null; - + //The hash of the existing CVocConf setting. Used to determine when the setting has changed and it needs to be re-parsed to recreate the cvocMaps String oldHash = null; @@ -90,7 +93,7 @@ public List findAllAdvancedSearchFieldTypes() { public List findAllFacetableFieldTypes() { return em.createNamedQuery("DatasetFieldType.findAllFacetable", DatasetFieldType.class) - .getResultList(); + .getResultList(); } public List findFacetableFieldTypesByMetadataBlock(Long metadataBlockId) { @@ -121,7 +124,7 @@ public DatasetFieldType findByName(String name) { } catch (NoResultException e) { return null; } - + } /** @@ -142,11 +145,11 @@ public DatasetFieldType findByNameOpt(String name) { } } - /* + /* * Similar method for looking up foreign metadata field mappings, for metadata - * imports. for these the uniquness of names isn't guaranteed (i.e., there - * can be a field "author" in many different formats that we want to support), - * so these have to be looked up by both the field name and the name of the + * imports. for these the uniquness of names isn't guaranteed (i.e., there + * can be a field "author" in many different formats that we want to support), + * so these have to be looked up by both the field name and the name of the * foreign format. 
*/ public ForeignMetadataFieldMapping findFieldMapping(String formatName, String pathName) { @@ -164,7 +167,7 @@ public ForeignMetadataFieldMapping findFieldMapping(String formatName, String pa public ControlledVocabularyValue findControlledVocabularyValue(Object pk) { return em.find(ControlledVocabularyValue.class, pk); } - + /** * @param dsft The DatasetFieldType in which to look up a * ControlledVocabularyValue. @@ -175,7 +178,7 @@ public ControlledVocabularyValue findControlledVocabularyValue(Object pk) { * @return The ControlledVocabularyValue found or null. */ public ControlledVocabularyValue findControlledVocabularyValueByDatasetFieldTypeAndStrValue(DatasetFieldType dsft, String strValue, boolean lenient) { - TypedQuery typedQuery = em.createQuery("SELECT OBJECT(o) FROM ControlledVocabularyValue AS o WHERE o.strValue = :strvalue AND o.datasetFieldType = :dsft", ControlledVocabularyValue.class); + TypedQuery typedQuery = em.createQuery("SELECT OBJECT(o) FROM ControlledVocabularyValue AS o WHERE o.strValue = :strvalue AND o.datasetFieldType = :dsft", ControlledVocabularyValue.class); typedQuery.setParameter("strvalue", strValue); typedQuery.setParameter("dsft", dsft); try { @@ -199,7 +202,7 @@ public ControlledVocabularyValue findControlledVocabularyValueByDatasetFieldType } } } - + public ControlledVocabAlternate findControlledVocabAlternateByControlledVocabularyValueAndStrValue(ControlledVocabularyValue cvv, String strValue){ TypedQuery typedQuery = em.createQuery("SELECT OBJECT(o) FROM ControlledVocabAlternate AS o WHERE o.strValue = :strvalue AND o.controlledVocabularyValue = :cvv", ControlledVocabAlternate.class); typedQuery.setParameter("strvalue", strValue); @@ -214,7 +217,7 @@ public ControlledVocabAlternate findControlledVocabAlternateByControlledVocabula return (ControlledVocabAlternate) results.get(0); } } - + /** * @param dsft The DatasetFieldType in which to look up a * ControlledVocabularyValue. 
@@ -224,7 +227,7 @@ public ControlledVocabAlternate findControlledVocabAlternateByControlledVocabula * @return The ControlledVocabularyValue found or null. */ public ControlledVocabularyValue findControlledVocabularyValueByDatasetFieldTypeAndIdentifier (DatasetFieldType dsft, String identifier) { - TypedQuery typedQuery = em.createQuery("SELECT OBJECT(o) FROM ControlledVocabularyValue AS o WHERE o.identifier = :identifier AND o.datasetFieldType = :dsft", ControlledVocabularyValue.class); + TypedQuery typedQuery = em.createQuery("SELECT OBJECT(o) FROM ControlledVocabularyValue AS o WHERE o.identifier = :identifier AND o.datasetFieldType = :dsft", ControlledVocabularyValue.class); typedQuery.setParameter("identifier", identifier); typedQuery.setParameter("dsft", dsft); try { @@ -253,11 +256,11 @@ public MetadataBlock save(MetadataBlock mdb) { public ControlledVocabularyValue save(ControlledVocabularyValue cvv) { return em.merge(cvv); } - + public ControlledVocabAlternate save(ControlledVocabAlternate alt) { return em.merge(alt); - } - + } + /** * This method returns a Map relating DatasetFieldTypes with any external @@ -267,14 +270,14 @@ public ControlledVocabAlternate save(ControlledVocabAlternate alt) { * id or of the child field specified as the 'term-uri-field' (the field where * the URI of the term is stored (and not one of the child fields where the term * name, vocabulary URI, vocabulary Name or other managed information may go.) - * + * * The map only contains values for DatasetFieldTypes that are configured to use external vocabulary services. - * + * * @param byTermUriField - false: the id of the parent DatasetFieldType is the key, true: the 'term-uri-field' DatasetFieldType id is used as the key * @return - a map of JsonObjects containing configuration information keyed by the DatasetFieldType id (Long) */ public Map getCVocConf(boolean byTermUriField){ - + //ToDo - change to an API call to be able to provide feedback if the json is invalid? 
String cvocSetting = settingsService.getValueForKey(SettingsServiceBean.Key.CVocConf); if (cvocSetting == null || cvocSetting.isEmpty()) { @@ -284,11 +287,11 @@ public Map getCVocConf(boolean byTermUriField){ String newHash = DigestUtils.md5Hex(cvocSetting); if (newHash.equals(oldHash)) { return byTermUriField ? cvocMapByTermUri : cvocMap; - } + } oldHash=newHash; cvocMap=new HashMap<>(); cvocMapByTermUri=new HashMap<>(); - + try (JsonReader jsonReader = Json.createReader(new StringReader(settingsService.getValueForKey(SettingsServiceBean.Key.CVocConf)))) { JsonArray cvocConfJsonArray = jsonReader.readArray(); for (JsonObject jo : cvocConfJsonArray.getValuesAs(JsonObject.class)) { @@ -343,7 +346,7 @@ public Map getCVocConf(boolean byTermUriField){ * @param df - the primitive/parent compound field containing a newly saved value */ public void registerExternalVocabValues(DatasetField df) { - DatasetFieldType dft =df.getDatasetFieldType(); + DatasetFieldType dft =df.getDatasetFieldType(); logger.fine("Registering for field: " + dft.getName()); JsonObject cvocEntry = getCVocConf(true).get(dft.getId()); if (dft.isPrimitive()) { @@ -364,19 +367,19 @@ public void registerExternalVocabValues(DatasetField df) { } } } - + /** * Retrieves indexable strings from a cached externalvocabularyvalue entry. - * + * * This method assumes externalvocabularyvalue entries have been filtered and * the externalvocabularyvalue entry contain a single JsonObject whose "personName" or "termName" values * are either Strings or an array of objects with "lang" and ("value" or "content") keys. The * string, or the "value/content"s for each language are added to the set. - * + * * Any parsing error results in no entries (there can be unfiltered entries with * unknown structure - getting some strings from such an entry could give fairly * random info that would be bad to addd for searches, etc.) 
- * + * * @param termUri * @return - a set of indexable strings */ @@ -418,7 +421,7 @@ public Set getStringsFor(String termUri) { } logger.fine("Returning " + String.join(",", strings) + " for " + termUri); return strings; - } + } /** * Perform a query to retrieve a cached value from the externalvocabularvalue table @@ -445,9 +448,10 @@ public JsonObject getExternalVocabularyValue(String termUri) { /** * Perform a call to the external service to retrieve information about the term URI - * @param cvocEntry - the configuration for the DatasetFieldType associated with this term + * @param cvocEntry - the configuration for the DatasetFieldType associated with this term * @param term - the term uri as a string */ + @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) public void registerExternalTerm(JsonObject cvocEntry, String term) { String retrievalUri = cvocEntry.getString("retrieval-uri"); String prefix = cvocEntry.getString("prefix", null); @@ -500,7 +504,8 @@ public void process(HttpResponse response, HttpContext context) throws HttpExcep .setRetryHandler(new DefaultHttpRequestRetryHandler(3, false)) .build()) { HttpGet httpGet = new HttpGet(retrievalUri); - httpGet.addHeader("Accept", "application/json+ld, application/json"); + //application/json+ld is for backward compatibility + httpGet.addHeader("Accept", "application/ld+json, application/json+ld, application/json"); HttpResponse response = httpClient.execute(httpGet); String data = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); @@ -508,7 +513,7 @@ public void process(HttpResponse response, HttpContext context) throws HttpExcep if (statusCode == 200) { logger.fine("Returned data: " + data); try (JsonReader jsonReader = Json.createReader(new StringReader(data))) { - String dataObj =filterResponse(cvocEntry, jsonReader.readObject(), term).toString(); + String dataObj =filterResponse(cvocEntry, jsonReader.readObject(), term).toString(); evv.setValue(dataObj); 
evv.setLastUpdateDate(Timestamp.from(Instant.now())); logger.fine("JsonObject: " + dataObj); @@ -517,6 +522,8 @@ public void process(HttpResponse response, HttpContext context) throws HttpExcep logger.fine("Wrote value for term: " + term); } catch (JsonException je) { logger.severe("Error retrieving: " + retrievalUri + " : " + je.getMessage()); + } catch (PersistenceException e) { + logger.fine("Problem persisting: " + retrievalUri + " : " + e.getMessage()); } } else { logger.severe("Received response code : " + statusCode + " when retrieving " + retrievalUri @@ -537,7 +544,7 @@ public void process(HttpResponse response, HttpContext context) throws HttpExcep * Parse the raw value returned by an external service for a give term uri and * filter it according to the 'retrieval-filtering' configuration for this * DatasetFieldType, creating a Json value with the specified structure - * + * * @param cvocEntry - the config for this DatasetFieldType * @param readObject - the raw response from the service * @param termUri - the term uri @@ -633,7 +640,7 @@ Object processPathSegment(int index, String[] pathParts, JsonValue curPath, Stri String[] keyVal = pathParts[index].split("="); logger.fine("Looking for object where " + keyVal[0] + " is " + keyVal[1]); String expected = keyVal[1]; - + if (!expected.equals("*")) { if (expected.equals("@id")) { expected = termUri; @@ -662,7 +669,7 @@ Object processPathSegment(int index, String[] pathParts, JsonValue curPath, Stri } return parts.build(); } - + } else { curPath = ((JsonObject) curPath).get(pathParts[index]); logger.fine("Found next Path object " + curPath.toString()); @@ -672,7 +679,7 @@ Object processPathSegment(int index, String[] pathParts, JsonValue curPath, Stri logger.fine("Last segment: " + curPath.toString()); logger.fine("Looking for : " + pathParts[index]); JsonValue jv = ((JsonObject) curPath).get(pathParts[index]); - ValueType type =jv.getValueType(); + ValueType type =jv.getValueType(); if 
(type.equals(JsonValue.ValueType.STRING)) { return ((JsonString) jv).getString(); } else if (jv.getValueType().equals(JsonValue.ValueType.ARRAY)) { @@ -685,13 +692,13 @@ Object processPathSegment(int index, String[] pathParts, JsonValue curPath, Stri return null; } - + /** * Supports validation of externally controlled values. If the value is a URI it * must be in the namespace (start with) one of the uriSpace values of an * allowed vocabulary. If free text entries are allowed for this field (per the * configuration), non-uri entries are also assumed valid. - * + * * @param dft * @param value * @return - true: valid @@ -723,7 +730,7 @@ public boolean isValidCVocValue(DatasetFieldType dft, String value) { } return valid; } - + public List getVocabScripts( Map cvocConf) { //ToDo - only return scripts that are needed (those fields are set on display pages, those blocks/fields are allowed in the Dataverse collection for create/edit)? Set scripts = new HashSet(); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java index df126514308..73d3cee1c15 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java @@ -284,7 +284,7 @@ public void setDisplayOnCreate(boolean displayOnCreate) { } public boolean isControlledVocabulary() { - return controlledVocabularyValues != null && !controlledVocabularyValues.isEmpty(); + return allowControlledVocabulary; } /** diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 393c6cfad16..2155bf76d04 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2864,6 +2864,12 @@ public void sort() { public String refresh() { logger.fine("refreshing"); + //In v5.14, versionId was null here. In 6.0, it appears not to be. 
+ //This check is to handle the null if it reappears/occurs under other circumstances + if(versionId==null) { + logger.warning("versionId was null in refresh"); + versionId = workingVersion.getId(); + } //dataset = datasetService.find(dataset.getId()); dataset = null; workingVersion = null; @@ -2873,10 +2879,9 @@ public String refresh() { DatasetVersionServiceBean.RetrieveDatasetVersionResponse retrieveDatasetVersionResponse = null; if (versionId != null) { - // versionId must have been set by now, in the init() method, - // regardless of how the page was originally called - by the dataset - // database id, by the persistent identifier, or by the db id of - // the version. + // versionId must have been set by now (see null check above), in the init() + // method, regardless of how the page was originally called - by the dataset + // database id, by the persistent identifier, or by the db id of the version. this.workingVersion = datasetVersionService.findDeep(versionId); dataset = workingVersion.getDataset(); } @@ -6145,7 +6150,10 @@ public String getWebloaderUrlForDataset(Dataset d) { String signpostingLinkHeader = null; public String getSignpostingLinkHeader() { - if (!workingVersion.isReleased()) { + if ((workingVersion==null) || (!workingVersion.isReleased())) { + if(workingVersion==null) { + logger.warning("workingVersion was null in getSignpostingLinkHeader"); + } return null; } if (signpostingLinkHeader == null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index dcd7eacf50b..a6eb8e8176f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -277,7 +277,7 @@ public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id" @GET @Path("/export") - @Produces({"application/xml", "application/json", "application/html" }) + @Produces({"application/xml", "application/json", 
"application/html", "application/ld+json" }) public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { try { @@ -579,24 +579,26 @@ public Response getVersionMetadataBlock(@Context ContainerRequestContext crc, @GET @AuthRequired @Path("{id}/versions/{versionId}/linkset") - public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - if ( ":draft".equals(versionId) ) { + public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, + @Context UriInfo uriInfo, @Context HttpHeaders headers) { + if (":draft".equals(versionId)) { return badRequest("Signposting is not supported on the :draft version"); } - User user = getRequestUser(crc); - return response(req -> { + DataverseRequest req = createDataverseRequest(getRequestUser(crc)); + try { DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - return ok(Json.createObjectBuilder().add( - "linkset", - new SignpostingResources( - systemConfig, - dsv, - JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""), - JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse("") - ).getJsonLinkset() - ) - ); - }, user); + return Response + .ok(Json.createObjectBuilder() + .add("linkset", + new SignpostingResources(systemConfig, dsv, + JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""), + JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse("")) + .getJsonLinkset()) + .build()) + .type(MediaType.APPLICATION_JSON).build(); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } } @GET diff --git 
a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java index 54be3a8765f..40c55f8114c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java @@ -71,14 +71,14 @@ public String getLinks() { String describedby = "<" + ds.getGlobalId().asURL().toString() + ">;rel=\"describedby\"" + ";type=\"" + "application/vnd.citationstyles.csl+json\""; describedby += ",<" + systemConfig.getDataverseSiteUrl() + "/api/datasets/export?exporter=schema.org&persistentId=" - + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() + ">;rel=\"describedby\"" + ";type=\"application/json+ld\""; + + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() + ">;rel=\"describedby\"" + ";type=\"application/ld+json\""; valueList.add(describedby); String type = ";rel=\"type\""; type = ";rel=\"type\",<" + defaultFileTypeValue + ">;rel=\"type\""; valueList.add(type); - String licenseString = DatasetUtil.getLicenseURI(workingDatasetVersion) + ";rel=\"license\""; + String licenseString = "<" + DatasetUtil.getLicenseURI(workingDatasetVersion) + ">;rel=\"license\""; valueList.add(licenseString); String linkset = "<" + systemConfig.getDataverseSiteUrl() + "/api/datasets/:persistentId/versions/" @@ -116,7 +116,7 @@ public JsonArrayBuilder getJsonLinkset() { systemConfig.getDataverseSiteUrl() + "/api/datasets/export?exporter=schema.org&persistentId=" + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() ).add( "type", - "application/json+ld" + "application/ld+json" ) ); JsonArrayBuilder linksetJsonObj = Json.createArrayBuilder(); diff --git a/src/main/resources/db/migration/V6.0.0.2__9983-missing-unique-constraints.sql b/src/main/resources/db/migration/V6.0.0.2__9983-missing-unique-constraints.sql new file mode 100644 index 00000000000..6cb3a455e4e --- /dev/null +++ 
b/src/main/resources/db/migration/V6.0.0.2__9983-missing-unique-constraints.sql @@ -0,0 +1,16 @@ +DO $$ +BEGIN + + BEGIN + ALTER TABLE externalvocabularyvalue ADD CONSTRAINT externalvocabularvalue_uri_key UNIQUE(uri); + EXCEPTION + WHEN duplicate_table THEN RAISE NOTICE 'Table unique constraint externalvocabularvalue_uri_key already exists'; + END; + + BEGIN + ALTER TABLE oaiset ADD CONSTRAINT oaiset_spec_key UNIQUE(spec); + EXCEPTION + WHEN duplicate_table THEN RAISE NOTICE 'Table unique constraint oaiset_spec_key already exists'; + END; + +END $$; \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java index e22d0740c48..16cef17073b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java @@ -80,6 +80,7 @@ public void testSignposting() { assertTrue(linkHeader.contains(datasetPid)); assertTrue(linkHeader.contains("cite-as")); assertTrue(linkHeader.contains("describedby")); + assertTrue(linkHeader.contains(";rel=\"license\"")); Pattern pattern = Pattern.compile("<([^<]*)> ; rel=\"linkset\";type=\"application\\/linkset\\+json\""); Matcher matcher = pattern.matcher(linkHeader); @@ -92,7 +93,7 @@ public void testSignposting() { String responseString = linksetResponse.getBody().asString(); - JsonObject data = JsonUtil.getJsonObject(responseString).getJsonObject("data"); + JsonObject data = JsonUtil.getJsonObject(responseString); JsonObject lso = data.getJsonArray("linkset").getJsonObject(0); System.out.println("Linkset: " + lso.toString()); @@ -101,6 +102,16 @@ public void testSignposting() { assertTrue(lso.getString("anchor").indexOf("/dataset.xhtml?persistentId=" + datasetPid) > 0); assertTrue(lso.containsKey("describedby")); + // Test export URL from link header + // regex inspired by 
https://stackoverflow.com/questions/68860255/how-to-match-the-closest-opening-and-closing-brackets + Pattern exporterPattern = Pattern.compile("[<\\[][^()\\[\\]]*?exporter=schema.org[^()\\[\\]]*[>\\]]"); + Matcher exporterMatcher = exporterPattern.matcher(linkHeader); + exporterMatcher.find(); + + Response exportDataset = UtilIT.exportDataset(datasetPid, "schema.org"); + exportDataset.prettyPrint(); + exportDataset.then().assertThat().statusCode(OK.getStatusCode()); + } }