Commit

[ALS-4736] Add named dataset (#146)
* Add table and API for naming a dataset id (#132)
* Adds a named_dataset SQL table for saving named dataset ids.
* Adds a SecurityContext class to persist authenticated user information for the entire request.
* Updates the JWTFilter class to add the new SecurityContext to the request context.
* Adds the new service, entity, and endpoint classes needed.
* Add a metadata column to the named_dataset table to allow for future extension. (#135)
* Update migration file name. (#143)
srpiatt committed Sep 25, 2023
1 parent 0bc1130 commit 3babb26
Showing 19 changed files with 1,356 additions and 371 deletions.
5 changes: 5 additions & 0 deletions pic-sure-api-data/pom.xml
@@ -37,5 +37,10 @@
            <groupId>org.hibernate</groupId>
            <artifactId>hibernate-core</artifactId>
        </dependency>
        <dependency>
            <groupId>io.swagger.core.v3</groupId>
            <artifactId>swagger-annotations</artifactId>
            <version>2.2.8</version>
        </dependency>
    </dependencies>
</project>
@@ -0,0 +1,69 @@
package edu.harvard.dbmi.avillach.data.entity;

import java.security.Principal;

import javax.json.Json;

/*
 * This class is used to mirror the User object from the auth DB to maintain schema separation. - nc
 */
public class AuthUser extends BaseEntity implements Principal {
    private String userId;

    private String subject;

    private String roles;

    private String email;

    public String getUserId() {
        return userId;
    }

    public AuthUser setUserId(String userId) {
        this.userId = userId;
        return this;
    }

    public String getSubject() {
        return subject;
    }

    public AuthUser setSubject(String subject) {
        this.subject = subject;
        return this;
    }

    public String getRoles() {
        return roles;
    }

    public AuthUser setRoles(String roles) {
        this.roles = roles;
        return this;
    }

    public String getEmail() {
        return email;
    }

    public AuthUser setEmail(String email) {
        this.email = email;
        return this;
    }

    @Override // Principal method
    public String getName() {
        return getEmail();
    }

    @Override
    public String toString() {
        return Json.createObjectBuilder()
            .add("userId", userId)
            .add("subject", subject)
            .add("email", email)
            .add("roles", roles)
            .build().toString();
    }
}
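The SecurityContext class and the JWTFilter change described in the commit message are not among the files reproduced here. Below is a minimal sketch, assuming the filter is a JAX-RS ContainerRequestFilter, of how an AuthUser could back a javax.ws.rs.core.SecurityContext; the class name AuthSecurityContext and the role-matching logic are illustrative assumptions, not the code added by this commit.

import java.security.Principal;

import javax.ws.rs.core.SecurityContext;

// Hypothetical sketch: wraps the AuthUser above as the request's Principal.
public class AuthSecurityContext implements SecurityContext {
    private final AuthUser user;
    private final String scheme;

    public AuthSecurityContext(AuthUser user, String scheme) {
        this.user = user;
        this.scheme = scheme;
    }

    @Override
    public Principal getUserPrincipal() {
        return user; // AuthUser implements Principal; getName() returns the email
    }

    @Override
    public boolean isUserInRole(String role) {
        // roles are held as a single delimited string on AuthUser (assumption: simple substring check)
        return user.getRoles() != null && user.getRoles().contains(role);
    }

    @Override
    public boolean isSecure() {
        return "https".equalsIgnoreCase(scheme);
    }

    @Override
    public String getAuthenticationScheme() {
        return "Bearer";
    }
}

// A JWTFilter could then register it for the remainder of the request, e.g.:
// requestContext.setSecurityContext(new AuthSecurityContext(authUser,
//         requestContext.getUriInfo().getRequestUri().getScheme()));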
@@ -0,0 +1,101 @@
package edu.harvard.dbmi.avillach.data.entity;

import java.util.Map;

import javax.json.Json;
import javax.persistence.Column;
import javax.persistence.Convert;
import javax.persistence.Entity;
import javax.persistence.JoinColumn;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;

import io.swagger.v3.oas.annotations.media.Schema;

import edu.harvard.dbmi.avillach.data.entity.convert.JsonConverter;

@Schema(description = "A NamedDataset object containing query, name, user, and archived status.")
@Entity(name = "named_dataset")
@Table(uniqueConstraints = {
    @UniqueConstraint(name = "unique_queryId_user", columnNames = { "queryId", "user" })
})
public class NamedDataset extends BaseEntity {
    @Schema(description = "The associated Query")
    @OneToOne
    @JoinColumn(name = "queryId")
    private Query query;

    @Schema(description = "The user identifier")
    @Column(length = 255)
    private String user;

    @Schema(description = "The name the user has assigned to this dataset")
    @Column(length = 255)
    private String name;

    @Schema(description = "The archived state")
    private Boolean archived = false;

    @Schema(description = "A JSON string object containing override specific values")
    @Column(length = 8192)
    @Convert(converter = JsonConverter.class)
    private Map<String, Object> metadata;

    public NamedDataset setName(String name) {
        this.name = name;
        return this;
    }

    public String getName() {
        return name;
    }

    public NamedDataset setArchived(Boolean archived) {
        this.archived = archived;
        return this;
    }

    public Boolean getArchived() {
        return archived;
    }

    public NamedDataset setQuery(Query query) {
        this.query = query;
        return this;
    }

    public Query getQuery() {
        return query;
    }

    public NamedDataset setUser(String user) {
        this.user = user;
        return this;
    }

    public String getUser() {
        return user;
    }

    public Map<String, Object> getMetadata() {
        return metadata;
    }

    public NamedDataset setMetadata(Map<String, Object> metadata) {
        this.metadata = metadata;
        return this;
    }

    @Override
    public String toString() {
        return Json.createObjectBuilder()
            .add("uuid", uuid.toString())
            .add("name", name)
            .add("archived", archived)
            .add("queryId", query.getUuid().toString())
            .add("user", user)
            .add("metadata", metadata.toString())
            .build().toString();
    }
}

This file was deleted.

@@ -0,0 +1,50 @@
package edu.harvard.dbmi.avillach.data.entity.convert;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import javax.persistence.AttributeConverter;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonConverter implements AttributeConverter<Map<String, Object>, String> {
    private final Logger logger = LoggerFactory.getLogger(JsonConverter.class);

    @Override
    public String convertToDatabaseColumn(Map<String, Object> objectData) {
        if (objectData == null) {
            return "{}";
        }

        String jsonData = null;
        try {
            jsonData = new ObjectMapper().writeValueAsString(objectData);
        } catch (final JsonProcessingException e) {
            logger.error("JSON writing error", e);
        }

        return jsonData;
    }

    @Override
    public Map<String, Object> convertToEntityAttribute(String jsonData) {
        if (jsonData == null) {
            return new HashMap<String, Object>();
        }

        Map<String, Object> objectData = null;
        try {
            objectData = new ObjectMapper().readValue(jsonData, new TypeReference<HashMap<String, Object>>() {});
        } catch (final IOException e) {
            logger.error("JSON reading error", e);
        }

        return objectData;
    }
}
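A quick round trip through the converter, which the @Convert annotation on NamedDataset.metadata exercises at flush and load time; the map keys below are illustrative only.

import java.util.HashMap;
import java.util.Map;

// Standalone illustration of the converter's round trip (not part of the commit).
public class JsonConverterDemo {
    public static void main(String[] args) {
        JsonConverter converter = new JsonConverter();

        Map<String, Object> metadata = new HashMap<>();
        metadata.put("label", "example");   // hypothetical keys, for illustration only
        metadata.put("pinned", true);

        String column = converter.convertToDatabaseColumn(metadata);
        System.out.println(column);                  // e.g. {"label":"example","pinned":true}

        Map<String, Object> restored = converter.convertToEntityAttribute(column);
        System.out.println(restored.get("label"));   // example
    }
}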
@@ -0,0 +1,13 @@
package edu.harvard.dbmi.avillach.data.repository;

import edu.harvard.dbmi.avillach.data.entity.NamedDataset;

import javax.enterprise.context.ApplicationScoped;
import javax.transaction.Transactional;
import java.util.UUID;

@Transactional
@ApplicationScoped
public class NamedDatasetRepository extends BaseRepository<NamedDataset, UUID> {
    protected NamedDatasetRepository() {
        super(NamedDataset.class);
    }
}
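The service and endpoint classes mentioned in the commit message are not reproduced in this capture. A rough sketch of how a service method might combine this repository with the NamedDataset entity above and the NamedDatasetRequest defined below; the class name NamedDatasetService, the injected QueryRepository, and the getById/persist helpers assumed on BaseRepository are guesses for illustration, not the commit's actual code.

import java.util.Optional;

import javax.inject.Inject;

import edu.harvard.dbmi.avillach.data.entity.NamedDataset;
import edu.harvard.dbmi.avillach.data.entity.Query;
import edu.harvard.dbmi.avillach.data.repository.NamedDatasetRepository;
import edu.harvard.dbmi.avillach.data.repository.QueryRepository;
import edu.harvard.dbmi.avillach.data.request.NamedDatasetRequest;

// Hypothetical service sketch; repository method names are assumptions.
public class NamedDatasetService {

    @Inject
    NamedDatasetRepository namedDatasetRepo;

    @Inject
    QueryRepository queryRepo;   // assumed to exist for resolving queryId -> Query

    public Optional<NamedDataset> save(String user, NamedDatasetRequest request) {
        Query query = queryRepo.getById(request.getQueryId());   // assumed BaseRepository helper
        if (query == null) {
            return Optional.empty();
        }

        NamedDataset dataset = new NamedDataset()
            .setQuery(query)
            .setUser(user)                        // user identity taken from the request's SecurityContext
            .setName(request.getName())
            .setArchived(request.getArchived())
            .setMetadata(request.getMetadata());

        namedDatasetRepo.persist(dataset);        // assumed BaseRepository helper
        return Optional.of(dataset);
    }
}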
@@ -0,0 +1,64 @@
package edu.harvard.dbmi.avillach.data.request;

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;

import io.swagger.v3.oas.annotations.media.Schema;

@Schema(
    description = "Request to add or update a named dataset.",
    example = "{\n" + //
        " \"queryId\": \"ec780aeb-d981-432a-b72b-51d4ecb3fd53\",\n" + //
        " \"name\": \"My first Query\",\n" + //
        " \"archived\": false,\n" + //
        " \"metadata\": {}\n" + //
        "}"
)
public class NamedDatasetRequest {
    @NotNull
    private UUID queryId;

    @NotNull
    @Pattern(regexp = "^[\\w\\d \\-\\\\/?+=\\[\\].():\"']+$")
    private String name;

    private Boolean archived = false;

    private Map<String, Object> metadata = new HashMap<String, Object>();

    public UUID getQueryId() {
        return queryId;
    }

    public void setQueryId(UUID queryId) {
        this.queryId = queryId;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Boolean getArchived() {
        return archived;
    }

    public void setArchived(Boolean archived) {
        this.archived = archived;
    }

    public Map<String, Object> getMetadata() {
        return metadata;
    }

    public void setMetadata(Map<String, Object> metadata) {
        this.metadata = metadata;
    }
}
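The @Pattern constraint above is what limits which names can be stored; Bean Validation applies the same regular expression when a request is deserialized. A quick standalone check of what it accepts (the sample names are illustrative):

import java.util.regex.Pattern;

// Standalone check of the name pattern used by NamedDatasetRequest.
public class NamePatternDemo {
    private static final Pattern NAME = Pattern.compile("^[\\w\\d \\-\\\\/?+=\\[\\].():\"']+$");

    public static void main(String[] args) {
        System.out.println(NAME.matcher("My first Query").matches());       // true
        System.out.println(NAME.matcher("datasets/v2 [draft]").matches());  // true
        System.out.println(NAME.matcher("name; drop table").matches());     // false: ';' is outside the allowed set
    }
}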
@@ -0,0 +1,13 @@
USE `picsure`;

CREATE TABLE `named_dataset` (
    `uuid` binary(16) NOT NULL,
    `queryId` binary(16) NOT NULL,
    `user` varchar(255) COLLATE utf8_bin DEFAULT NULL,
    `name` varchar(255) COLLATE utf8_bin DEFAULT NULL,
    `archived` bit(1) NOT NULL DEFAULT FALSE,
    `metadata` TEXT,
    PRIMARY KEY (`uuid`),
    CONSTRAINT `foreign_queryId` FOREIGN KEY (`queryId`) REFERENCES `query` (`uuid`),
    CONSTRAINT `unique_queryId_user` UNIQUE (`queryId`, `user`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;