diff --git a/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/.gitignore b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/.gitignore
new file mode 100644
index 0000000..46332b0
--- /dev/null
+++ b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/.gitignore
@@ -0,0 +1,29 @@
+# Compiled class file
+*.class
+
+# Log file
+*.log
+
+# BlueJ files
+*.ctxt
+
+# Mobile Tools for Java (J2ME)
+.mtj.tmp/
+
+# Package Files #
+*.jar
+*.war
+*.nar
+*.ear
+*.zip
+*.tar.gz
+*.rar
+
+# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
+hs_err_pid*
+
+target/maven-archiver/
+
+target/maven-status/maven-compiler-plugin/compile/default-compile/
+
+target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/
diff --git a/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/README.md b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/README.md
new file mode 100644
index 0000000..a1ea05e
--- /dev/null
+++ b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/README.md
@@ -0,0 +1,9 @@
+# Java RESTful Engine S3 Repository Sample
+
+## Overview
+This sample demonstrates how to implement the IRepository storage plugin interface for AWS S3.
+This is sample code intended to show a basic example of how to implement this and is not intended to be used in production or at scale.
+If you run into issues in larger scale environments, consult AWS documentation for best practices.
+
+## Usage
+You can find the documentation for using the sample [here](https://fluent.apryse.com/documentation/engine-guide/Fluent%20RESTful%20Engines/JavaRestSotragePluginSample).
diff --git a/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/pom.xml b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/pom.xml
new file mode 100644
index 0000000..9745d26
--- /dev/null
+++ b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/pom.xml
@@ -0,0 +1,103 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>com.apryse.fluent</groupId>
+    <artifactId>S3Repository</artifactId>
+    <version>1.0.0</version>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+    </properties>
+
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>software.amazon.awssdk</groupId>
+                <artifactId>bom</artifactId>
+                <version>2.2.0</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
+    <repositories>
+        <repository>
+            <id>windward-maven-repo</id>
+            <name>windward-maven-repo</name>
+            <url>https://windward.mycloudrepo.io/public/repositories/windward-libs</url>
+        </repository>
+        <repository>
+            <id>io.cloudrepo</id>
+            <url>https://windward.mycloudrepo.io/repositories/external-libs</url>
+        </repository>
+    </repositories>
+
+    <dependencies>
+        <dependency>
+            <groupId>net.windward</groupId>
+            <artifactId>WindwardRepository</artifactId>
+            <version>LATEST</version>
+        </dependency>
+        <dependency>
+            <groupId>com.amazonaws</groupId>
+            <artifactId>aws-java-sdk-s3</artifactId>
+            <version>LATEST</version>
+        </dependency>
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>s3-transfer-manager</artifactId>
+            <version>LATEST</version>
+        </dependency>
+        <dependency>
+            <groupId>com.amazonaws</groupId>
+            <artifactId>aws-java-sdk-dynamodb</artifactId>
+            <version>LATEST</version>
+        </dependency>
+        <dependency>
+            <groupId>com.amazonaws</groupId>
+            <artifactId>aws-java-sdk-core</artifactId>
+            <version>LATEST</version>
+        </dependency>
+        <dependency>
+            <groupId>com.parse.bolts</groupId>
+            <artifactId>bolts-tasks</artifactId>
+            <version>1.3.0</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>3.7.1</version>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                        <configuration>
+                            <descriptorRefs>
+                                <descriptorRef>jar-with-dependencies</descriptorRef>
+                            </descriptorRefs>
+                            <archive>
+                                <manifest>
+                                    <mainClass>S3RepositoryPlugin.S3Repository</mainClass>
+                                </manifest>
+                            </archive>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
\ No newline at end of file
diff --git a/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/src/main/java/S3RepositoryPlugin/S3Repository.java b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/src/main/java/S3RepositoryPlugin/S3Repository.java
new file mode 100644
index 0000000..7d9eae4
--- /dev/null
+++ b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/src/main/java/S3RepositoryPlugin/S3Repository.java
@@ -0,0 +1,466 @@
+package S3RepositoryPlugin;
+
+import S3Storage.JobInfoEntity;
+import S3Storage.JobRequestData;
+import S3Storage.S3StorageManager;
+import WindwardModels.*;
+import WindwardRepository.*;
+import bolts.CancellationToken;
+import bolts.CancellationTokenSource;
+import com.amazonaws.auth.AWSStaticCredentialsProvider;
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.regions.Regions;
+import com.amazonaws.services.s3.AmazonS3Client;
+import com.amazonaws.services.s3.AmazonS3ClientBuilder;
+import net.windward.util.LicenseException;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.time.Duration;
+import java.time.LocalDateTime;
+import java.util.UUID;
+
+/**
+ * NOTE: This is sample code and is not production ready. It is not optimized to run at scale. Intended for reference only
+ * for your own implementation.
+ */
+
+/**
+ * IRepository implementation backed by AWS S3 (payloads) and DynamoDB (job state),
+ * delegating all storage work to {@link S3StorageManager}.
+ */
+public class S3Repository implements IRepository {
+
+    // Fill these in before running the sample (sample code only; do not ship credentials).
+    private String bucketName = "";
+    private String awsAccessKey = "";
+    private String awsSecretKey = "";
+
+    BasicAWSCredentials aWSCredentials;
+    S3StorageManager storageManager;
+    private AmazonS3Client s3Client;
+
+    // Signalled to wake the background manageRequests() thread.
+    private AutoResetEvent eventSignal;
+    private boolean shutDown;
+    // Retention window for finished jobs, and how often to sweep for expired ones.
+    private Duration timeSpanDeleteOldJobs = Duration.ZERO;
+    private Duration timeSpanCheckOldJobs = Duration.ZERO;
+    private LocalDateTime datetimeLastCheckOldJobs = LocalDateTime.MIN;
+
+    private static final Logger Log = LogManager.getLogger(S3Repository.class);
+
+    /**
+     * Builds the S3 client and storage manager, then starts the background thread that
+     * periodically deletes old jobs. The retention window comes from the
+     * "hours-delete-jobs" system property (default 24 hours).
+     */
+    public S3Repository()
+    {
+        Log.info("Initializing S3Repository");
+        aWSCredentials = new BasicAWSCredentials(awsAccessKey, awsSecretKey);
+        s3Client = (AmazonS3Client) AmazonS3ClientBuilder.standard().withCredentials(new AWSStaticCredentialsProvider(aWSCredentials)).withRegion(Regions.US_EAST_1).build();
+        storageManager = new S3StorageManager(aWSCredentials, s3Client, bucketName);
+        int hours;
+        try {
+            hours = Integer.parseInt(System.getProperty("hours-delete-jobs"));
+        } catch (NumberFormatException e) {
+            // Property missing or malformed - fall back to a 24 hour retention window.
+            hours = 24;
+        }
+        timeSpanDeleteOldJobs = Duration.ofHours(hours);
+
+        // Sweep 96 times per retention window (every 15 minutes for the 24h default).
+        timeSpanCheckOldJobs = Duration.ofMinutes(timeSpanDeleteOldJobs.toMinutes() / 96);
+
+        datetimeLastCheckOldJobs = LocalDateTime.now();
+
+        if (Log.isInfoEnabled()) {
+            Log.info(
+                    String.format("starting FileSystemRepository, Delete jobs older than %1$s", timeSpanDeleteOldJobs));
+        }
+
+        eventSignal = new AutoResetEvent(true);
+
+        // This thread manages all the background threads. It sleeps on an event and
+        // when awoken, fires off anything it can.
+        // This is used so web requests that call signal aren't delayed as background
+        // tasks might be started.
+        CancellationTokenSource tokenSource = new CancellationTokenSource();
+        CancellationToken token = tokenSource.getToken();
+
+        token.register(() -> eventSignal.set());
+
+        new Thread((new Runnable() {
+            CancellationToken cancelToken;
+
+            public Runnable pass(CancellationToken ct) {
+                this.cancelToken = ct;
+                return this;
+            }
+
+            public void run() {
+                manageRequests(cancelToken);
+            }
+        }.pass(token))).start();
+
+    }
+
+    private IJobHandler JobHandler;
+
+    private IJobHandler getJobHandler() {
+        return JobHandler;
+    }
+
+    @Override
+    public final void SetJobHandler(IJobHandler handler) {
+        if (Log.isDebugEnabled())
+            Log.debug(String.format("SetJobHandler(%1$s)", handler));
+        JobHandler = handler;
+    }
+
+    @Override
+    public void shutDown() {
+        // Stop the background manageRequests() loop and wake it so it exits promptly.
+        // (The original never set this flag, leaving the thread running after shutdown.)
+        shutDown = true;
+        eventSignal.set();
+        // Put any jobs stuck in Generating back to Pending so they retry on restart.
+        boolean reverted = storageManager.revertGeneratingJobsPending();
+        if (!reverted)
+            Log.warn("[S3RepoPlugin] shutDown() failed to revert generating jobs to pending");
+    }
+
+    /**
+     * Background loop: periodically deletes expired jobs, sleeping on the event
+     * signal in between, until shut down or cancelled.
+     */
+    private void manageRequests(CancellationToken cancelToken) {
+        while (!shutDown && !cancelToken.isCancellationRequested()) {
+            if (datetimeLastCheckOldJobs.plus(timeSpanCheckOldJobs).isBefore(LocalDateTime.now())) {
+                deleteOldJobs();
+                datetimeLastCheckOldJobs = LocalDateTime.now();
+            }
+
+            // wait until needed again, or cancelled, or time to check for jobs.
+            try {
+                eventSignal.waitOne(timeSpanCheckOldJobs.toMillis());
+            } catch (InterruptedException e) {
+                // Treat interruption as a shutdown request: restore the flag and stop.
+                Thread.currentThread().interrupt();
+                return;
+            }
+        }
+    }
+
+    /**
+     * Queues a new request: assigns the template a fresh guid, persists it, and
+     * signals the job handler. Returns the guid, or null on failure.
+     */
+    @Override
+    public String createRequest(Template template, RepositoryStatus.REQUEST_TYPE request_type) {
+        try {
+            template.setGuid(UUID.randomUUID().toString());
+            JobRequestData jobData = new JobRequestData(template, request_type, LocalDateTime.now());
+            Log.info("[S3RepoPlugin] Created request " + jobData.Template.getGuid());
+
+            storageManager.AddRequest(jobData);
+
+            if (getJobHandler() != null) {
+                getJobHandler().Signal();
+            }
+
+            return template.getGuid();
+        }
+        catch(Exception ex)
+        {
+            Log.error("[S3RepoPlugin] createRequest() Failed to create request " + template.getGuid(), ex);
+        }
+
+        return null;
+    }
+
+    /**
+     * Takes the oldest pending request off the queue (marking it Generating),
+     * or returns null when the queue is empty or on error.
+     */
+    @Override
+    public RepositoryRequest takeRequest() {
+
+        Log.info("[S3RepoPlugin] Take request called");
+        try
+        {
+            JobRequestData job = storageManager.getOldestJobAndGenerate();
+            if(job != null)
+            {
+                Log.info("[S3RepoPlugin] Took requests: "+job.Template.getGuid());
+                return new RepositoryRequest(job.Template, job.RequestType);
+            }
+            else
+            {
+                Log.info("[S3RepoPlugin] takeRequest() returning null. No requests are in the queue.");
+                return null;
+            }
+        }
+        catch (Exception ex)
+        {
+            Log.error("[S3RepoPlugin] TakeRequest Error in taking request: " +ex);
+        }
+        return null;
+    }
+
+    /** Persists a generated document and fires the job's callback on success. */
+    @Override
+    public void saveReport(Template template, Document document) throws IOException
+    {
+        boolean res = storageManager.completeRequest(template.getGuid(), document);
+
+        if(res)
+        {
+            Log.info("[S3RepoPlugin] saveReport() saved request successfully: "+template.getGuid());
+            completeJob(template);
+        }
+        else
+        {
+            Log.error("[S3RepoPlugin] saveReport() error saving request: "+template.getGuid());
+        }
+    }
+
+    /**
+     * POSTs to the template's callback URL (with {guid} substituted), if one is set.
+     * Failures are logged and swallowed - this runs on a background thread.
+     */
+    private void completeJob(Template template)
+    {
+        if (shutDown || (template.getCallback() == null || template.getCallback().length() == 0))
+            return;
+        String url = template.getCallback().replace("{guid}", template.getGuid());
+        // try-with-resources closes the client and response exactly once, even when
+        // execute() throws (the original could NPE on a null response in its catch).
+        try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
+            HttpPost post = new HttpPost(url);
+            try (CloseableHttpResponse response = httpClient.execute(post)) {
+                if (response.getStatusLine().getStatusCode() != 200 && Log.isInfoEnabled()) {
+                    Log.info(String.format("Callback to %1$s returned status code %2$s", url,
+                            response.getStatusLine().getStatusCode()));
+                }
+            }
+        } catch (RuntimeException | IOException ex) {
+            Log.warn(String.format("Callback for job %1$s to url %2$s threw exception %3$s", template.getGuid(),
+                    template.getCallback(), ex.getMessage()), ex);
+            // silently swallow the exception - this is a background thread.
+        }
+
+    }
+
+    /**
+     * Records a job failure: sets LicenseError or Error status, then stores the
+     * ServiceError payload where getError() can retrieve it.
+     */
+    @Override
+    public void saveError(Template template, ServiceError serviceError)
+    {
+        try
+        {
+            boolean result;
+            // Use the class constant instead of a magic string for the type comparison.
+            if(serviceError.getType().equals(LicenseException.class.getName())){
+                result = storageManager.UpdateRequest(template.getGuid(), RepositoryStatus.JOB_STATUS.LicenseError);
+            }
+            else {
+                result = storageManager.UpdateRequest(template.getGuid(), RepositoryStatus.JOB_STATUS.Error);
+            }
+
+            if(!result) {
+                Log.error("[S3RepoPlugin] saveError() error saving error status: "+template.getGuid());
+            }
+
+            result = storageManager.completeRequest(template.getGuid(), serviceError);
+
+            if(result) {
+                Log.info("[S3RepoPlugin] saveError() Successfully saved error status: "+template.getGuid());
+            }
+            else {
+                Log.error("[S3RepoPlugin] saveError() error saving error status: "+template.getGuid());
+            }
+        }
+        catch (IOException ex)
+        {
+            Log.error("[S3RepoPlugin] saveError() error saving error status: "+template.getGuid(), ex);
+        }
+    }
+
+    /** Persists a generated tag tree and fires the job's callback on success. */
+    @Override
+    public void saveTagTree(Template template, TagTree tagTree)
+    {
+        try
+        {
+            boolean res = storageManager.completeRequest(template.getGuid(), tagTree);
+            if(res)
+            {
+                Log.info("[S3RepoPlugin] saveTagTree() saved tagTree successfully: "+template.getGuid());
+                completeJob(template);
+            }
+            else
+            {
+                Log.error("[S3RepoPlugin] saveTagTree() error saving tagtree: "+template.getGuid());
+            }
+        }
+        catch (IOException e) {
+            // Log instead of printStackTrace(), matching the rest of the class.
+            Log.error("[S3RepoPlugin] saveTagTree() error saving tagtree: "+template.getGuid(), e);
+        }
+    }
+
+    /** Persists generated metrics and fires the job's callback on success. */
+    @Override
+    public void saveMetrics(Template template, Metrics metrics)
+    {
+        boolean res;
+        try
+        {
+            res = storageManager.completeRequest(template.getGuid(), metrics);
+            if(res)
+            {
+                Log.info("[S3RepoPlugin] saveMetrics() saved metrics successfully: "+template.getGuid());
+                completeJob(template);
+            }
+            else
+            {
+                Log.error("[S3RepoPlugin] saveMetrics() error saving metrics: "+template.getGuid());
+            }
+        }
+        catch (IOException e) {
+            Log.error("[S3RepoPlugin] saveMetrics() error saving metrics: "+template.getGuid(), e);
+        }
+
+    }
+
+    /** Returns the job's status/type from DynamoDB, or null when lookup fails. */
+    @Override
+    public RequestStatus getReportStatus(String guid) {
+        try
+        {
+            JobInfoEntity result = storageManager.getRequestInfo(guid);
+            return new RequestStatus(RepositoryStatus.JOB_STATUS.forValue(result.Status), RepositoryStatus.REQUEST_TYPE.forValue(result.Type));
+        }
+        catch(Exception ex) {
+            Log.error("[S3RepoPlugin] getReportStatus() error getting report status: "+ guid, ex);
+        }
+        return null;
+    }
+
+    /** Fetches the generated document from S3, or null on failure. */
+    @Override
+    public Document getReport(String guid) {
+        try
+        {
+            Document result = storageManager.getGeneratedReport(guid);
+            Log.info("[S3RepoPlugin] getReport() got report Successfully: "+ guid);
+            return result;
+
+        }
+        catch (Exception ex)
+        {
+            Log.error("[S3RepoPlugin] getReport() error getting report: "+guid, ex);
+        }
+        return null;
+    }
+
+    /** Fetches the stored ServiceError from S3, or null on failure. */
+    @Override
+    public ServiceError getError(String guid) {
+        try
+        {
+            ServiceError result = storageManager.getError(guid);
+            Log.info("[S3RepoPlugin] getError() got error Successfully: "+ guid);
+            return result;
+
+        }
+        catch (Exception ex)
+        {
+            Log.error("[S3RepoPlugin] getError() error getting error: "+guid, ex);
+        }
+        return null;
+    }
+
+    /** Fetches the stored tag tree from S3, or null on failure. */
+    @Override
+    public TagTree getTagTree(String guid) {
+        try
+        {
+            TagTree result = storageManager.getTagTree(guid);
+            Log.info("[S3RepoPlugin] getTagTree() got TagTree Successfully: "+ guid);
+            return result;
+
+        }
+        catch (Exception ex)
+        {
+            Log.error("[S3RepoPlugin] getTagTree() error getting TagTree: "+guid, ex);
+        }
+        return null;
+    }
+
+    /** Fetches the stored metrics from S3, or null on failure. */
+    @Override
+    public Metrics getMetrics(String guid) {
+        try
+        {
+            Metrics result = storageManager.getMetrics(guid);
+            Log.info("[S3RepoPlugin] getMetrics() got metrics Successfully: "+ guid);
+            return result;
+
+        }
+        catch (Exception ex)
+        {
+            Log.error("[S3RepoPlugin] getMetrics() error getting metrics: "+guid, ex);
+        }
+        return null;
+    }
+
+    /** Deletes the job's DynamoDB row and its S3 objects. */
+    @Override
+    public void deleteReport(String guid)
+    {
+        try
+        {
+            JobInfoEntity jobInfo = storageManager.getRequestInfo(guid);
+            storageManager.deleteRequest(jobInfo);
+            // Success is informational; the original logged it at error level.
+            Log.info("[S3RepoPlugin] deleteReport() successfully deleted report: "+guid);
+        }
+        catch (Exception ex)
+        {
+            Log.error("[S3RepoPlugin] deleteReport() error deleting report: "+guid, ex);
+        }
+    }
+
+    /** Deletes every job older than the configured retention window. */
+    private void deleteOldJobs() {
+        LocalDateTime oldDateTime = LocalDateTime.now().minus(timeSpanDeleteOldJobs);
+        storageManager.deleteOldRequests(oldDateTime);
+    }
+
+    /** Builds a DocumentMeta view of a generated report, or null on failure. */
+    @Override
+    public DocumentMeta getMeta(String guid)
+    {
+        try {
+            Document doc = storageManager.getGeneratedReport(guid);
+            DocumentMeta docMeta = setReportMeta(doc);
+            return docMeta;
+        }
+        catch (Exception ex)
+        {
+            Log.error("[S3RepoPlugin] getMeta() error getting document meta: "+guid, ex);
+        }
+        return null;
+    }
+
+    /** Copies the metadata fields of a Document into a new DocumentMeta. */
+    private DocumentMeta setReportMeta(Document doc) {
+        DocumentMeta docMeta = new DocumentMeta();
+        docMeta.setGuid(doc.getGuid());
+        docMeta.setNumberOfPages(doc.getNumberOfPages());
+        docMeta.setImportInfo(doc.getImportInfo());
+        docMeta.setTag(doc.getTag());
+        docMeta.setErrors(doc.getErrors());
+
+        return docMeta;
+    }
+
+
+    /*
+     * Implement the following 2 methods to make use of document performance feature. https://fluent.apryse.com/documentation/engine-guide/Fluent%20RESTful%20Engines/JavaRestSotragePlugin#methods-and-variables
+     */
+
+    @Override
+    public void saveDocumentPerformanceObject(DocumentPerformance documentPerformance, String s) {
+
+    }
+
+    @Override
+    public DocumentPerformance getDocumentPerformanceObject(String s) {
+        return null;
+    }
+
+
+    /*
+     * Implement the following methods to make use of cached resource feature. https://fluent.apryse.com/documentation/engine-guide/Fluent%20RESTful%20Engines/JavaRestSotragePlugin#methods-and-variables
+     */
+    @Override
+    public void saveCachedResource(CachedResource cachedResource) throws Exception {
+
+    }
+
+    @Override
+    public net.windward.util.AccessProviders.models.CachedResource getCachedResource(String s) {
+        return null;
+    }
+
+    @Override
+    public void deleteCachedResource(String s) throws FileNotFoundException {
+
+    }
+}
diff --git a/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/src/main/java/S3Storage/JobInfoEntity.java b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/src/main/java/S3Storage/JobInfoEntity.java
new file mode 100644
index 0000000..f0cf6db
--- /dev/null
+++ b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/src/main/java/S3Storage/JobInfoEntity.java
@@ -0,0 +1,98 @@
+package S3Storage;
+
+import com.amazonaws.services.dynamodbv2.datamodeling.*;
+
+import java.io.Serializable;
+import java.time.LocalDateTime;
+
+/**
+ * NOTE: This is sample code and is not production ready. It is not optimized to run at scale. Intended for reference only
+ * for your own implementation.
+ */
+
+/**
+ * DynamoDB row tracking one job's type, status, and creation time.
+ * Replace {YOUR_TABLE_NAME} with your actual table name before use.
+ */
+@DynamoDBTable(tableName="{YOUR_TABLE_NAME}")
+public class JobInfoEntity implements Serializable {
+
+    // Public fields mirror the bean properties below; DynamoDBMapper goes through
+    // the annotated getters/setters.
+    public String Guid;
+    public String RangeKey;
+    public int Type;        // numeric RepositoryStatus.REQUEST_TYPE value
+    public int Status;      // numeric RepositoryStatus.JOB_STATUS value; -1 = not yet set
+    public LocalDateTime CreationDate;
+
+    // Suffix appended to the guid to form the table's range key.
+    public static String RANGE_KEY_EXT = "-RangeKey";
+
+    /** No-arg constructor required by DynamoDBMapper. */
+    public JobInfoEntity()
+    {
+
+    }
+    public JobInfoEntity(String guid, int type, int status, LocalDateTime creationDate)
+    {
+        this.Guid = guid;
+        this.Type = type;
+        this.Status = status;
+        this.CreationDate = creationDate;
+        this.RangeKey = guid + RANGE_KEY_EXT;
+    }
+
+    /** Builds an entity from an incoming request; Status starts at -1 (unset). */
+    public static JobInfoEntity FromJobRequestData(JobRequestData data)
+    {
+        return new JobInfoEntity(data.getTemplate().getGuid(), (int)data.RequestType.getValue(), -1, data.getCreationDate());
+    }
+
+    @DynamoDBHashKey(attributeName="Guid")
+    public String getGuid() {
+        return Guid;
+    }
+    public void setGuid(String guid) {
+        Guid = guid;
+    }
+
+    @DynamoDBAttribute(attributeName="Type")
+    public int getType() {
+        return Type;
+    }
+    public void setType(int type) {
+        Type = type;
+    }
+
+    @DynamoDBAttribute(attributeName="Status")
+    public int getStatus() {
+        return Status;
+    }
+    public void setStatus(int status) {
+        Status = status;
+    }
+
+    @DynamoDBAttribute(attributeName="CreationDate")
+    @DynamoDBTypeConverted( converter = LocalDateTimeConverter.class )
+    public LocalDateTime getCreationDate() {
+        return CreationDate;
+    }
+    public void setCreationDate(LocalDateTime creationDate) {
+        CreationDate = creationDate;
+    }
+
+    @DynamoDBRangeKey(attributeName="RangeKey")
+    public String getRangeKey() {
+        return RangeKey;
+    }
+
+    public void setRangeKey(String rangeKey) {
+        RangeKey = rangeKey;
+    }
+
+
+    /**
+     * Stores LocalDateTime values as ISO-8601 strings in DynamoDB.
+     * The generic parameters (stored type, model type) are required for
+     * DynamoDBTypeConverter to compile; they were missing in the original.
+     */
+    public static class LocalDateTimeConverter implements DynamoDBTypeConverter<String, LocalDateTime> {
+
+        @Override
+        public String convert( final LocalDateTime time ) {
+
+            return time.toString();
+        }
+
+        @Override
+        public LocalDateTime unconvert( final String stringValue ) {
+
+            return LocalDateTime.parse(stringValue);
+        }
+    }
+}
diff --git a/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/src/main/java/S3Storage/JobRequestData.java b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/src/main/java/S3Storage/JobRequestData.java
new file mode 100644
index 0000000..9035ca2
--- /dev/null
+++ b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/src/main/java/S3Storage/JobRequestData.java
@@ -0,0 +1,60 @@
+package S3Storage;
+
+
+import WindwardModels.Template;
+import WindwardRepository.RepositoryStatus;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.time.LocalDateTime;
+
+
+/**
+ * NOTE: This is sample code and is not production ready. It is not optimized to run at scale. Intended for reference only
+ * for your own implementation.
+ */
+
+/**
+ * Serializable payload pairing a Template with its request type and creation time;
+ * this is what gets queued and handed back out of takeRequest().
+ */
+@XmlRootElement(name="JobRequestData")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class JobRequestData implements Serializable {
+
+    @XmlElement(name = "Template")
+    public Template Template;
+
+    @XmlElement(name = "RequestType")
+    public RepositoryStatus.REQUEST_TYPE RequestType;
+
+    @XmlElement(name = "CreationDate")
+    public LocalDateTime CreationDate;
+
+    /** No-arg constructor required by JAXB for unmarshalling. */
+    public JobRequestData()
+    {
+    }
+
+    public JobRequestData(Template template, RepositoryStatus.REQUEST_TYPE requestType, LocalDateTime creationDate)
+    {
+        this.Template = template;
+        this.RequestType = requestType;
+        this.CreationDate = creationDate;
+    }
+
+    public void setTemplate(WindwardModels.Template template) {
+        Template = template;
+    }
+    public Template getTemplate() { return this.Template; }
+
+    public void setRequestType(RepositoryStatus.REQUEST_TYPE requestType) {
+        RequestType = requestType;
+    }
+
+    public RepositoryStatus.REQUEST_TYPE getRequestType() {
+        return RequestType;
+    }
+
+    public void setCreationDate(LocalDateTime creationDate) {
+        CreationDate = creationDate;
+    }
+
+    public LocalDateTime getCreationDate() {
+        return CreationDate;
+    }
+}
diff --git a/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/src/main/java/S3Storage/S3StorageManager.java b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/src/main/java/S3Storage/S3StorageManager.java
new file mode 100644
index 0000000..e9314ab
--- /dev/null
+++ b/RESTful Engine/Plugins/Java RESTful/Java-RESTful-S3-Repository-Sample/src/main/java/S3Storage/S3StorageManager.java
@@ -0,0 +1,354 @@
+package S3Storage;
+
+import WindwardModels.*;
+import WindwardRepository.RepositoryStatus;
+import com.amazonaws.AmazonServiceException;
+import com.amazonaws.ClientConfiguration;
+import com.amazonaws.auth.AWSStaticCredentialsProvider;
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.regions.Regions;
+import com.amazonaws.retry.PredefinedRetryPolicies;
+import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
+import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
+import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
+import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperConfig;
+import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBScanExpression;
+import com.amazonaws.services.dynamodbv2.document.DynamoDB;
+import com.amazonaws.services.dynamodbv2.document.Table;
+import com.amazonaws.services.dynamodbv2.model.AttributeValue;
+import com.amazonaws.services.dynamodbv2.model.ComparisonOperator;
+import com.amazonaws.services.dynamodbv2.model.Condition;
+import com.amazonaws.services.s3.AmazonS3Client;
+import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.PutObjectRequest;
+import com.amazonaws.services.s3.model.S3Object;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.Charset;
+import java.time.LocalDateTime;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.concurrent.Semaphore;
+
+
+/**
+ * NOTE: This is sample code and is not production ready. It is not optimized to run at scale. Intended for reference only
+ * for your own implementation.
+ */
+
+public class S3StorageManager {
+
+ private static AmazonDynamoDB client;
+ protected static DynamoDB dynamoDB;
+ private AmazonS3Client s3Client;
+ private DynamoDBMapper dynamoDBMapper;
+
+ private static String bucketName;
+ private static String documentsFolder;
+ private static String templatesFolder;
+
+ private Semaphore semaphore;
+
+ private static final Logger log = LogManager.getLogger(S3StorageManager.class);
+
+
+ public S3StorageManager(BasicAWSCredentials aWSCredentials, AmazonS3Client s3Client, String bucketName)
+ {
+ createClient(aWSCredentials);
+ S3StorageManager.bucketName = bucketName;
+ documentsFolder = "Documents/";
+ templatesFolder = "Templates/";
+
+ this.s3Client = s3Client;
+ this.semaphore = new Semaphore(1);
+ }
+
+ public boolean AddRequest(JobRequestData requestData)
+ {
+ JobInfoEntity entity = JobInfoEntity.FromJobRequestData(requestData);
+ entity.Status = (int)RepositoryStatus.JOB_STATUS.Pending.getValue();
+
+ try
+ {
+ semaphore.acquire();
+ dynamoDBMapper.save(entity);
+
+ log.debug("[S3StorageManager AddRequest] Added template ["+ requestData.Template.getGuid() +"] to blob storage");
+
+ ObjectMapper mapper = new ObjectMapper();
+ String test = mapper.writeValueAsString(requestData.Template);
+
+ InputStream stream = new ByteArrayInputStream(test.getBytes
+ (Charset.forName("UTF-8")));
+
+ PutObjectRequest objectRequest = new PutObjectRequest(bucketName, templatesFolder+entity.Guid, stream, null);
+
+ s3Client.putObject(objectRequest);
+ log.debug("[S3StorageManager AddRequest] Successfully added template ["+ requestData.Template.getGuid() +"]");
+
+ return true;
+ }
+ catch (Exception ex)
+ {
+ log.error("[S3StorageManager AddRequest] Error adding request: ", ex);
+ return false;
+ }
+ finally {
+ semaphore.release();
+ }
+ }
+
+
+ public boolean UpdateRequest(String guid, RepositoryStatus.JOB_STATUS newStatus)
+ {
+ JobInfoEntity entity = getRequestInfo(guid);
+ entity.Status = (int)newStatus.getValue();
+ boolean success = false;
+ try
+ {
+ dynamoDBMapper.save(entity);
+ log.info("[S3StorageManager] Updated request: " + guid);
+ success = true;
+ return success;
+ }
+ catch (Exception ex)
+ {
+ log.error("[S3StorageManager] Error updating request: ", ex);
+ return success;
+ }
+
+ }
+
+ public boolean completeRequest(String guid, T generatedEntity) throws IOException {
+ JobInfoEntity entity = getRequestInfo(guid);
+
+ ObjectMapper mapper = new ObjectMapper();
+ String json = mapper.writeValueAsString(generatedEntity);
+
+ InputStream stream = new ByteArrayInputStream(json.getBytes
+ (Charset.forName("UTF-8")));
+
+ if(!(generatedEntity instanceof ServiceError))
+ {
+ entity.setStatus(RepositoryStatus.JOB_STATUS.Complete.getValue());
+ }
+
+ PutObjectRequest req = new PutObjectRequest(bucketName, documentsFolder+ guid, stream, new ObjectMetadata());
+ try {
+ semaphore.acquire();
+ s3Client.putObject(req);
+ dynamoDBMapper.save(entity);
+
+ log.info(String.format("[S3StorageManager completeRequest] Completed request [%s]", guid));
+
+ return true;
+ }
+ catch (AmazonServiceException | InterruptedException ex)
+ {
+ log.error("[S3StorageManager completeRequest threw an error when trying to put object in S3: "+ex);
+ return false;
+ }
+ finally
+ {
+ semaphore.release();
+ }
+
+ }
+
+ public boolean deleteRequest(JobInfoEntity job)
+ {
+ try {
+
+ dynamoDBMapper.delete(job);
+
+ s3Client.deleteObject(documentsFolder, job.Guid);
+ s3Client.deleteObject(templatesFolder, job.Guid);
+
+ return true;
+ }
+ catch (Exception ex)
+ {
+ log.error("[S3StorageManager] deleteRequest threw an error when trying to delete object in dynamo table: "+ex);
+ return false;
+ }
+ }
+
+ public boolean revertGeneratingJobsPending()
+ {
+ try {
+
+ semaphore.acquire();
+
+ DynamoDBScanExpression scanExpression = new DynamoDBScanExpression();
+
+ scanExpression.addFilterCondition("Status", new Condition().withComparisonOperator(ComparisonOperator.EQ).
+ withAttributeValueList(new AttributeValue().withN(String.valueOf(RepositoryStatus.JOB_STATUS.Generating.getValue()))));
+
+
+ List tmp = dynamoDBMapper.scan(JobInfoEntity.class, scanExpression);
+ for ( JobInfoEntity job : tmp)
+ {
+ UpdateRequest(job.Guid, RepositoryStatus.JOB_STATUS.Pending);
+ }
+ return true;
+ }
+ catch (Exception ex)
+ {
+ log.error("[S3StorageManager] revertGeneratingJobsPending() threw an error when trying to revert generating jobs: "+ex);
+ return false;
+ }
+ finally {
+ semaphore.release();
+ }
+ }
+
+ public void deleteOldRequests(LocalDateTime cutoff)
+ {
+ DynamoDBScanExpression scanExpression = new DynamoDBScanExpression();
+ scanExpression.addFilterCondition("CreationDate", new Condition().withComparisonOperator(ComparisonOperator.LE).
+ withAttributeValueList(new AttributeValue().withN(String.valueOf(cutoff))));
+
+ try{
+ List tmp = dynamoDBMapper.scan(JobInfoEntity.class, scanExpression);
+ for ( JobInfoEntity job : tmp)
+ {
+ deleteRequest(job);
+ }
+ }
+ catch (Exception ex)
+ {
+ log.error("[S3StorageManager] deleteOldRequests() threw an error when trying to delete old jobs: "+ex);
+ }
+ }
+
+ public JobRequestData getOldestJobAndGenerate()
+ {
+ try {
+
+ semaphore.acquire();
+
+ DynamoDBScanExpression scanExpression = new DynamoDBScanExpression();
+
+ scanExpression.addFilterCondition("Status", new Condition().withComparisonOperator(ComparisonOperator.EQ).
+ withAttributeValueList(new AttributeValue().withN(String.valueOf(RepositoryStatus.JOB_STATUS.Pending.getValue()))));
+
+ List entities = new ArrayList<>(dynamoDBMapper.scan(JobInfoEntity.class, scanExpression, new DynamoDBMapperConfig(DynamoDBMapperConfig.ConsistentReads.CONSISTENT)));
+
+ log.info(String.format("[S3StorageManager] Found %d pending jobs to be processed", entities.size()));
+
+ if(entities.size() == 0)
+ return null;
+
+ return retrieveOldestRequest(entities);
+ }
+ catch (Exception ex)
+ {
+ log.error("[S3StorageManager] getOldestJobAndGenerate() threw an error when trying to generate oldest jobs: "+ex);
+ return null;
+ }
+ finally {
+ semaphore.release();
+ }
+ }
+
+ /**
+ * Sorts entities and returns the oldest request to be processed
+ * @param entities The current outstanding requests to be processed
+ * @return The entity to be processed
+ */
+ private JobRequestData retrieveOldestRequest(List entities) {
+ Collections.sort(entities, new SortByDate());
+
+ for(int i = 0; i < entities.size(); i++) {
+ JobInfoEntity oldestEntity = entities.get(i);
+ Template template = getEntityFromBlob(oldestEntity.Guid, templatesFolder, Template.class);
+ if(template == null) {
+ deleteRequest(oldestEntity);
+ log.info(String.format("[S3StorageManager] Deleted job entity [%s] due to null template.", oldestEntity.getGuid()));
+ continue; // If the template is null, delete the request and continue to next oldest request
+ }
+
+ log.info(String.format("[S3StorageManager] Updated job entity [%s] to generating.", oldestEntity.getGuid()));
+
+ oldestEntity.Status = RepositoryStatus.JOB_STATUS.Generating.getValue();
+ dynamoDBMapper.save(oldestEntity);
+
+ return new JobRequestData(template, RepositoryStatus.REQUEST_TYPE.forValue(oldestEntity.Type), oldestEntity.CreationDate);
+ }
+
+ // No entities to be processed
+ log.info("[S3StorageManager] No entities to be processed.");
+ return null;
+ }
+
+ private T getEntityFromBlob(String guid, String folderName, Class typeParameterClass) {
+ try{
+ GetObjectRequest request = new GetObjectRequest(bucketName, folderName+guid);
+
+ S3Object res = s3Client.getObject(request);
+
+ ObjectMapper objectMapper = new ObjectMapper();
+
+ T obj = objectMapper.readValue(res.getObjectContent().getDelegateStream(), typeParameterClass);
+
+ return obj;
+ }
+ catch(Exception ex)
+ {
+ log.error("[S3StorageManager] getEntityFromBlob() threw an error when trying to get entity from S3: "+ex);
+ }
+
+ return null;
+ }
+
+ private void createClient(BasicAWSCredentials aWSCredentials) {
+ ClientConfiguration cf = new ClientConfiguration().withConnectionTimeout(2000).withClientExecutionTimeout(2000).withRequestTimeout(2000).withSocketTimeout(2000).withRetryPolicy(PredefinedRetryPolicies.getDynamoDBDefaultRetryPolicyWithCustomMaxRetries(15));
+ client = AmazonDynamoDBClientBuilder.standard().withRegion(Regions.US_EAST_1).withCredentials(new AWSStaticCredentialsProvider(aWSCredentials)).withClientConfiguration(cf).build();
+ dynamoDB = new DynamoDB(client);
+ dynamoDBMapper = new DynamoDBMapper(client);
+ }
+
+ public Document getGeneratedReport(String guid)
+ {
+ return getEntityFromBlob(guid, documentsFolder, Document.class);
+ }
+
+ public ServiceError getError(String guid)
+ {
+ return getEntityFromBlob(guid, documentsFolder, ServiceError.class);
+ }
+
+ public Metrics getMetrics(String guid)
+ {
+ return getEntityFromBlob(guid, documentsFolder, Metrics.class);
+ }
+
+ public TagTree getTagTree(String guid)
+ {
+ return getEntityFromBlob(guid, documentsFolder, TagTree.class);
+ }
+
+ public JobInfoEntity getRequestInfo(String guid)
+ {
+ String rangeKey = guid + JobInfoEntity.RANGE_KEY_EXT;
+ JobInfoEntity res = dynamoDBMapper.load(JobInfoEntity.class, guid, rangeKey, new DynamoDBMapperConfig(DynamoDBMapperConfig.ConsistentReads.CONSISTENT));
+ return res;
+ }
+
+ static class SortByDate implements Comparator {
+ @Override
+ public int compare(JobInfoEntity a, JobInfoEntity b) {
+ return a.CreationDate.compareTo(b.CreationDate);
+ }
+ }
+}
+
+