Skip to content

Commit

Permalink
Fixed bugs in upload recovery logic
Browse files Browse the repository at this point in the history
  • Loading branch information
datasetutil committed Nov 22, 2014
1 parent fc4d2c9 commit c641528
Showing 1 changed file with 12 additions and 7 deletions.
19 changes: 12 additions & 7 deletions src/main/java/com/sforce/dataset/loader/DatasetLoader.java
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ public static boolean uploadDataset(String inputFileString,
boolean status = true;
long digestTime = 0L;
long uploadTime = 0L;
boolean isRecovery = false;
boolean updateHdrJson = false;
//we only want a small capacity, otherwise the reader thread will run away and the writer thread will become slower
BlockingQueue<String[]> q = new LinkedBlockingQueue<String[]>(3);

Expand Down Expand Up @@ -236,6 +236,10 @@ public static boolean uploadDataset(String inputFileString,
if(hdrId==null)
{
hdrId = insertFileHdr(partnerConnection, datasetAlias,datasetFolder, FileUtils.readFileToByteArray(metadataJson), uploadFormat, Operation);
}else
{
System.out.println("Record {"+hdrId+"} is being reused from InsightsExternalData");
updateHdrJson = true;
}
if(hdrId ==null || hdrId.isEmpty())
{
Expand Down Expand Up @@ -465,14 +469,14 @@ public static boolean uploadDataset(String inputFileString,
}else
{
System.out.println("Recovering process from last file {"+lastgzbinFile+"} upload");
isRecovery = true;
updateHdrJson = false; //The file is already digested, we cannot update the hdr now
gzbinFile = lastgzbinFile;
}

//Upload the file
// if(useSoapAPI)
long startTime = System.currentTimeMillis();
status = DatasetLoader.uploadEM(gzbinFile, uploadFormat, ExternalFileSchema.getSchemaFile(inputFile), datasetAlias,datasetFolder, useBulkAPI, partnerConnection, hdrId, datasetArchiveDir, "Overwrite", isRecovery);
status = uploadEM(gzbinFile, uploadFormat, ExternalFileSchema.getSchemaFile(inputFile), datasetAlias,datasetFolder, useBulkAPI, partnerConnection, hdrId, datasetArchiveDir, "Overwrite", updateHdrJson);
long endTime = System.currentTimeMillis();
uploadTime = endTime-startTime;

Expand Down Expand Up @@ -516,15 +520,15 @@ public static boolean uploadDataset(String inputFileString,
* @return boolean status of the upload
* @throws Exception
*/
public static boolean uploadEM(File dataFile, String dataFormat, File metadataJson, String datasetAlias,String datasetFolder, boolean useBulk, PartnerConnection partnerConnection, String hdrId, File datasetArchiveDir, String Operation, boolean isRecovery) throws Exception
public static boolean uploadEM(File dataFile, String dataFormat, File metadataJson, String datasetAlias,String datasetFolder, boolean useBulk, PartnerConnection partnerConnection, String hdrId, File datasetArchiveDir, String Operation, boolean updateHdrJson) throws Exception
{
byte[] metadataJsonBytes = null;
if(metadataJson != null && metadataJson.canRead())
metadataJsonBytes = FileUtils.readFileToByteArray(metadataJson);
else
System.err.println("warning: metadata Json file {"+metadataJson+"} not found");

return uploadEM(dataFile, dataFormat, metadataJsonBytes, datasetAlias, datasetFolder, useBulk, partnerConnection, hdrId, datasetArchiveDir, Operation, isRecovery);
return uploadEM(dataFile, dataFormat, metadataJsonBytes, datasetAlias, datasetFolder, useBulk, partnerConnection, hdrId, datasetArchiveDir, Operation, updateHdrJson);
}

/**
Expand All @@ -540,7 +544,7 @@ public static boolean uploadEM(File dataFile, String dataFormat, File metadataJs
* @return boolean status of the upload
* @throws Exception
*/
public static boolean uploadEM(File dataFile, String dataFormat, byte[] metadataJsonBytes, String datasetAlias,String datasetFolder, boolean useBulk, PartnerConnection partnerConnection, String hdrId, File datasetArchiveDir, String Operation, boolean isRecovery) throws Exception
public static boolean uploadEM(File dataFile, String dataFormat, byte[] metadataJsonBytes, String datasetAlias,String datasetFolder, boolean useBulk, PartnerConnection partnerConnection, String hdrId, File datasetArchiveDir, String Operation, boolean updateHdrJson) throws Exception
{
if(datasetAlias==null||datasetAlias.trim().isEmpty())
{
Expand All @@ -565,7 +569,8 @@ public static boolean uploadEM(File dataFile, String dataFormat, byte[] metadata
hdrId = insertFileHdr(partnerConnection, datasetAlias,datasetFolder, metadataJsonBytes, dataFormat, Operation);
}else
{
if(isRecovery)
LinkedList<Integer> existingFileParts = getUploadedFileParts(partnerConnection, hdrId);
if(updateHdrJson && existingFileParts.isEmpty())
updateFileHdr(partnerConnection, hdrId, datasetAlias, datasetFolder, metadataJsonBytes, dataFormat, "None", Operation);
}

Expand Down

0 comments on commit c641528

Please sign in to comment.