HPCC4J-551 HPCCFile Make TLK Use Optional
- Added an option to HPCCFile to make TLK reading optional

Signed-off-by: James McMullan [email protected]
jpmcmu committed Oct 27, 2023
1 parent 28183cf commit 36f4050
Showing 1 changed file with 49 additions and 9 deletions.
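
For context, here is a minimal caller-side sketch of the option this commit introduces. The constructor signature and the meaning of the trailing useTLK flag come from the diff below; the endpoint URL, file name, and the package paths for Connection and RemapInfo are assumptions drawn from elsewhere in the library, not part of this change.

import org.hpccsystems.dfs.client.HPCCFile;
import org.hpccsystems.dfs.cluster.RemapInfo;        // assumed package path
import org.hpccsystems.ws.client.utils.Connection;   // assumed package path

public class TLKOptionSketch
{
    public static void main(String[] args) throws Exception
    {
        // Hypothetical ESP endpoint; replace with a real cluster address and credentials.
        Connection espConn = new Connection("http://127.0.0.1:8010");

        // New overload from this commit: the trailing boolean (useTLK) controls whether the
        // top level key is read and used to help filter index file parts.
        HPCCFile file = new HPCCFile("example::dataset::index", espConn,
                                     "",               // targetColumnList: keep all columns
                                     "",               // filter: no record filter
                                     new RemapInfo(),  // no address re-mapping
                                     0,                // maxParts: no maximum
                                     "",               // targetfilecluster: default
                                     false);           // useTLK: skip TLK reading
    }
}

The pre-existing constructors keep their behavior by delegating to this overload with useTLK set to true, as the diff below shows.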
58 changes: 49 additions & 9 deletions dfsclient/src/main/java/org/hpccsystems/dfs/client/HPCCFile.java
@@ -50,6 +50,7 @@ public class HPCCFile implements Serializable

private DataPartition[] dataParts;
private DataPartition tlkPartition = null;
private boolean useTLK = true;
private PartitionProcessor partitionProcessor = null;
private long dataPartsCreationTimeMS = -1;

@@ -130,12 +131,44 @@ public HPCCFile(String fileName, String connectionString, String user, String pa
*/
public HPCCFile(String fileName, Connection espconninfo, String targetColumnList, String filter, RemapInfo remap_info, int maxParts,
String targetfilecluster) throws HpccFileException
{
this(fileName, espconninfo, targetColumnList, filter, remap_info, maxParts, targetfilecluster, true);
}

/**
* Constructor for the HpccFile. Captures HPCC logical file information from the DALI Server for the clusters behind
* the ESP named by the IP address and re-maps the address information for the THOR nodes to visible addresses when
* the THOR clusters are virtual.
*
* @param fileName
* The HPCC file name
* @param espconninfo
* the espconninfo
* @param targetColumnList
* a comma separated list of column names in dotted notation for columns within compound columns.
* @param filter
* a file filter to select records of interest (SQL where syntax)
* @param remap_info
* address and port re-mapping info for THOR cluster
* @param maxParts
optional - the maximum number of partitions, or zero for no max
* @param targetfilecluster
* optional - the hpcc cluster the target file resides in
* @param useTLK
* optional - whether or not the top level key should be used to help filter index files
* @throws HpccFileException
* the hpcc file exception
*/
public HPCCFile(String fileName, Connection espconninfo, String targetColumnList, String filter, RemapInfo remap_info, int maxParts,
String targetfilecluster, boolean useTLK) throws HpccFileException
{
this.fileName = fileName;
this.recordDefinition = null;
this.projectedRecordDefinition = null;
this.columnPruner = new ColumnPruner(targetColumnList);
this.espConnInfo = espconninfo;
this.useTLK = useTLK;

try
{
if (filter != null && !filter.isEmpty())
@@ -163,12 +196,12 @@ public static int getFilePartFromFPos(long fpos)
}

/**
* Extracts the offset in the file part from a fileposition value.
* Extracts the offset in the file part from a fileposition value.
*
* @param fpos file position
* @return the project list
*/
public static long getOffsetFromFPos(long fpos)
public static long getOffsetFromFPos(long fpos)
{
// First 48 bits store the offset
return fpos & 0xffffffffffffL;
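
As a quick illustration of the comment above, the following arithmetic sketch shows how a file position splits. The offset mask is taken directly from getOffsetFromFPos; reading the file part from the upper bits is an assumption, since the body of getFilePartFromFPos is not shown in this hunk.

// Sketch only: the 0xffffffffffffL mask is from getOffsetFromFPos above;
// treating the bits above bit 47 as the file part is an assumed encoding.
long fpos   = (5L << 48) | 4096L;       // hypothetical file position
long offset = fpos & 0xffffffffffffL;   // lower 48 bits -> 4096
long part   = fpos >>> 48;              // upper 16 bits -> 5 here (mapping not shown in this hunk)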
@@ -424,13 +457,20 @@ private void createDataParts() throws HpccFileException

this.recordDefinition = RecordDefinitionTranslator.parseJsonRecordDefinition(new JSONObject(originalRecDefInJSON));

try
if (this.useTLK)
{
this.partitionProcessor = new PartitionProcessor(this.recordDefinition, this.dataParts, this.tlkPartition);
try
{
this.partitionProcessor = new PartitionProcessor(this.recordDefinition, this.dataParts, this.tlkPartition);
}
catch (Exception e)
{
log.error("Error while constructing partition processor, reading will continue without partition filtering: " + e.getMessage());
this.partitionProcessor = new PartitionProcessor(this.recordDefinition, this.dataParts, null);
}
}
catch (Exception e)
else
{
log.error("Error while constructing partition processor, reading will continue without partition filtering: " + e.getMessage());
this.partitionProcessor = new PartitionProcessor(this.recordDefinition, this.dataParts, null);
}
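
Because the unified diff above interleaves the old and new lines without +/- markers, here is a hedged reconstruction of how this block reads after the change; exact indentation and brace placement are assumptions.

// Reconstruction of the post-change logic (sketch, not copied verbatim from the repository).
if (this.useTLK)
{
    try
    {
        this.partitionProcessor = new PartitionProcessor(this.recordDefinition, this.dataParts, this.tlkPartition);
    }
    catch (Exception e)
    {
        log.error("Error while constructing partition processor, reading will continue without partition filtering: " + e.getMessage());
        this.partitionProcessor = new PartitionProcessor(this.recordDefinition, this.dataParts, null);
    }
}
else
{
    // With useTLK disabled, skip the TLK entirely; a null TLK partition means no partition filtering.
    this.partitionProcessor = new PartitionProcessor(this.recordDefinition, this.dataParts, null);
}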

@@ -622,13 +662,13 @@ private static String acquireFileAccess(String fileName, HPCCWsDFUClient hpcc, i
String uniqueID = "HPCC-FILE: " + UUID.randomUUID().toString();
return hpcc.getFileAccessBlob(fileName, clusterName, expirySeconds, uniqueID);
}

/**
* @return the file metadata information for this HPCCFile (if it exists)
*/
public DFUFileDetailWrapper getOriginalFileMetadata()
public DFUFileDetailWrapper getOriginalFileMetadata()
{
if (originalFileMetadata==null)
if (originalFileMetadata==null)
{
HPCCWsDFUClient dfuClient = HPCCWsDFUClient.get(espConnInfo);
if (dfuClient.hasInitError())
