diff --git a/dfsclient/pom.xml b/dfsclient/pom.xml
index ea15b7a62..ee01d1bdf 100644
--- a/dfsclient/pom.xml
+++ b/dfsclient/pom.xml
@@ -27,12 +27,20 @@
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven.surefire.version}</version>
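+ <!-- Register org.hpccsystems.ws.client.TestResultNotifier (provided by the wsclient test-jar added below) as a surefire run listener -->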
+ <configuration>
+   <properties>
+     <property>
+       <name>listener</name>
+       <value>org.hpccsystems.ws.client.TestResultNotifier</value>
+     </property>
+   </properties>
+ </configuration>
<groupId>org.codehaus.mojo</groupId>
<artifactId>templating-maven-plugin</artifactId>
<version>${codehaus.template.version}</version>
-
+
@@ -83,6 +91,13 @@
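+ <!-- Shared test utilities (BaseRemoteTest, TestResultNotifier) come from the wsclient test-jar built by the root pom -->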
+ <dependency>
+   <groupId>org.hpccsystems</groupId>
+   <artifactId>wsclient</artifactId>
+   <type>test-jar</type>
+   <scope>test</scope>
+   <version>${project.version}</version>
+ </dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
diff --git a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java
index 9318dd297..46f02f39f 100644
--- a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java
+++ b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSIndexTest.java
@@ -55,8 +55,9 @@
import static org.junit.Assert.assertTrue;
@Category(org.hpccsystems.commons.annotations.RemoteTests.class)
-public class DFSIndexTest extends BaseRemoteTest {
- String[] datasetNames = { "~test::index::integer", "~test::index::string" };
+public class DFSIndexTest extends BaseRemoteTest
+{
+ String[] datasetNames = {"~test::index::integer", "~test::index::string"};
FieldDef[] datasetRecordDefinitions = new FieldDef[2];
ArrayList<HPCCRecord> partitionRangeStart = new ArrayList<HPCCRecord>();
@@ -65,86 +66,93 @@ public class DFSIndexTest extends BaseRemoteTest {
static boolean isSetup = false;
@Before
- public void setup() throws Exception {
- if (isSetup) {
+ public void setup() throws Exception
+ {
+ if (isSetup)
+ {
return;
}
isSetup = true;
// Integer key
FieldDef[] fieldDefs = new FieldDef[2];
- fieldDefs[0] = new FieldDef("key", FieldType.INTEGER, "INTEGER4", 4, true, false, HpccSrcType.LITTLE_ENDIAN,
- new FieldDef[0]);
- fieldDefs[1] = new FieldDef("payload", FieldType.STRING, "STRING16", 16, true, false,
- HpccSrcType.SINGLE_BYTE_CHAR, new FieldDef[0]);
+ fieldDefs[0] = new FieldDef("key", FieldType.INTEGER, "INTEGER4", 4, true, false, HpccSrcType.LITTLE_ENDIAN, new FieldDef[0]);
+ fieldDefs[1] = new FieldDef("payload", FieldType.STRING, "STRING16", 16, true, false, HpccSrcType.SINGLE_BYTE_CHAR, new FieldDef[0]);
- datasetRecordDefinitions[0] = new FieldDef("RootRecord", FieldType.RECORD, "rec", 4, false, false,
- HpccSrcType.LITTLE_ENDIAN, fieldDefs);
+ datasetRecordDefinitions[0] = new FieldDef("RootRecord", FieldType.RECORD, "rec", 4, false, false, HpccSrcType.LITTLE_ENDIAN, fieldDefs);
// String key
fieldDefs = new FieldDef[2];
- fieldDefs[0] = new FieldDef("key", FieldType.STRING, "STRING4", 4, true, false, HpccSrcType.SINGLE_BYTE_CHAR,
- new FieldDef[0]);
- fieldDefs[1] = new FieldDef("payload", FieldType.STRING, "STRING16", 16, true, false,
- HpccSrcType.SINGLE_BYTE_CHAR, new FieldDef[0]);
+ fieldDefs[0] = new FieldDef("key", FieldType.STRING, "STRING4", 4, true, false, HpccSrcType.SINGLE_BYTE_CHAR, new FieldDef[0]);
+ fieldDefs[1] = new FieldDef("payload", FieldType.STRING, "STRING16", 16, true, false, HpccSrcType.SINGLE_BYTE_CHAR, new FieldDef[0]);
- datasetRecordDefinitions[1] = new FieldDef("RootRecord", FieldType.RECORD, "rec", 4, false, false,
- HpccSrcType.LITTLE_ENDIAN, fieldDefs);
+ datasetRecordDefinitions[1] = new FieldDef("RootRecord", FieldType.RECORD, "rec", 4, false, false, HpccSrcType.LITTLE_ENDIAN, fieldDefs);
- for (int i = 0; i < datasetNames.length; i++) {
- // ------------------------------------------------------------------------------
+ for (int i = 0; i < datasetNames.length; i++)
+ {
+ //------------------------------------------------------------------------------
// Create indexable dataset
- // ------------------------------------------------------------------------------
+ //------------------------------------------------------------------------------
String datasetName = datasetNames[i];
FieldDef recordDef = datasetRecordDefinitions[i];
createIndexableFile(datasetName, recordDef, partitionRangeStart, partitionRangeEnd);
- // ------------------------------------------------------------------------------
+ //------------------------------------------------------------------------------
// Create index
- // ------------------------------------------------------------------------------
+ //------------------------------------------------------------------------------
String indexName = null;
- try {
- indexName = createIndexOnDataset(datasetName, recordDef);
- } catch (Exception e) {
+ try
+ {
+ indexName = createIndexOnDataset(datasetName,recordDef);
+ }
+ catch (Exception e)
+ {
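+ // Skip (rather than fail) the remaining index tests when the target cluster cannot create the index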
Assume.assumeNoException("Failed to create index with error: ", e);
}
}
}
@Test
- public void hpccTLKFilterTest() throws Exception {
- for (int i = 0; i < datasetNames.length; i++) {
+ public void hpccTLKFilterTest() throws Exception
+ {
+ for (int i = 0; i < datasetNames.length; i++)
+ {
String datasetName = datasetNames[i];
FieldDef recordDef = datasetRecordDefinitions[i];
String indexName = datasetName + "::key";
- // ------------------------------------------------------------------------------
+ //------------------------------------------------------------------------------
// Read index and check TLK against known partition ranges
- // ------------------------------------------------------------------------------
+ //------------------------------------------------------------------------------
- HPCCFile file = new HPCCFile(indexName, connString, hpccUser, hpccPass);
+ HPCCFile file = new HPCCFile(indexName, connString , hpccUser, hpccPass);
assertTrue(file.isTlkIndex());
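+ // The TLK (top-level key) part records each partition's key range, letting the PartitionProcessor map a filter to candidate parts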
DataPartition[] fileParts = file.getFileParts();
FieldDef originalRD = file.getRecordDefinition();
- for (int j = 0; j < fileParts.length - 1; j++) {
+ for (int j = 0; j < fileParts.length - 1; j++)
+ {
HPCCRecordBuilder recordBuilder = new HPCCRecordBuilder(file.getProjectedRecordDefinition());
- HpccRemoteFileReader<HPCCRecord> fileReader = new HpccRemoteFileReader<HPCCRecord>(fileParts[j],
- originalRD, recordBuilder);
- while (fileReader.hasNext()) {
+ HpccRemoteFileReader<HPCCRecord> fileReader = new HpccRemoteFileReader<HPCCRecord>(fileParts[j], originalRD, recordBuilder);
+ while (fileReader.hasNext())
+ {
HPCCRecord record = fileReader.next();
- if (record == null) {
+ if (record == null)
+ {
Assert.fail("PartitionProcessor: " + j + " failed to read record.");
}
// Check starting range
String filterStr = null;
- if (record.getField(0) instanceof String) {
+ if (record.getField(0) instanceof String)
+ {
filterStr = "key = \'" + record.getField(0).toString() + "\'";
- } else {
+ }
+ else
+ {
filterStr = "key = " + record.getField(0).toString();
}
@@ -153,17 +161,19 @@ public void hpccTLKFilterTest() throws Exception {
// Due to how TLK works we can get more than one partition
boolean hadExpectedPartition = false;
- for (int k = 0; k < matchedPartitions.size(); k++) {
- if (matchedPartitions.get(k).index() == j) {
+ for (int k = 0; k < matchedPartitions.size(); k++)
+ {
+ if (matchedPartitions.get(k).index() == j)
+ {
hadExpectedPartition = true;
break;
}
}
- if (hadExpectedPartition == false) {
+ if (hadExpectedPartition == false)
+ {
System.out.println("Partition: " + j + " Filter: " + filterStr);
- System.out.println(
- "Partition range: " + file.getPartitionProcessor().getPartitionRangeAsString(j));
+ System.out.println("Partition range: " + file.getPartitionProcessor().getPartitionRangeAsString(j));
Assert.fail("PartitionProcessor: " + j
+ " filtering result did not contain partition"
+ partitionListToString(matchedPartitions));
@@ -174,13 +184,14 @@ public void hpccTLKFilterTest() throws Exception {
}
@Test
- public void tlkFilterExample() throws Exception {
+ public void tlkFilterExample() throws Exception
+ {
System.out.println("Starting tlk filter test.");
- // ------------------------------------------------------------------------------
+ //------------------------------------------------------------------------------
// Read index and check TLK against known partition ranges
- // ------------------------------------------------------------------------------
+ //------------------------------------------------------------------------------
- HPCCFile file = new HPCCFile("~test::index::integer::key", connString, hpccUser, hpccPass);
+ HPCCFile file = new HPCCFile("~test::index::integer::key", connString , hpccUser, hpccPass);
// Find partitions that match the provided filter
Long searchValue = 3L;
@@ -191,18 +202,20 @@ public void tlkFilterExample() throws Exception {
DataPartition matchedPart = filteredPartitions.get(0);
HPCCRecordBuilder recordBuilder = new HPCCRecordBuilder(file.getProjectedRecordDefinition());
- HpccRemoteFileReader<HPCCRecord> fileReader = new HpccRemoteFileReader<HPCCRecord>(matchedPart,
- file.getRecordDefinition(), recordBuilder);
+ HpccRemoteFileReader<HPCCRecord> fileReader = new HpccRemoteFileReader<HPCCRecord>(matchedPart, file.getRecordDefinition(), recordBuilder);
boolean foundRecord = false;
- while (fileReader.hasNext()) {
+ while (fileReader.hasNext())
+ {
HPCCRecord record = fileReader.next();
- if (record == null) {
+ if (record == null)
+ {
Assert.fail("Received null record during read");
}
Long keyValue = (Long) record.getField(0);
- if (keyValue.equals(searchValue)) {
+ if (keyValue.equals(searchValue))
+ {
foundRecord = true;
}
System.out.println("Key: " + keyValue + " Search value: " + searchValue + " found: " + foundRecord);
@@ -211,36 +224,20 @@ public void tlkFilterExample() throws Exception {
}
@Test
- public void tlkBypassTest() throws Exception {
- // ------------------------------------------------------------------------------
- // Read index ignoring TLK and check that all partitions are returned
- // ------------------------------------------------------------------------------
-
- HPCCFile file = new HPCCFile("~test::index::integer::key", connString, hpccUser, hpccPass);
- file.setUseTLK(false);
- DataPartition[] dataParts = file.getFileParts();
-
- Long searchValue = 3L;
- FileFilter filter = new FileFilter("key = " + searchValue);
- List<DataPartition> filteredPartitions = file.findMatchingPartitions(filter);
-
- // Without the TLK being read the above filter should return all file parts
- assertTrue("Unexpected number of partitions", filteredPartitions.size() == dataParts.length);
- }
-
- @Test
- public void biasedIntTest() throws Exception {
- HPCCFile file = new HPCCFile("~test::index::integer::key", connString, hpccUser, hpccPass);
+ public void biasedIntTest() throws Exception
+ {
+ HPCCFile file = new HPCCFile("~test::index::integer::key", connString , hpccUser, hpccPass);
DataPartition[] fileParts = file.getFileParts();
List<HPCCRecord> records = new ArrayList<HPCCRecord>();
FieldDef originalRD = file.getRecordDefinition();
- for (int j = 0; j < fileParts.length; j++) {
+ for (int j = 0; j < fileParts.length; j++)
+ {
HPCCRecordBuilder recordBuilder = new HPCCRecordBuilder(file.getProjectedRecordDefinition());
- HpccRemoteFileReader<HPCCRecord> fileReader = new HpccRemoteFileReader<HPCCRecord>(fileParts[j], originalRD,
- recordBuilder);
- while (fileReader.hasNext()) {
+ HpccRemoteFileReader<HPCCRecord> fileReader = new HpccRemoteFileReader<HPCCRecord>(fileParts[j], originalRD, recordBuilder);
+ while (fileReader.hasNext())
+ {
records.add(fileReader.next());
}
fileReader.close();
@@ -248,8 +245,7 @@ public void biasedIntTest() throws Exception {
assertTrue(records.size() >= 2);
- // Read the data from the first partition and make sure that biased integers
- // have been corrected
+ // Read the data from the first partition and make sure that biased integers have been corrected
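+ // (index integer keys are stored with a bias so their raw bytes sort in key order; the reader is expected to undo it)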
int partitionIndex = 0;
{
HPCCRecord startRecord = records.get(0);
@@ -264,9 +260,11 @@ public void biasedIntTest() throws Exception {
}
}
- private String partitionListToString(List<DataPartition> partitions) {
+ private String partitionListToString(List<DataPartition> partitions)
+ {
String matchedPartitionStr = "[ ";
- for (DataPartition part : partitions) {
+ for (DataPartition part : partitions)
+ {
matchedPartitionStr += part.index() + " ";
}
matchedPartitionStr += "]";
@@ -274,41 +272,41 @@ private String partitionListToString(List<DataPartition> partitions) {
return matchedPartitionStr;
}
- private void createIndexableFile(String fileName, FieldDef recordDef, List<HPCCRecord> partitionRangeStart,
- List<HPCCRecord> partitionRangeEnd) {
- try {
- // ------------------------------------------------------------------------------
- // Request a temp file be created in HPCC to write to
- // ------------------------------------------------------------------------------
+ private void createIndexableFile(String fileName, FieldDef recordDef, List<HPCCRecord> partitionRangeStart, List<HPCCRecord> partitionRangeEnd)
+ {
+ try
+ {
+ //------------------------------------------------------------------------------
+ // Request a temp file be created in HPCC to write to
+ //------------------------------------------------------------------------------
String eclRecordDefn = RecordDefinitionTranslator.toECLRecord(recordDef);
HPCCWsDFUClient dfuClient = wsclient.getWsDFUClient();
String filegroupname = this.thorClusterFileGroup;
- DFUCreateFileWrapper createResult = dfuClient.createFile(fileName, filegroupname, eclRecordDefn, 300, false,
- DFUFileTypeWrapper.Flat, "");
+ DFUCreateFileWrapper createResult = dfuClient.createFile(fileName, filegroupname, eclRecordDefn, 300, false, DFUFileTypeWrapper.Flat, "");
DFUFilePartWrapper[] dfuFileParts = createResult.getFileParts();
DataPartition[] hpccPartitions = DataPartition.createPartitions(dfuFileParts,
- new NullRemapper(new RemapInfo(), createResult.getFileAccessInfo()), dfuFileParts.length,
- createResult.getFileAccessInfoBlob());
+ new NullRemapper(new RemapInfo(), createResult.getFileAccessInfo()), dfuFileParts.length, createResult.getFileAccessInfoBlob());
- // ------------------------------------------------------------------------------
- // Write partitions to file parts and keep track of record ranges
- // ------------------------------------------------------------------------------
+ //------------------------------------------------------------------------------
+ // Write partitions to file parts and keep track of record ranges
+ //------------------------------------------------------------------------------
partitionRangeStart.clear();
partitionRangeEnd.clear();
long bytesWritten = 0;
int numRecords = 0;
- for (int partitionIndex = 0; partitionIndex < hpccPartitions.length; partitionIndex++) {
+ for (int partitionIndex = 0; partitionIndex < hpccPartitions.length; partitionIndex++)
+ {
HPCCRecordAccessor recordAccessor = new HPCCRecordAccessor(recordDef);
- HPCCRemoteFileWriter<HPCCRecord> fileWriter = new HPCCRemoteFileWriter<HPCCRecord>(
- hpccPartitions[partitionIndex], recordDef, recordAccessor, CompressionAlgorithm.NONE);
+ HPCCRemoteFileWriter<HPCCRecord> fileWriter = new HPCCRemoteFileWriter<HPCCRecord>(hpccPartitions[partitionIndex], recordDef, recordAccessor, CompressionAlgorithm.NONE);
- try {
+ try
+ {
List<HPCCRecord> recordRange = createRecordRange(partitionIndex, hpccPartitions.length, recordDef);
for (HPCCRecord record : recordRange) {
fileWriter.writeRecord(record);
@@ -320,18 +318,22 @@ private void createIndexableFile(String fileName, FieldDef recordDef, List<HPCCRecord> partitionRangeStart,
List<HPCCRecord> records = new ArrayList<HPCCRecord>();
- for (long i = 0; i < fileParts.length; i++) {
+ for (long i = 0; i < fileParts.length; i++)
+ {
long start = System.currentTimeMillis();
HpccRandomAccessFileReader<HPCCRecord> fileReader = null;
- try {
- DataPartition fp = fileParts[(int) i];
+ try
+ {
+ DataPartition fp = fileParts[(int)i];
HPCCRecordBuilder recordBuilder = new HPCCRecordBuilder(file.getProjectedRecordDefinition());
- fileReader = new HpccRandomAccessFileReader<HPCCRecord>(fp, originalRD, recordBuilder, -1);
- } catch (Exception e) {
+ fileReader = new HpccRandomAccessFileReader<HPCCRecord>(fp, originalRD, recordBuilder, -1);
+ }
+ catch (Exception e)
+ {
Assert.fail("Error: " + e.getMessage());
}
long end = System.currentTimeMillis();
System.out.println("Time to create batch record reader: " + (end - start) + "ms");
start = System.currentTimeMillis();
- Long[] recOffsets = { 20L };
+ Long[] recOffsets = {20L};
fileReader.addRecordRequests(Arrays.asList(recOffsets));
- while (fileReader.hasNext()) {
+ while (fileReader.hasNext())
+ {
HPCCRecord record = fileReader.next();
- if (record == null) {
+ if (record == null)
+ {
Assert.fail("Error: failed to read record.");
}
long expectedKeyValue = 3 + 4 * i;
Long keyValue = (Long) record.getField(0);
- if (keyValue != expectedKeyValue) {
+ if (keyValue != expectedKeyValue)
+ {
Assert.fail("Error: key values did not match.");
}
}
@@ -398,64 +408,80 @@ public void testBatchRandomAccess() throws Exception {
fileReader.close();
}
- } catch (Exception e) {
+ }
+ catch (Exception e)
+ {
System.out.println("Error: " + e.getMessage());
}
}
- List<HPCCRecord> createRecordRange(int partitionIndex, int numPartitions, FieldDef recordDef) {
+ List<HPCCRecord> createRecordRange(int partitionIndex, int numPartitions, FieldDef recordDef)
+ {
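+ // Keys are laid out four per partition: range start = partitionIndex * 4, range end = partitionIndex * 4 + 3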
Object[] rangeStartFields = new Object[recordDef.getNumDefs()];
Object[] rangeEndFields = new Object[recordDef.getNumDefs()];
- for (int i = 0; i < recordDef.getNumDefs(); i++) {
+ for (int i = 0; i < recordDef.getNumDefs(); i++)
+ {
boolean isKeyField = (i == 0);
boolean isStart = true;
- rangeStartFields[i] = createFieldValue(partitionIndex, numPartitions, recordDef.getDef(i), isKeyField,
- isStart);
+ rangeStartFields[i] = createFieldValue(partitionIndex, numPartitions, recordDef.getDef(i), isKeyField, isStart);
isStart = false;
- rangeEndFields[i] = createFieldValue(partitionIndex, numPartitions, recordDef.getDef(i), isKeyField,
- isStart);
+ rangeEndFields[i] = createFieldValue(partitionIndex, numPartitions, recordDef.getDef(i), isKeyField, isStart);
}
ArrayList<HPCCRecord> recordRange = new ArrayList<HPCCRecord>();
- recordRange.add(new HPCCRecord(rangeStartFields, recordDef));
- recordRange.add(new HPCCRecord(rangeEndFields, recordDef));
+ recordRange.add(new HPCCRecord(rangeStartFields,recordDef));
+ recordRange.add(new HPCCRecord(rangeEndFields,recordDef));
return recordRange;
}
- Object createFieldValue(int partitionIndex, int numPartitions, FieldDef fd, boolean isKeyField, boolean isStart) {
- if (isKeyField) {
- if (fd.isFixed() == false) {
+ Object createFieldValue(int partitionIndex, int numPartitions, FieldDef fd, boolean isKeyField, boolean isStart)
+ {
+ if (isKeyField)
+ {
+ if (fd.isFixed() == false)
+ {
Assert.fail("Invalid key field type");
}
- switch (fd.getFieldType()) {
- case BOOLEAN: {
+ switch (fd.getFieldType())
+ {
+ case BOOLEAN:
+ {
return Boolean.valueOf(isStart == false);
}
- case INTEGER: {
- if (isStart) {
- return new Integer(partitionIndex * 4);
- } else {
- return new Integer(partitionIndex * 4 + 3);
+ case INTEGER:
+ {
+ if (isStart)
+ {
+ return new Integer(partitionIndex * 4);
+ }
+ else
+ {
+ return new Integer(partitionIndex * 4 + 3);
}
}
- case STRING: {
+ case STRING:
+ {
// Convert partitionIndex * 4 + 0/3 into base 26 string
int rangeNum = 0;
- if (isStart) {
+ if (isStart)
+ {
rangeNum = partitionIndex * 4;
- } else {
+ }
+ else
+ {
rangeNum = partitionIndex * 4 + 3;
}
StringBuilder builder = new StringBuilder("    ");
int charIndex = (int) Math.ceil(Math.log(numPartitions) / Math.log(26));
- while (rangeNum > 0) {
+ while (rangeNum > 0)
+ {
char currentLetter = (char) ('A' + (rangeNum % 26));
- builder.setCharAt(charIndex, currentLetter);
+ builder.setCharAt(charIndex, currentLetter);
rangeNum /= 26;
charIndex--;
@@ -467,8 +493,11 @@ Object createFieldValue(int partitionIndex, int numPartitions, FieldDef fd, boolean isKeyField, boolean isStart) {
Assert.fail("Invalid key field type");
return null;
}
- } else {
- if (fd.getFieldType() != FieldType.STRING) {
+ }
+ else
+ {
+ if (fd.getFieldType() != FieldType.STRING)
+ {
Assert.fail("Invalid payload field type.");
return null;
}
@@ -477,12 +506,12 @@ Object createFieldValue(int partitionIndex, int numPartitions, FieldDef fd, boolean isKeyField, boolean isStart) {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
DFSIndexTest test = new DFSIndexTest();
try {
test.hpccTLKFilterTest();
test.tlkFilterExample();
- } catch (Exception e) {
- }
+ } catch (Exception e) {}
}
}
diff --git a/pom.xml b/pom.xml
index 70facfd3e..1aeae2d0a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -227,6 +227,13 @@
<exclude>**/WSDLs</exclude>
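+ <!-- Attach each module's test classes as a test-jar so sibling modules (e.g. dfsclient) can depend on them -->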
+ <executions>
+   <execution>
+     <goals>
+       <goal>test-jar</goal>
+     </goals>
+   </execution>
+ </executions>
<artifactId>maven-install-plugin</artifactId>
diff --git a/wsclient/pom.xml b/wsclient/pom.xml
index 1d239e14e..641263cd0 100644
--- a/wsclient/pom.xml
+++ b/wsclient/pom.xml
@@ -78,9 +78,17 @@
-
+
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven.surefire.version}</version>
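+ <!-- Same TestResultNotifier run-listener registration as in dfsclient/pom.xml -->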
+ <configuration>
+   <properties>
+     <property>
+       <name>listener</name>
+       <value>org.hpccsystems.ws.client.TestResultNotifier</value>
+     </property>
+   </properties>
+ </configuration>