Java Code Examples for org.apache.pig.data.DataBag#iterator()
The following examples show how to use
org.apache.pig.data.DataBag#iterator() .
You can vote up the examples you find useful or vote down the ones you don't,
and go to the original project or source file by following the links above each example. You can also check out the related API usage in the sidebar.
Example 1
Source File: Over.java From spork with Apache License 2.0 | 6 votes |
@Override public Object exec(Tuple input) throws IOException { DataBag inbag = (DataBag)input.get(0); OverBag.OverBagIterator iter = (OverBag.OverBagIterator)inbag.iterator(); try { if (currentRow >= 0) { return iter.tuples.get(currentRow).get(0); } else if (deflt != null) { return deflt; } else { return null; } } finally { currentRow++; } }
Example 2
Source File: AllFirstLetter.java From spork with Apache License 2.0 | 6 votes |
@Override public void accumulate(Tuple b) throws IOException { try { DataBag bag = (DataBag) b.get(0); Iterator it = bag.iterator(); while (it.hasNext()) { Tuple t = (Tuple) it.next(); if (t != null && t.size() > 0 && t.get(0) != null) { result += t.get(0).toString().substring(0, 1); } } } catch (ExecException ee) { throw ee; } catch (Exception e) { int errCode = 2106; String msg = "Error while computing AllFirstLetter in " + this.getClass().getSimpleName(); throw new ExecException(msg, errCode, PigException.BUG, e); } }
Example 3
Source File: TestFindQuantiles.java From spork with Apache License 2.0 | 6 votes |
@Test public void testFindQuantilesRemainder() throws Exception { final int numSamples = 1900; final int numReducers = 300; DataBag samples = generateRandomSortedSamples(numSamples, 365); Map<String, Object> findQuantilesResult = getFindQuantilesResult(samples, numReducers); DataBag quantilesBag = (DataBag)findQuantilesResult.get(FindQuantiles.QUANTILES_LIST); Iterator<Tuple> iter = quantilesBag.iterator(); Tuple lastQuantile = null; while (iter.hasNext()) { lastQuantile = iter.next(); } int lastQuantileNum = (Integer)lastQuantile.get(0); int count = 0; iter = samples.iterator(); while (iter.hasNext()) { Tuple t = iter.next(); int num = (Integer)t.get(0); if (num >= lastQuantileNum) { count++; } } assertTrue((double)count/numSamples <= 1.0/365 + 0.001); }
Example 4
Source File: DisplayExamples.java From spork with Apache License 2.0 | 5 votes |
static String[][] MakeArray(Operator op, DataBag bag) throws Exception { int rows = (int) bag.size(); int cols = ((LogicalRelationalOperator)op).getSchema().getFields().size(); String[][] table = new String[rows][cols]; Iterator<Tuple> it = bag.iterator(); for (int i = 0; i < rows; ++i) { Tuple t = it.next(); for (int j = 0; j < cols; ++j) { table[i][j] = ShortenField(t.get(j)); } } return table; }
Example 5
Source File: BigDecimalAvg.java From spork with Apache License 2.0 | 5 votes |
static protected Tuple combine(DataBag values) throws ExecException { BigDecimal sum = BigDecimal.ZERO; BigDecimal count = BigDecimal.ZERO; // combine is called from Intermediate and Final // In either case, Initial would have been called // before and would have sent in valid tuples // Hence we don't need to check if incoming bag // is empty Tuple output = mTupleFactory.newTuple(2); boolean sawNonNull = false; for (Iterator<Tuple> it = values.iterator(); it.hasNext();) { Tuple t = it.next(); BigDecimal d = (BigDecimal)t.get(0); // we count nulls in avg as contributing 0 // a departure from SQL for performance of // COUNT() which implemented by just inspecting // size of the bag if (d == null) { d = BigDecimal.ZERO; } else { sawNonNull = true; } sum = sum.add(d); count = count.add((BigDecimal)t.get(1)); } if (sawNonNull) { output.set(0, sum); } else { output.set(0, null); } output.set(1, count); return output; }
Example 6
Source File: AllFirstLetter.java From spork with Apache License 2.0 | 5 votes |
public String exec(Tuple input) throws IOException { result = ""; DataBag bag = (DataBag) input.get(0); Iterator<Tuple> it = bag.iterator(); while (it.hasNext()) { Tuple t = it.next(); if (t != null && t.size() > 0 && t.get(0) != null) result += t.get(0).toString().substring(0, 1); } return result; }
Example 7
Source File: IntAvg.java From spork with Apache License 2.0 | 5 votes |
static protected Tuple combine(DataBag values) throws ExecException { long sum = 0; long count = 0; // combine is called from Intermediate and Final // In either case, Initial would have been called // before and would have sent in valid tuples // Hence we don't need to check if incoming bag // is empty Tuple output = mTupleFactory.newTuple(2); boolean sawNonNull = false; for (Iterator<Tuple> it = values.iterator(); it.hasNext();) { Tuple t = it.next(); Long l = (Long)t.get(0); // we count nulls in avg as contributing 0 // a departure from SQL for performance of // COUNT() which implemented by just inspecting // size of the bag if(l == null) { l = 0L; } else { sawNonNull = true; } sum += l; count += (Long)t.get(1); } if(sawNonNull) { output.set(0, Long.valueOf(sum)); } else { output.set(0, null); } output.set(1, Long.valueOf(count)); return output; }
Example 8
Source File: TestBuiltin.java From spork with Apache License 2.0 | 5 votes |
@Test public void testTOKENIZE() throws Exception { TupleFactory tf = TupleFactory.getInstance(); Tuple t1 = tf.newTuple(1); t1.set(0, "123 456\"789"); Tuple t2 = tf.newTuple(1); t2.set(0, null); Tuple t3 = tf.newTuple(0); TOKENIZE f = new TOKENIZE(); DataBag b = f.exec(t1); assertTrue(b.size()==3); Iterator<Tuple> i = b.iterator(); Tuple rt = i.next(); assertTrue(rt.get(0).equals("123")); rt = i.next(); assertTrue(rt.get(0).equals("456")); rt = i.next(); assertTrue(rt.get(0).equals("789")); // Check when delim specified Tuple t4 = tf.newTuple(2); t4.set(0, "123|456|78\"9"); t4.set(1, "|"); b = f.exec(t4); assertTrue(b.size()==3); i = b.iterator(); rt = i.next(); assertTrue(rt.get(0).equals("123")); rt = i.next(); assertTrue(rt.get(0).equals("456")); rt = i.next(); assertTrue(rt.get(0).equals("78\"9")); b = f.exec(t2); assertTrue(b==null); b = f.exec(t3); assertTrue(b==null); }
Example 9
Source File: TestHelper.java From spork with Apache License 2.0 | 5 votes |
public static boolean bagContains(DataBag db, Tuple t) { Iterator<Tuple> iter = db.iterator(); for (Tuple tuple : db) { if (tuple.compareTo(t) == 0 || tupleEquals(tuple, t)) return true; } return false; }
Example 10
Source File: TOP.java From spork with Apache License 2.0 | 5 votes |
protected static void updateTop(PriorityQueue<Tuple> store, int limit, DataBag inputBag) { Iterator<Tuple> itr = inputBag.iterator(); while (itr.hasNext()) { Tuple t = itr.next(); store.add(t); if (store.size() > limit) store.poll(); } }
Example 11
Source File: LineageTrimmingVisitor.java From spork with Apache License 2.0 | 5 votes |
private void processLoad(LOLoad ld) throws FrontendException { // prune base records if (inputToDataMap.get(ld.getFileSpec()) != null) { baseData.put(ld, inputToDataMap.get(ld.getFileSpec())); return; } DataBag data = baseData.get(ld); if (data == null || data.size() < 2) return; Set<Tuple> realData = new HashSet<Tuple>(), syntheticData = new HashSet<Tuple>(); for (Iterator<Tuple> it = data.iterator(); it.hasNext(); ) { Tuple t = it.next(); if (((ExampleTuple)t).synthetic) syntheticData.add(t); else realData.add(t); } Map<LOLoad, DataBag> newBaseData = new HashMap<LOLoad, DataBag>(); DataBag newData = BagFactory.getInstance().newDefaultBag(); newBaseData.put(ld, newData); for (Map.Entry<LOLoad, DataBag> entry : baseData.entrySet()) { if (entry.getKey() != ld) { if (!entry.getKey().getFileSpec().equals(ld.getFileSpec())) newBaseData.put(entry.getKey(), entry.getValue()); else newBaseData.put(entry.getKey(), newData); } } if (checkNewBaseData(newData, newBaseData, realData)) checkNewBaseData(newData, newBaseData, syntheticData); inputToDataMap.put(ld.getFileSpec(), baseData.get(ld)); }
Example 12
Source File: Over.java From spork with Apache License 2.0 | 5 votes |
@Override public Object exec(Tuple input) throws IOException { DataBag inbag = (DataBag)input.get(0); OverBag.OverBagIterator iter = (OverBag.OverBagIterator)inbag.iterator(); return iter.tuples.get(iter.end - 1).get(0); }
Example 13
Source File: TestMapSideCogroup.java From spork with Apache License 2.0 | 5 votes |
@Test public void testCogrpOnMultiKeys() throws Exception{ PigServer pigServer = new PigServer(cluster.getExecType(), cluster.getProperties()); pigServer.registerQuery("A = LOAD '" + INPUT_FILE1 + "' using "+ DummyCollectableLoader.class.getName() +"() as (c1:chararray,c2:chararray);"); pigServer.registerQuery("B = LOAD '" + INPUT_FILE2 + "' using "+ DummyIndexableLoader.class.getName() +"() as (c1:chararray,c2:chararray);"); DataBag dbMergeCogrp = BagFactory.getInstance().newDefaultBag(); pigServer.registerQuery("C = cogroup A by (c1,c2) , B by (c1,c2) using 'merge' ;"); Iterator<Tuple> iter = pigServer.openIterator("C"); while(iter.hasNext()) { Tuple t = iter.next(); dbMergeCogrp.add(t); } String[] results = new String[]{ "((1,1),{(1,1)},{(1,1)})", "((1,2),{(1,2)},{(1,2)})", "((1,3),{(1,3)},{(1,3)})", "((2,1),{(2,1)},{(2,1)})", "((2,2),{(2,2)},{(2,2)})", "((2,3),{(2,3)},{(2,3)})", "((3,1),{(3,1)},{(3,1)})", "((3,2),{(3,2)},{(3,2)})", "((3,3),{(3,3)},{(3,3)})" }; assertEquals(9, dbMergeCogrp.size()); Iterator<Tuple> itr = dbMergeCogrp.iterator(); for(int i=0; i<9; i++){ assertEquals(itr.next().toString(), results[i]); } assertFalse(itr.hasNext()); }
Example 14
Source File: TestMapSideCogroup.java From spork with Apache License 2.0 | 5 votes |
@Test public void test3Way() throws Exception{ PigServer pigServer = new PigServer(cluster.getExecType(), cluster.getProperties()); pigServer.registerQuery("A = LOAD '" + INPUT_FILE1 + "' using "+ DummyCollectableLoader.class.getName() +"() as (c1:chararray,c2:int);"); pigServer.registerQuery("B = LOAD '" + INPUT_FILE2 + "' using "+ DummyIndexableLoader.class.getName() +"() as (c1:chararray,c2:int);"); pigServer.registerQuery("E = LOAD '" + INPUT_FILE3 + "' using "+ DummyIndexableLoader.class.getName() +"() as (c1:chararray,c2:int);"); DataBag dbMergeCogrp = BagFactory.getInstance().newDefaultBag(); pigServer.registerQuery("C = cogroup A by c1, B by c1, E by c1 using 'merge';"); Iterator<Tuple> iter = pigServer.openIterator("C"); while(iter.hasNext()) { Tuple t = iter.next(); dbMergeCogrp.add(t); } String[] results = new String[]{ "(1,{(1,1),(1,2),(1,3)},{(1,1),(1,2),(1,3)},{(1,1),(1,2),(1,3)})", "(2,{(2,2),(2,1),(2,3)},{(2,1),(2,2),(2,3)},{(2,1),(2,2),(2,3)})", "(3,{(3,2),(3,3),(3,1)},{(3,1),(3,2),(3,3)},{(3,1),(3,2),(3,3)})" }; assertEquals(3, dbMergeCogrp.size()); Iterator<Tuple> itr = dbMergeCogrp.iterator(); for(int i=0; i<3; i++){ assertEquals(itr.next().toString(), results[i]); } assertFalse(itr.hasNext()); }
Example 15
Source File: LongVAR.java From datafu with Apache License 2.0 | 4 votes |
static protected Tuple combine(DataBag values) throws ExecException{ long sum = 0; long sumSquare = 0; long totalCount = 0; // combine is called from Intermediate and Final // In either case, Initial would have been called // before and would have sent in valid tuples // Hence we don't need to check if incoming bag // is empty Tuple output = mTupleFactory.newTuple(3); boolean sawNonNull = false; for (Iterator<Tuple> it = values.iterator(); it.hasNext();) { Tuple t = it.next(); Long l = (Long)t.get(0); Long lSquare = (Long)t.get(1); Long count = (Long)t.get(2); // we count nulls in var as contributing 0 // a departure from SQL for performance of // COUNT() which implemented by just inspecting // size of the bag if(l == null) { l = (long)0; lSquare = (long)0; } else { sawNonNull = true; } sum += l; sumSquare += lSquare; totalCount += count; } if(sawNonNull) { output.set(0, new Long(sum)); output.set(1, new Long(sumSquare)); } else { output.set(0, null); output.set(1, null); } output.set(2, Long.valueOf(totalCount)); return output; }
Example 16
Source File: POPartitionRearrangeTez.java From spork with Apache License 2.0 | 4 votes |
// Initializes the partitioner from the sampled key-distribution map,
// preferring results already cached for this sample vertex in the Tez
// ObjectCache; on a cache miss it parses the partition list into
// reducerMap and caches the outcome for later task attempts.
@SuppressWarnings("unchecked")
private void init() throws RuntimeException {
    ObjectCache cache = ObjectCache.getInstance();
    String isCachedKey = "sample-" + PigProcessor.sampleVertex + ".cached";
    String totalReducersCacheKey = "sample-" + PigProcessor.sampleVertex + ".totalReducers";
    String reducerMapCacheKey = "sample-" + PigProcessor.sampleVertex + ".reducerMap";
    // Fast path: a previous task on this JVM already computed everything.
    if (cache.retrieve(isCachedKey) == Boolean.TRUE) {
        totalReducers = (Integer) cache.retrieve(totalReducersCacheKey);
        reducerMap = (Map<Object, Pair<Integer, Integer>>) cache.retrieve(reducerMapCacheKey);
        LOG.info("Found totalReducers and reducerMap in Tez cache. cachekey="
                + totalReducersCacheKey + "," + reducerMapCacheKey);
        inited = true;
        return;
    }
    Map<String, Object> distMap = null;
    if (PigProcessor.sampleMap != null) {
        // We've already collected sampleMap in PigProcessor
        distMap = PigProcessor.sampleMap;
    } else {
        // No sample available: nothing to build, but mark init as done.
        LOG.info("Key distribution map is empty");
        inited = true;
        return;
    }
    long start = System.currentTimeMillis();
    try {
        // The distMap is structured as (key, min, max) where min, max
        // being the index of the reducers
        DataBag partitionList = (DataBag) distMap.get(PartitionSkewedKeys.PARTITION_LIST);
        totalReducers = Integer.valueOf("" + distMap.get(PartitionSkewedKeys.TOTAL_REDUCERS));
        Iterator<Tuple> it = partitionList.iterator();
        while (it.hasNext()) {
            Tuple idxTuple = it.next();
            // min/max reducer indices are the last two fields of each tuple.
            Integer maxIndex = (Integer) idxTuple.get(idxTuple.size() - 1);
            Integer minIndex = (Integer) idxTuple.get(idxTuple.size() - 2);
            // Used to replace the maxIndex with the number of reducers
            if (maxIndex < minIndex) {
                maxIndex = totalReducers + maxIndex;
            }
            Object keyT;
            // if the join is on more than 1 key
            if (idxTuple.size() > 3) {
                // remove the last 2 fields of the tuple, i.e: minIndex
                // and maxIndex and store it in the reducer map
                Tuple keyTuple = tf.newTuple();
                for (int i=0; i < idxTuple.size() - 2; i++) {
                    keyTuple.append(idxTuple.get(i));
                }
                keyT = keyTuple;
            } else {
                // Single-key join: the key is the tuple's first field.
                keyT = idxTuple.get(0);
            }
            // number of reducers
            Integer cnt = maxIndex - minIndex;
            // 1 is added to account for the 0 index
            reducerMap.put(keyT, new Pair<Integer, Integer>(minIndex, cnt));
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    LOG.info("Initialized POPartitionRearrangeTez. Time taken: "
            + (System.currentTimeMillis() - start));
    // Publish the computed state so sibling tasks skip the parse above.
    cache.cache(isCachedKey, Boolean.TRUE);
    cache.cache(totalReducersCacheKey, totalReducers);
    cache.cache(reducerMapCacheKey, reducerMap);
    inited = true;
}
Example 17
Source File: TestStat.java From spork with Apache License 2.0 | 4 votes |
public void testCOR() throws Exception{ COR cor = new COR("a","b"); DataBag dBag = DefaultBagFactory.getInstance().newDefaultBag(); Tuple tup1 = DefaultTupleFactory.getInstance().newTuple(1); tup1.set(0, 1.0); dBag.add(tup1); tup1 = DefaultTupleFactory.getInstance().newTuple(1); tup1.set(0, 4.0); dBag.add(tup1); tup1 = DefaultTupleFactory.getInstance().newTuple(1); tup1.set(0, 8.0); dBag.add(tup1); tup1 = DefaultTupleFactory.getInstance().newTuple(1); tup1.set(0, 4.0); dBag.add(tup1); tup1 = DefaultTupleFactory.getInstance().newTuple(1); tup1.set(0, 7.0); dBag.add(tup1); tup1 = DefaultTupleFactory.getInstance().newTuple(1); tup1.set(0, 8.0); dBag.add(tup1); DataBag dBag1 = DefaultBagFactory.getInstance().newDefaultBag(); tup1 = DefaultTupleFactory.getInstance().newTuple(1); tup1.set(0, 2.0); dBag1.add(tup1); tup1 = DefaultTupleFactory.getInstance().newTuple(1); tup1.set(0, 2.0); dBag1.add(tup1); tup1 = DefaultTupleFactory.getInstance().newTuple(1); tup1.set(0, 3.0); dBag1.add(tup1); tup1 = DefaultTupleFactory.getInstance().newTuple(1); tup1.set(0, 3.0); dBag1.add(tup1); tup1 = DefaultTupleFactory.getInstance().newTuple(1); tup1.set(0, 2.0); dBag1.add(tup1); tup1 = DefaultTupleFactory.getInstance().newTuple(1); tup1.set(0, 4.0); dBag1.add(tup1); Tuple input = DefaultTupleFactory.getInstance().newTuple(2); input.set(0, dBag); input.set(1, dBag1); DataBag output = cor.exec(input); Iterator<Tuple> it = output.iterator(); Tuple ans = (Tuple) it.next(); assertEquals((String)ans.get(0),"a"); assertEquals((String)ans.get(1),"b"); assertEquals(0.582222509739582, (Double)ans.get(2) ,0.0005); }
Example 18
Source File: DoubleVAR.java From datafu with Apache License 2.0 | 4 votes |
static protected Tuple combine(DataBag values) throws ExecException{ double sum = 0; double sumSquare = 0; long totalCount = 0; // combine is called from Intermediate and Final // In either case, Initial would have been called // before and would have sent in valid tuples // Hence we don't need to check if incoming bag // is empty Tuple output = mTupleFactory.newTuple(3); boolean sawNonNull = false; for (Iterator<Tuple> it = values.iterator(); it.hasNext();) { Tuple t = it.next(); Double d = (Double)t.get(0); Double dSquare = (Double)t.get(1); Long count = (Long)t.get(2); // we count nulls in var as contributing 0 // a departure from SQL for performance of // COUNT() which implemented by just inspecting // size of the bag if(d == null) { d = 0.0; dSquare = 0.0; } else { sawNonNull = true; } sum += d; sumSquare += dSquare; totalCount += count; } if(sawNonNull) { output.set(0, new Double(sum)); output.set(1, new Double(sumSquare)); } else { output.set(0, null); output.set(1, null); } output.set(2, Long.valueOf(totalCount)); return output; }
Example 19
Source File: SkewedPartitionerTez.java From spork with Apache License 2.0 | 4 votes |
@Override protected void init() { Map<String, Object> distMap = null; if (PigProcessor.sampleMap != null) { // We've collected sampleMap in PigProcessor distMap = PigProcessor.sampleMap; } else { LOG.info("Key distribution map is empty"); inited = true; return; } long start = System.currentTimeMillis(); try { // The distMap is structured as (key, min, max) where min, max // being the index of the reducers DataBag partitionList = (DataBag) distMap.get(PartitionSkewedKeys.PARTITION_LIST); totalReducers = Integer.valueOf("" + distMap.get(PartitionSkewedKeys.TOTAL_REDUCERS)); Iterator<Tuple> it = partitionList.iterator(); while (it.hasNext()) { Tuple idxTuple = it.next(); Integer maxIndex = (Integer) idxTuple.get(idxTuple.size() - 1); Integer minIndex = (Integer) idxTuple.get(idxTuple.size() - 2); // Used to replace the maxIndex with the number of reducers if (maxIndex < minIndex) { maxIndex = totalReducers + maxIndex; } Tuple keyT; // if the join is on more than 1 key if (idxTuple.size() > 3) { // remove the last 2 fields of the tuple, i.e: minIndex and maxIndex and store // it in the reducer map Tuple keyTuple = tf.newTuple(); for (int i=0; i < idxTuple.size() - 2; i++) { keyTuple.append(idxTuple.get(i)); } keyT = keyTuple; } else { keyT = tf.newTuple(1); keyT.set(0,idxTuple.get(0)); } // number of reducers Integer cnt = maxIndex - minIndex; // 1 is added to account for the 0 index reducerMap.put(keyT, new Pair<Integer, Integer>(minIndex, cnt)); } } catch (Exception e) { throw new RuntimeException(e); } LOG.info("Initialized SkewedPartitionerTez. Time taken: " + (System.currentTimeMillis() - start)); inited = true; }
Example 20
Source File: TestBuiltin.java From spork with Apache License 2.0 | 4 votes |
// Builds a bag of single-field double tuples from the given values
// (shared fixture builder — the original repeated the same three-line
// add sequence twenty-four times).
private static DataBag doubleBag(double... values) throws Exception {
    DataBag bag = DefaultBagFactory.getInstance().newDefaultBag();
    for (double v : values) {
        Tuple t = TupleFactory.getInstance().newTuple(1);
        t.set(0, v);
        bag.add(t);
    }
    return bag;
}

/**
 * COV and COR over the same two 6-element double series: each output tuple
 * must carry both schema names and the expected statistic.
 */
@Test
public void testStatsFunc() throws Exception {
    // Covariance of the two series.
    COV cov = new COV("a","b");
    DataBag dBag = doubleBag(1.0, 4.0, 8.0, 4.0, 7.0, 8.0);
    DataBag dBag1 = doubleBag(2.0, 2.0, 3.0, 3.0, 2.0, 4.0);
    Tuple input = TupleFactory.getInstance().newTuple(2);
    input.set(0, dBag);
    input.set(1, dBag1);
    DataBag output = cov.exec(input);
    Iterator<Tuple> it = output.iterator();
    Tuple ans = it.next();
    assertEquals(ans.get(0),"a");
    assertEquals(ans.get(1),"b");
    assertEquals(1.11111, (Double)ans.get(2),0.0005);
    // Correlation of the same two series.
    COR cor = new COR("a","b");
    dBag = doubleBag(1.0, 4.0, 8.0, 4.0, 7.0, 8.0);
    dBag1 = doubleBag(2.0, 2.0, 3.0, 3.0, 2.0, 4.0);
    input = TupleFactory.getInstance().newTuple(2);
    input.set(0, dBag);
    input.set(1, dBag1);
    output = cor.exec(input);
    it = output.iterator();
    ans = it.next();
    assertEquals(ans.get(0),"a");
    assertEquals(ans.get(1),"b");
    assertEquals(0.582222509739582, (Double)ans.get(2) ,0.0005);
}