org.pentaho.di.core.row.RowDataUtil Java Examples
The following examples show how to use org.pentaho.di.core.row.RowDataUtil.
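Most of the examples below revolve around a handful of static helpers on RowDataUtil: allocateRowData, resizeArray, createResizedCopy, addValueData, addRowData, and the OVER_ALLOCATE_SIZE constant. The following is a minimal sketch of those calls in isolation; it is not taken from any of the projects listed, and the class name, the row width of 3, and the field values are invented purely for illustration.

import org.pentaho.di.core.row.RowDataUtil;

public class RowDataUtilSketch {
  public static void main( String[] args ) {
    // Allocate an output row sized to the output row metadata (a width of 3 is assumed here).
    Object[] row = RowDataUtil.allocateRowData( 3 );
    row[0] = "key";
    row[1] = "value";
    row[2] = Long.valueOf( 42L );

    // Grow an existing row to the output width while keeping its current values,
    // as GetFilesRowsCount and ConcatFields do before filling the extra fields.
    Object[] resized = RowDataUtil.resizeArray( row, 5 );

    // Same idea on a copy, leaving the original row untouched (see JsonInput below).
    Object[] copy = RowDataUtil.createResizedCopy( row, 5 );

    // Append a single value after the existing fields, as DummyPlugin, UniqueRows and JsonOutput do.
    Object[] withExtra = RowDataUtil.addValueData( row, 3, "extra" );

    // Append a whole block of values after the first 3 fields, as FuzzyMatch, TableInput and AnalyticQuery do.
    Object[] joined = RowDataUtil.addRowData( row, 3, new Object[] { "a", "b" } );

    // ConcatFields sizes a raw array by hand, explicitly reserving the usual head room.
    Object[] manual = new Object[ 3 + RowDataUtil.OVER_ALLOCATE_SIZE ];

    System.out.println( joined[0] + " / " + withExtra[3] + " / " + resized.length + " / " + copy.length + " / " + manual.length );
  }
}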
Example #1
Source File: KVStringStringToKettleRowFn.java From kettle-beam with Apache License 2.0 | 7 votes |
@ProcessElement
public void processElement( ProcessContext processContext ) {
  try {
    KV<String, String> kv = processContext.element();
    inputCounter.inc();

    Object[] outputRow = RowDataUtil.allocateRowData( rowMeta.size() );
    outputRow[ 0 ] = kv.getKey(); // String
    outputRow[ 1 ] = kv.getValue(); // String

    processContext.output( new KettleRow( outputRow ) );
    writtenCounter.inc();
  } catch ( Exception e ) {
    numErrors.inc();
    LOG.error( "Error in KV<Long,String> to Kettle Row conversion function", e );
    throw new RuntimeException( "Error in KV<Long,String> to Kettle Row conversion function", e );
  }
}
Example #2
Source File: ParseMailInputTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Message content type is correct
 *
 * @throws Exception
 */
@Test
public void testMessageContentTypeIsParsed() throws Exception {
  int[] fields = { MailInputField.COLUMN_CONTENT_TYPE };
  MailInputField[] farr = this.getDefaultInputFields( fields );
  this.mockMailInputMeta( farr );
  try {
    mailInput.processRow( meta, data );
  } catch ( KettleException e ) {
    // don't worry about it
  }
  MessageParser underTest = mailInput.new MessageParser();
  Object[] r = RowDataUtil.allocateRowData( data.nrFields );
  underTest.parseToArray( r, message );
  Assert.assertEquals( "Message Content type is correct", CNTNT_TYPE_EMAIL, String.class.cast( r[0] ) );
}
Example #3
Source File: EditRowsDialog.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@VisibleForTesting
Object[] getRowForData( TableItem item, int rowNr ) throws KettleException {
  try {
    Object[] row = RowDataUtil.allocateRowData( rowMeta.size() );
    for ( int i = 0; i < rowMeta.size(); i++ ) {
      ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
      ValueMetaInterface stringValueMeta = stringRowMeta.getValueMeta( i );
      int colnr = i + 1;
      if ( isDisplayingNullValue( item, colnr ) ) {
        row[i] = null; // <null> value
      } else {
        String string = item.getText( colnr );
        if ( stringValueMeta.isNull( string ) ) {
          string = null;
        }
        row[i] = valueMeta.convertDataFromString( string, stringValueMeta, null, null,
          ValueMetaInterface.TRIM_TYPE_NONE );
      }
    }
    return row;
  } catch ( KettleException e ) {
    throw new KettleException( BaseMessages.getString( PKG, "EditRowsDialog.Error.ErrorGettingRowForData",
      Integer.toString( rowNr ) ), e );
  }
}
Example #4
Source File: ParseMailInputTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * [PDI-6532] When mail header is found returns his actual value.
 *
 * @throws Exception
 * @throws KettleException
 */
@Test
public void testHeadersParsedPositive() throws Exception {
  // add expected fields:
  int[] fields = { MailInputField.COLUMN_HEADER };
  MailInputField[] farr = this.getDefaultInputFields( fields );
  // points to existed header
  farr[0].setName( HDR_EX1 );
  this.mockMailInputMeta( farr );
  try {
    mailInput.processRow( meta, data );
  } catch ( KettleException e ) {
    // don't worry about it
  }
  MessageParser underTest = mailInput.new MessageParser();
  Object[] r = RowDataUtil.allocateRowData( data.nrFields );
  underTest.parseToArray( r, message );
  Assert.assertEquals( "Header is correct", HDR_EX1V, String.class.cast( r[0] ) );
}
Example #5
Source File: ParseMailInputTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * [PDI-6532] When mail header is not found returns empty String
 *
 * @throws Exception
 */
@Test
public void testHeadersParsedNegative() throws Exception {
  int[] fields = { MailInputField.COLUMN_HEADER };
  MailInputField[] farr = this.getDefaultInputFields( fields );
  farr[0].setName( HDR_EX1 + "salt" );
  this.mockMailInputMeta( farr );
  try {
    mailInput.processRow( meta, data );
  } catch ( KettleException e ) {
    // don't worry about it
  }
  MessageParser underTest = mailInput.new MessageParser();
  Object[] r = RowDataUtil.allocateRowData( data.nrFields );
  underTest.parseToArray( r, message );
  Assert.assertEquals( "Header is correct", "", String.class.cast( r[0] ) );
}
Example #6
Source File: PubsubMessageToKettleRowFn.java From kettle-beam with Apache License 2.0 | 6 votes |
@ProcessElement
public void processElement( ProcessContext processContext ) {
  try {
    PubsubMessage message = processContext.element();
    inputCounter.inc();

    Object[] outputRow = RowDataUtil.allocateRowData( rowMeta.size() );
    outputRow[0] = message; // Serializable

    processContext.output( new KettleRow( outputRow ) );
    writtenCounter.inc();
  } catch ( Exception e ) {
    numErrors.inc();
    LOG.error( "Error in pub/sub publish messages function", e );
    throw new RuntimeException( "Error in pub/sub publish messages function", e );
  }
}
Example #7
Source File: ParseMailInputTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Test, message number can be parsed correctly
 *
 * @throws Exception
 */
@Test
public void testMessageNumberIsParsed() throws Exception {
  int[] fields = { MailInputField.COLUMN_MESSAGE_NR };
  MailInputField[] farr = this.getDefaultInputFields( fields );
  this.mockMailInputMeta( farr );
  try {
    mailInput.processRow( meta, data );
  } catch ( KettleException e ) {
    // don't worry about it
  }
  MessageParser underTest = mailInput.new MessageParser();
  Object[] r = RowDataUtil.allocateRowData( data.nrFields );
  underTest.parseToArray( r, message );
  Assert.assertEquals( "Message number is correct", new Long( MSG_NUMB ), Long.class.cast( r[0] ) );
}
Example #8
Source File: ParseMailInputTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Test message subject can be parsed
 *
 * @throws Exception
 */
@Test
public void testMessageSubjectIsParsed() throws Exception {
  int[] fields = { MailInputField.COLUMN_SUBJECT };
  MailInputField[] farr = this.getDefaultInputFields( fields );
  this.mockMailInputMeta( farr );
  try {
    mailInput.processRow( meta, data );
  } catch ( KettleException e ) {
    // don't worry about it
  }
  MessageParser underTest = mailInput.new MessageParser();
  Object[] r = RowDataUtil.allocateRowData( data.nrFields );
  underTest.parseToArray( r, message );
  Assert.assertEquals( "Message subject is correct", SUBJ, String.class.cast( r[0] ) );
}
Example #9
Source File: ParseMailInputTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Test message From can be parsed correctly
 *
 * @throws Exception
 */
@Test
public void testMessageFromIsParsed() throws Exception {
  int[] fields = { MailInputField.COLUMN_SENDER };
  MailInputField[] farr = this.getDefaultInputFields( fields );
  this.mockMailInputMeta( farr );
  try {
    mailInput.processRow( meta, data );
  } catch ( KettleException e ) {
    // don't worry about it
  }
  MessageParser underTest = mailInput.new MessageParser();
  Object[] r = RowDataUtil.allocateRowData( data.nrFields );
  underTest.parseToArray( r, message );
  // expect, that from is concatenated with ';'
  String expected = StringUtils.join( new String[] { FROM1, FROM2 }, ";" );
  Assert.assertEquals( "Message From is correct", expected, String.class.cast( r[0] ) );
}
Example #10
Source File: ParseMailInputTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Test that message body can be parsed correctly
 *
 * @throws Exception
 */
@Test
public void testMessageBodyIsParsed() throws Exception {
  int[] fields = { MailInputField.COLUMN_BODY };
  MailInputField[] farr = this.getDefaultInputFields( fields );
  this.mockMailInputMeta( farr );
  try {
    mailInput.processRow( meta, data );
  } catch ( KettleException e ) {
    // don't worry about it
  }
  MessageParser underTest = mailInput.new MessageParser();
  Object[] r = RowDataUtil.allocateRowData( data.nrFields );
  underTest.parseToArray( r, message );
  Assert.assertEquals( "Message Body is correct", MSG_BODY, String.class.cast( r[0] ) );
}
Example #11
Source File: Flattener.java From pentaho-kettle with Apache License 2.0 | 6 votes |
private Object[] createOutputRow( Object[] rowData ) {
  Object[] outputRowData = RowDataUtil.allocateRowData( data.outputRowMeta.size() );
  int outputIndex = 0;

  // copy the values from previous, but don't take along index 'data.fieldNr'...
  //
  for ( int i = 0; i < data.inputRowMeta.size(); i++ ) {
    if ( i != data.fieldNr ) {
      outputRowData[outputIndex++] = rowData[i];
    }
  }

  // Now add the fields we flattened...
  //
  for ( int i = 0; i < data.targetResult.length; i++ ) {
    outputRowData[outputIndex++] = data.targetResult[i];
  }

  return outputRowData;
}
Example #12
Source File: GoogleSpreadsheetInput.java From pdi-google-spreadsheet-plugin with BSD 3-Clause "New" or "Revised" License | 6 votes |
private Object[] readRow() {
  try {
    Object[] outputRowData = RowDataUtil.allocateRowData(data.outputRowMeta.size());
    int outputIndex = 0;

    if (data.currentRow < data.rows.size()) {
      ListEntry row = data.rows.get(data.currentRow);
      for (ValueMetaInterface column : data.outputRowMeta.getValueMetaList()) {
        String value = row.getCustomElements().getValue(column.getName());
        if (value == null)
          outputRowData[outputIndex++] = null;
        else
          outputRowData[outputIndex++] = value.getBytes("UTF-8");
      }
    } else {
      return null;
    }
    return outputRowData;
  } catch (Exception e) {
    return null;
  }
}
Example #13
Source File: ParseMailInputTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Test that message folder name can be parsed correctly
 *
 * @throws Exception
 */
@Test
public void testMessageFolderNameIsParsed() throws Exception {
  int[] fields = { MailInputField.COLUMN_FOLDER_NAME };
  MailInputField[] farr = this.getDefaultInputFields( fields );
  this.mockMailInputMeta( farr );
  try {
    mailInput.processRow( meta, data );
  } catch ( KettleException e ) {
    // don't worry about it
  }
  MessageParser underTest = mailInput.new MessageParser();
  Object[] r = RowDataUtil.allocateRowData( data.nrFields );
  underTest.parseToArray( r, message );
  Assert.assertEquals( "Message Folder Name is correct", FLD_NAME, String.class.cast( r[0] ) );
}
Example #14
Source File: ParseMailInputTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Test message recipients can be parsed
 *
 * @throws Exception
 */
@Test
public void testMessageRecipientsIsParsed() throws Exception {
  int[] fields = { MailInputField.COLUMN_RECIPIENTS };
  MailInputField[] farr = this.getDefaultInputFields( fields );
  this.mockMailInputMeta( farr );
  try {
    mailInput.processRow( meta, data );
  } catch ( KettleException e ) {
    // don't worry about it
  }
  MessageParser underTest = mailInput.new MessageParser();
  Object[] r = RowDataUtil.allocateRowData( data.nrFields );
  underTest.parseToArray( r, message );
  // is concatenated with ';'
  String expected = StringUtils.join( new String[] { REC1, REC2 }, ";" );
  Assert.assertEquals( "Message Recipients is correct", expected, String.class.cast( r[0] ) );
}
Example #15
Source File: RowOutputConverter.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public Object[] getRow( Object[] baseOutputRow, Object[] rawPartRow, JsonInputData data ) throws KettleException {
  if ( rawPartRow == null ) {
    return null;
  }
  for ( int i = 0; i < rawPartRow.length; i++ ) {
    int outIdx = data.totalpreviousfields + i;
    Object val =
      getValue( data.outputRowMeta.getValueMeta( outIdx ), data.convertRowMeta.getValueMeta( outIdx ),
        rawPartRow[i] );
    rawPartRow[i] = val;
    if ( val == null && data.repeatedFields.get( i ) && data.previousRow != null ) {
      rawPartRow[i] = data.previousRow[outIdx];
    }
  }
  data.previousRow = RowDataUtil.addRowData( baseOutputRow, data.totalpreviousfields, rawPartRow );
  return data.previousRow;
}
Example #16
Source File: ParseMailInputTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Test message size is correct
 *
 * @throws Exception
 */
@Test
public void testMessageSizeIsParsed() throws Exception {
  int[] fields = { MailInputField.COLUMN_SIZE };
  MailInputField[] farr = this.getDefaultInputFields( fields );
  this.mockMailInputMeta( farr );
  try {
    mailInput.processRow( meta, data );
  } catch ( KettleException e ) {
    // don't worry about it
  }
  MessageParser underTest = mailInput.new MessageParser();
  Object[] r = RowDataUtil.allocateRowData( data.nrFields );
  underTest.parseToArray( r, message );
  Assert.assertEquals( "Message Size is correct", new Long( CNTNT_SIZE ), Long.class.cast( r[0] ) );
}
Example #17
Source File: ConcatFields.java From pentaho-kettle with Apache License 2.0 | 6 votes |
Object[] prepareOutputRow( Object[] r ) {
  Object[] outputRowData = null;

  if ( !meta.isRemoveSelectedFields() ) {
    // reserve room for the target field
    outputRowData = RowDataUtil.resizeArray( r, data.outputRowMeta.size() );
  } else {
    // reserve room for the target field and re-map the fields
    outputRowData = new Object[ data.outputRowMeta.size() + RowDataUtil.OVER_ALLOCATE_SIZE ];
    if ( r != null ) {
      // re-map the fields
      for ( int i = 0; i < data.remainingFieldsInputOutputMapping.length; i++ ) {
        // BTW: the new target field is not here
        outputRowData[ i ] = r[ data.remainingFieldsInputOutputMapping[ i ] ];
      }
    }
  }
  return outputRowData;
}
Example #18
Source File: JsonInput.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * allocates out row
 */
private Object[] buildBaseOutputRow() {
  Object[] outputRowData;
  if ( data.readrow != null ) {
    if ( meta.isRemoveSourceField() && data.indexSourceField > -1 ) {
      // skip the source field in the output array
      int sz = data.readrow.length;
      outputRowData = RowDataUtil.allocateRowData( data.outputRowMeta.size() );
      int ii = 0;
      for ( int i = 0; i < sz; i++ ) {
        if ( i != data.indexSourceField ) {
          outputRowData[ ii++ ] = data.readrow[ i ];
        }
      }
    } else {
      outputRowData = RowDataUtil.createResizedCopy( data.readrow, data.outputRowMeta.size() );
    }
  } else {
    outputRowData = RowDataUtil.allocateRowData( data.outputRowMeta.size() );
  }
  return outputRowData;
}
Example #19
Source File: ParseMailInputTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Test that message folder name can be parsed correctly
 *
 * @throws Exception
 */
@Test
public void testMessageAttachedFilesCountNameIsParsed() throws Exception {
  int[] fields = { MailInputField.COLUMN_ATTACHED_FILES_COUNT };
  MailInputField[] farr = this.getDefaultInputFields( fields );
  this.mockMailInputMeta( farr );
  try {
    mailInput.processRow( meta, data );
  } catch ( KettleException e ) {
    // don't worry about it
  }
  MessageParser underTest = mailInput.new MessageParser();
  Object[] r = RowDataUtil.allocateRowData( data.nrFields );
  underTest.parseToArray( r, message );
  Assert.assertEquals( "Message Attached files count is correct", new Long( ATTCH_COUNT ), Long.class.cast( r[0] ) );
}
Example #20
Source File: EditRowsDialog.java From pentaho-pdi-dataset with Apache License 2.0 | 6 votes |
private Object[] getRowForData( TableItem item, int rowNr ) throws KettleException {
  try {
    Object[] row = RowDataUtil.allocateRowData( rowMeta.size() );
    for ( int i = 0; i < rowMeta.size(); i++ ) {
      ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
      ValueMetaInterface stringValueMeta = stringRowMeta.getValueMeta( i );
      int colnr = i + 1;
      if ( GUIResource.getInstance().getColorBlue().equals( item.getForeground( colnr ) ) ) {
        row[ i ] = null; // <null> value
      } else {
        String string = item.getText( colnr );
        row[ i ] = valueMeta.convertDataFromString( string, stringValueMeta, null, null,
          ValueMetaInterface.TRIM_TYPE_NONE );
      }
    }
    return row;
  } catch ( KettleException e ) {
    throw new KettleException( BaseMessages.getString( PKG, "EditRowsDialog.Error.ErrorGettingRowForData",
      Integer.toString( rowNr ) ), e );
  }
}
Example #21
Source File: FuzzyMatch.java From pentaho-kettle with Apache License 2.0 | 5 votes |
private Object[] lookupValues( RowMetaInterface rowMeta, Object[] row ) throws KettleException {
  if ( first ) {
    first = false;

    data.outputRowMeta = getInputRowMeta().clone();
    meta.getFields( data.outputRowMeta, getStepname(), new RowMetaInterface[] { data.infoMeta }, null, this,
      repository, metaStore );

    // Check lookup field
    data.indexOfMainField = getInputRowMeta().indexOfValue( environmentSubstitute( meta.getMainStreamField() ) );
    if ( data.indexOfMainField < 0 ) {
      // The field is unreachable !
      throw new KettleException( BaseMessages.getString( PKG, "FuzzyMatch.Exception.CouldnotFindMainField", meta
        .getMainStreamField() ) );
    }
  }
  Object[] add = null;
  if ( row[ data.indexOfMainField ] == null ) {
    add = buildEmptyRow();
  } else {
    try {
      add = getFromCache( row );
    } catch ( Exception e ) {
      throw new KettleStepException( e );
    }
  }
  return RowDataUtil.addRowData( row, rowMeta.size(), add );
}
Example #22
Source File: TransExecutor.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@VisibleForTesting
void collectExecutionResultFiles( Result result ) throws KettleException {
  RowSet resultFilesRowSet = getData().getResultFilesRowSet();
  if ( meta.getResultFilesTargetStepMeta() != null && result.getResultFilesList() != null
    && resultFilesRowSet != null ) {
    for ( ResultFile resultFile : result.getResultFilesList() ) {
      Object[] targetRow = RowDataUtil.allocateRowData( getData().getResultFilesOutputRowMeta().size() );
      int idx = 0;
      targetRow[ idx++ ] = resultFile.getFile().getName().toString();

      // TODO: time, origin, ...

      putRowTo( getData().getResultFilesOutputRowMeta(), targetRow, resultFilesRowSet );
    }
  }
}
Example #23
Source File: TableInput.java From pentaho-kettle with Apache License 2.0 | 5 votes |
private RowMetaAndData readStartDate() throws KettleException {
  if ( log.isDetailed() ) {
    logDetailed( "Reading from step [" + data.infoStream.getStepname() + "]" );
  }

  RowMetaInterface parametersMeta = new RowMeta();
  Object[] parametersData = new Object[] {};

  RowSet rowSet = findInputRowSet( data.infoStream.getStepname() );
  if ( rowSet != null ) {
    Object[] rowData = getRowFrom( rowSet ); // rows are originating from "lookup_from"
    while ( rowData != null ) {
      parametersData = RowDataUtil.addRowData( parametersData, parametersMeta.size(), rowData );
      parametersMeta.addRowMeta( rowSet.getRowMeta() );

      rowData = getRowFrom( rowSet ); // take all input rows if needed!
    }

    if ( parametersMeta.size() == 0 ) {
      throw new KettleException( "Expected to read parameters from step ["
        + data.infoStream.getStepname() + "] but none were found." );
    }
  } else {
    throw new KettleException( "Unable to find rowset to read from, perhaps step ["
      + data.infoStream.getStepname() + "] doesn't exist. (or perhaps you are trying a preview?)" );
  }

  RowMetaAndData parameters = new RowMetaAndData( parametersMeta, parametersData );

  return parameters;
}
Example #24
Source File: GetFilesRowsCount.java From pentaho-kettle with Apache License 2.0 | 5 votes |
private Object[] getOneRow() throws KettleException {
  if ( !openNextFile() ) {
    return null;
  }

  // Build an empty row based on the meta-data
  Object[] r;
  try {
    // Create new row or clone
    if ( meta.isFileField() ) {
      r = data.readrow.clone();
      r = RowDataUtil.resizeArray( r, data.outputRowMeta.size() );
    } else {
      r = RowDataUtil.allocateRowData( data.outputRowMeta.size() );
    }
    if ( meta.isSmartCount() && data.foundData ) {
      // We have data right the last separator,
      // we need to update the row count
      data.rownr++;
    }
    r[data.totalpreviousfields] = data.rownr;

    if ( meta.includeCountFiles() ) {
      r[data.totalpreviousfields + 1] = data.filenr;
    }

    incrementLinesInput();
  } catch ( Exception e ) {
    throw new KettleException( "Unable to read row from file", e );
  }

  return r;
}
Example #25
Source File: DummyPlugin.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@Override
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  meta = (DummyPluginMeta) smi;
  data = (DummyPluginData) sdi;

  Object[] r = getRow(); // get row, blocks when needed!
  if ( r == null ) {
    // no more input to be expected...
    setOutputDone();
    return false;
  }

  if ( first ) {
    first = false;

    data.outputRowMeta = getInputRowMeta().clone();
    meta.getFields( data.outputRowMeta, getStepname(), null, null, this );
  }

  Object extraValue = meta.getValue().getValueData();

  Object[] outputRow = RowDataUtil.addValueData( r, data.outputRowMeta.size() - 1, extraValue );

  putRow( data.outputRowMeta, outputRow ); // copy row to possible alternate rowset(s).

  if ( checkFeedback( getLinesRead() ) ) {
    logBasic( "Linenr " + getLinesRead() ); // Some basic logging every 5000 rows.
  }
  return true;
}
Example #26
Source File: UniqueRows.java From pentaho-kettle with Apache License 2.0 | 5 votes |
private Object[] addCounter( RowMetaInterface outputRowMeta, Object[] r, long count ) {
  if ( meta.isCountRows() ) {
    Object[] outputRow = RowDataUtil.addValueData( r, outputRowMeta.size() - 1, new Long( count ) );
    return outputRow;
  } else {
    return r; // nothing to do
  }
}
Example #27
Source File: JsonOutput.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@SuppressWarnings( "unchecked" )
private void outPutRow( Object[] rowData ) throws KettleStepException {
  // We can now output an object
  data.jg = new JSONObject();
  data.jg.put( data.realBlocName, data.ja );
  String value = data.jg.toJSONString();

  if ( data.outputValue && data.outputRowMeta != null ) {
    Object[] outputRowData = RowDataUtil.addValueData( rowData, data.inputRowMetaSize, value );
    incrementLinesOutput();
    putRow( data.outputRowMeta, outputRowData );
  }

  if ( data.writeToFile && !data.ja.isEmpty() ) {
    // Open a file
    if ( !openNewFile() ) {
      throw new KettleStepException( BaseMessages.getString( PKG, "JsonOutput.Error.OpenNewFile", buildFilename() ) );
    }
    // Write data to file
    try {
      data.writer.write( value );
    } catch ( Exception e ) {
      throw new KettleStepException( BaseMessages.getString( PKG, "JsonOutput.Error.Writing" ), e );
    }
    // Close file
    closeFile();
  }
  // Data are safe
  data.rowsAreSafe = true;
  data.ja = new JSONArray();
}
Example #28
Source File: OlapInput.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  try {
    if ( first ) {
      // we just got started
      first = false;
      meta.initData( this );
      data.rowNumber = 0;
    }

    for ( ; data.rowNumber < data.olapHelper.getRows().length; data.rowNumber++ ) {
      String[] row = data.olapHelper.getRows()[data.rowNumber];
      Object[] outputRowData = RowDataUtil.allocateRowData( row.length );
      outputRowData = row;
      putRow( data.outputRowMeta, outputRowData );
    }

    setOutputDone(); // signal end to receiver(s)
    return false; // end of data or error.
  } catch ( Exception e ) {
    logError( "An error occurred, processing will be stopped", e );
    setErrors( 1 );
    stopAll();
    return false;
  }
}
Example #29
Source File: AnalyticQuery.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public void processQueueObjectAt( int i ) throws KettleStepException {
  int index = i - 1;
  Object[] rows = data.data.toArray();
  Object[] fields = new Object[meta.getNumberOfFields()];
  for ( int j = 0; j < meta.getNumberOfFields(); j++ ) {
    // field_index is the location inside a row of the subject of this
    // ie, ORDERTOTAL might be the subject ofthis field lag or lead
    // so we determine that ORDERTOTAL's index in the row
    int field_index = data.inputRowMeta.indexOfValue( meta.getSubjectField()[j] );
    int row_index = 0;
    switch ( meta.getAggregateType()[j] ) {
      case AnalyticQueryMeta.TYPE_FUNCT_LAG:
        row_index = index - meta.getValueField()[j];
        break;
      case AnalyticQueryMeta.TYPE_FUNCT_LEAD:
        row_index = index + meta.getValueField()[j];
        break;
      default:
        break;
    }
    if ( row_index < rows.length && row_index >= 0 ) {
      Object[] singleRow = (Object[]) rows[row_index];
      if ( singleRow != null && singleRow[field_index] != null ) {
        fields[j] = ( (Object[]) rows[row_index] )[field_index];
      } else {
        // set default
        fields[j] = null;
      }
    } else {
      // set default
      fields[j] = null;
    }
  }
  Object[] newRow = RowDataUtil.addRowData( (Object[]) rows[index], data.inputRowMeta.size(), fields );

  putRow( data.outputRowMeta, newRow );
}
Example #30
Source File: AggregateRows.java From pentaho-kettle with Apache License 2.0 | 5 votes |
private synchronized Object[] buildAggregate() {
  Object[] agg = RowDataUtil.allocateRowData( data.outputRowMeta.size() );

  for ( int i = 0; i < data.fieldnrs.length; i++ ) {
    switch ( meta.getAggregateType()[i] ) {
      case AggregateRowsMeta.TYPE_AGGREGATE_SUM:
      case AggregateRowsMeta.TYPE_AGGREGATE_MIN:
      case AggregateRowsMeta.TYPE_AGGREGATE_MAX:
      case AggregateRowsMeta.TYPE_AGGREGATE_FIRST:
      case AggregateRowsMeta.TYPE_AGGREGATE_LAST:
      case AggregateRowsMeta.TYPE_AGGREGATE_NONE:
      case AggregateRowsMeta.TYPE_AGGREGATE_FIRST_NULL: // First value, EVEN if it's NULL:
      case AggregateRowsMeta.TYPE_AGGREGATE_LAST_NULL: // Last value, EVEN if it's NULL:
        agg[i] = data.values[i];
        break;
      case AggregateRowsMeta.TYPE_AGGREGATE_COUNT:
        agg[i] = new Double( data.counts[i] );
        break;
      case AggregateRowsMeta.TYPE_AGGREGATE_AVERAGE:
        agg[i] = new Double( ( (Double) data.values[i] ).doubleValue() / data.counts[i] );
        break;

      default:
        break;
    }
  }
  return agg;
}