Java Code Examples for org.pentaho.di.core.row.RowDataUtil#resizeArray()
The following examples show how to use
org.pentaho.di.core.row.RowDataUtil#resizeArray() .
You can vote up the examples you find useful or vote down the ones you don't, and follow the links above each example to visit the original project or source file. You can also browse related API usage via the sidebar.
Example 1
Source File: ReplaceString.java From pentaho-kettle with Apache License 2.0 | 6 votes |
synchronized Object[] getOneRow( RowMetaInterface rowMeta, Object[] row ) throws KettleException { Object[] rowData = RowDataUtil.resizeArray( row, data.outputRowMeta.size() ); int index = 0; Set<Integer> numFieldsAlreadyBeenTransformed = new HashSet<Integer>(); for ( int i = 0; i < data.numFields; i++ ) { RowMetaInterface currentRowMeta = ( numFieldsAlreadyBeenTransformed.contains( data.inStreamNrs[i] ) ) ? data.outputRowMeta : getInputRowMeta(); String value = replaceString( currentRowMeta.getString( rowData, data.inStreamNrs[i] ), data.patterns[i], getResolvedReplaceByString( i, row ) ); if ( Utils.isEmpty( data.outStreamNrs[i] ) ) { // update field value rowData[data.inStreamNrs[i]] = value; numFieldsAlreadyBeenTransformed.add( data.inStreamNrs[i] ); } else { // add new field value rowData[data.inputFieldsNr + index++] = value; } } return rowData; }
Example 2
Source File: ConcatFields.java From pentaho-kettle with Apache License 2.0 | 6 votes |
Object[] prepareOutputRow( Object[] r ) { Object[] outputRowData = null; if ( !meta.isRemoveSelectedFields() ) { // reserve room for the target field outputRowData = RowDataUtil.resizeArray( r, data.outputRowMeta.size() ); } else { // reserve room for the target field and re-map the fields outputRowData = new Object[ data.outputRowMeta.size() + RowDataUtil.OVER_ALLOCATE_SIZE ]; if ( r != null ) { // re-map the fields for ( int i = 0; i < data.remainingFieldsInputOutputMapping.length; i++ ) { // BTW: the new target field is not // here outputRowData[ i ] = r[ data.remainingFieldsInputOutputMapping[ i ] ]; } } } return outputRowData; }
Example 3
Source File: GetSlaveSequence.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public Object[] addSequence( RowMetaInterface inputRowMeta, Object[] inputRowData ) throws KettleException { Object next = null; // Are we still in the sequence range? // if ( data.value >= ( data.startValue + data.increment ) ) { // Get a new value from the service... // data.startValue = data.slaveServer.getNextSlaveSequenceValue( data.sequenceName, data.increment ); data.value = data.startValue; } next = Long.valueOf( data.value ); data.value++; if ( next != null ) { Object[] outputRowData = inputRowData; if ( inputRowData.length < inputRowMeta.size() + 1 ) { outputRowData = RowDataUtil.resizeArray( inputRowData, inputRowMeta.size() + 1 ); } outputRowData[inputRowMeta.size()] = next; return outputRowData; } else { throw new KettleStepException( BaseMessages.getString( PKG, "GetSequence.Exception.CouldNotFindNextValueForSequence" ) + meta.getValuename() ); } }
Example 4
Source File: GetFilesRowsCount.java From pentaho-kettle with Apache License 2.0 | 5 votes |
private Object[] getOneRow() throws KettleException { if ( !openNextFile() ) { return null; } // Build an empty row based on the meta-data Object[] r; try { // Create new row or clone if ( meta.isFileField() ) { r = data.readrow.clone(); r = RowDataUtil.resizeArray( r, data.outputRowMeta.size() ); } else { r = RowDataUtil.allocateRowData( data.outputRowMeta.size() ); } if ( meta.isSmartCount() && data.foundData ) { // We have data right the last separator, // we need to update the row count data.rownr++; } r[data.totalpreviousfields] = data.rownr; if ( meta.includeCountFiles() ) { r[data.totalpreviousfields + 1] = data.filenr; } incrementLinesInput(); } catch ( Exception e ) { throw new KettleException( "Unable to read row from file", e ); } return r; }
Example 5
Source File: YamlInput.java From pentaho-kettle with Apache License 2.0 | 4 votes |
private Object[] getRowData() throws KettleException { // Build an empty row based on the meta-data Object[] outputRowData = null; try { // Create new row... outputRowData = data.yaml.getRow( data.rowMeta ); if ( outputRowData == null ) { return null; } if ( data.readrow != null ) { outputRowData = RowDataUtil.addRowData( data.readrow, data.totalPreviousFields, outputRowData ); } else { outputRowData = RowDataUtil.resizeArray( outputRowData, data.totalOutStreamFields ); } int rowIndex = data.totalOutFields; // See if we need to add the filename to the row... if ( meta.includeFilename() && !Utils.isEmpty( meta.getFilenameField() ) ) { outputRowData[rowIndex++] = KettleVFS.getFilename( data.file ); } // See if we need to add the row number to the row... if ( meta.includeRowNumber() && !Utils.isEmpty( meta.getRowNumberField() ) ) { outputRowData[rowIndex++] = new Long( data.rownr ); } } catch ( Exception e ) { boolean sendToErrorRow = false; String errorMessage = null; if ( getStepMeta().isDoingErrorHandling() ) { sendToErrorRow = true; errorMessage = e.toString(); } else { logError( BaseMessages.getString( PKG, "YamlInput.ErrorInStepRunning", e.toString() ) ); setErrors( 1 ); stopAll(); logError( Const.getStackTracker( e ) ); setOutputDone(); // signal end to receiver(s) } if ( sendToErrorRow ) { // Simply add this row to the error row putError( getInputRowMeta(), outputRowData, 1, errorMessage, null, "YamlInput001" ); } } return outputRowData; }
Example 6
Source File: DBProc.java From pentaho-kettle with Apache License 2.0 | 4 votes |
/**
 * Calls the configured database procedure for one input row and builds the output row.
 *
 * On the first row, the output row meta is derived, the argument field indexes are
 * resolved against the input row meta, and the procedure lookup is prepared on the
 * database connection. For every row, the procedure is called and its results are
 * merged into the (widened) output row: an optional function return value and all OUT
 * arguments are appended after the input fields, while INOUT arguments replace their
 * source fields in place.
 *
 * NOTE(review): the error log message appends a closing "]" with no opening bracket —
 * presumably a typo in the original message assembly; left untouched here.
 */
private Object[] runProc( RowMetaInterface rowMeta, Object[] rowData ) throws KettleException {
  if ( first ) {
    first = false;
    // First row: derive the RowMeta for the output
    //
    data.outputMeta = data.inputRowMeta.clone();
    meta.getFields( data.outputMeta, getStepname(), null, null, this, repository, metaStore );
    // Resolve the input-row index for every IN/INOUT argument; -1 marks OUT-only arguments.
    data.argnrs = new int[meta.getArgument().length];
    for ( int i = 0; i < meta.getArgument().length; i++ ) {
      if ( !meta.getArgumentDirection()[i].equalsIgnoreCase( "OUT" ) ) { // IN or INOUT
        data.argnrs[i] = rowMeta.indexOfValue( meta.getArgument()[i] );
        if ( data.argnrs[i] < 0 ) {
          logError( BaseMessages.getString( PKG, "DBProc.Log.ErrorFindingField" ) + meta.getArgument()[i] + "]" );
          throw new KettleStepException( BaseMessages.getString( PKG, "DBProc.Exception.CouldnotFindField", meta
            .getArgument()[i] ) );
        }
      } else {
        data.argnrs[i] = -1;
      }
    }
    // Prepare the procedure call on the database connection.
    data.db.setProcLookup( environmentSubstitute( meta.getProcedure() ), meta.getArgument(), meta
      .getArgumentDirection(), meta.getArgumentType(), meta.getResultName(), meta.getResultType() );
  }

  // Widen the input row so the procedure results fit after the input fields.
  Object[] outputRowData = RowDataUtil.resizeArray( rowData, data.outputMeta.size() );
  int outputIndex = rowMeta.size();

  // Bind the IN/INOUT values and execute the procedure.
  data.db.setProcValues( rowMeta, rowData, data.argnrs, meta.getArgumentDirection(), !Utils.isEmpty( meta
    .getResultName() ) );

  RowMetaAndData add =
    data.db.callProcedure( meta.getArgument(), meta.getArgumentDirection(), meta.getArgumentType(), meta
      .getResultName(), meta.getResultType() );
  int addIndex = 0;

  // Function return?
  if ( !Utils.isEmpty( meta.getResultName() ) ) {
    outputRowData[outputIndex++] = add.getData()[addIndex++]; // first is the function return
  }

  // We are only expecting the OUT and INOUT arguments here.
  // The INOUT values need to replace the value with the same name in the row.
  //
  for ( int i = 0; i < data.argnrs.length; i++ ) {
    if ( meta.getArgumentDirection()[i].equalsIgnoreCase( "OUT" ) ) {
      // add: OUT values are appended after the input fields
      outputRowData[outputIndex++] = add.getData()[addIndex++];
    } else if ( meta.getArgumentDirection()[i].equalsIgnoreCase( "INOUT" ) ) {
      // replace: INOUT values overwrite the source field in place
      outputRowData[data.argnrs[i]] = add.getData()[addIndex];
      addIndex++;
    }
    // IN not taken
  }
  return outputRowData;
}
Example 7
Source File: AddSequence.java From pentaho-kettle with Apache License 2.0 | 4 votes |
public Object[] addSequence( RowMetaInterface inputRowMeta, Object[] inputRowData ) throws KettleException { Object next = null; if ( meta.isCounterUsed() ) { synchronized ( data.counter ) { long prev = data.counter.getCounter(); long nval = prev + data.increment; if ( data.increment > 0 && data.maximum > data.start && nval > data.maximum ) { nval = data.start; } if ( data.increment < 0 && data.maximum < data.start && nval < data.maximum ) { nval = data.start; } data.counter.setCounter( nval ); next = prev; } } else if ( meta.isDatabaseUsed() ) { try { next = data.getDb().getNextSequenceValue( data.realSchemaName, data.realSequenceName, meta.getValuename() ); } catch ( KettleDatabaseException dbe ) { throw new KettleStepException( BaseMessages.getString( PKG, "AddSequence.Exception.ErrorReadingSequence", data.realSequenceName ), dbe ); } } else { // This should never happen, but if it does, don't continue!!! throw new KettleStepException( BaseMessages.getString( PKG, "AddSequence.Exception.NoSpecifiedMethod" ) ); } if ( next != null ) { Object[] outputRowData = inputRowData; if ( inputRowData.length < inputRowMeta.size() + 1 ) { outputRowData = RowDataUtil.resizeArray( inputRowData, inputRowMeta.size() + 1 ); } outputRowData[inputRowMeta.size()] = next; return outputRowData; } else { throw new KettleStepException( BaseMessages.getString( PKG, "AddSequence.Exception.CouldNotFindNextValueForSequence" ) + meta.getValuename() ); } }
Example 8
Source File: ElasticSearchBulk.java From pentaho-kettle with Apache License 2.0 | 3 votes |
/**
 * Widens the buffered row at {@code rowIndex} by one slot and stores the generated id
 * in the new last position.
 */
private void addIdToRow( String id, int rowIndex ) {
  Object[] widened = RowDataUtil.resizeArray( data.inputRowBuffer[rowIndex], getInputRowMeta().size() + 1 );
  widened[getInputRowMeta().size()] = id;
  data.inputRowBuffer[rowIndex] = widened;
}