Java Code Examples for org.pentaho.di.core.row.RowMetaInterface#addRowMeta()
The following examples show how to use org.pentaho.di.core.row.RowMetaInterface#addRowMeta().
These examples are extracted from open source projects; the source file, project, and license are noted above each example.
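Before working through the examples, a minimal sketch of the basic pattern may help: addRowMeta() appends every value metadata entry of the argument onto the target row layout. The class and field names below (AddRowMetaSketch, customer_id, customer_name, order_code) are illustrative placeholders and are not taken from any of the projects listed here.

import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaInteger;
import org.pentaho.di.core.row.value.ValueMetaString;

public class AddRowMetaSketch {

  // Combine two row layouts into one by appending their fields.
  public static RowMetaInterface buildCombinedRowMeta() {
    RowMetaInterface customerMeta = new RowMeta();
    customerMeta.addValueMeta( new ValueMetaInteger( "customer_id" ) );
    customerMeta.addValueMeta( new ValueMetaString( "customer_name" ) );

    RowMetaInterface orderMeta = new RowMeta();
    orderMeta.addValueMeta( new ValueMetaString( "order_code" ) );

    RowMetaInterface combined = new RowMeta();
    combined.addRowMeta( customerMeta ); // appends customer_id, customer_name
    combined.addRowMeta( orderMeta );    // appends order_code

    // combined now describes three fields, in order: customer_id, customer_name, order_code
    return combined;
  }
}

As many of the examples below show, a step's getFields() implementation typically either appends a freshly built layout to the incoming row metadata, or calls clear() first and then addRowMeta() to replace it entirely.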
Example 1
Source File: OlapInputMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Override
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  RowMetaInterface add = null;

  try {
    initData( space );
    add = data.outputRowMeta;
  } catch ( Exception dbe ) {
    throw new KettleStepException( "Unable to get query result for MDX query: " + Const.CR + mdx, dbe );
  }

  // Set the origin
  //
  for ( int i = 0; i < add.size(); i++ ) {
    ValueMetaInterface v = add.getValueMeta( i );
    v.setOrigin( origin );
  }

  row.addRowMeta( add );
}
Example 2
Source File: PaloCellInputMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Override
public void getFields( final RowMetaInterface row, final String origin, final RowMetaInterface[] info,
  final StepMeta nextStep, final VariableSpace space, Repository repository, IMetaStore metaStore )
  throws KettleStepException {
  if ( databaseMeta == null ) {
    throw new KettleStepException( "There is no Palo database server connection defined" );
  }
  final PaloHelper helper = new PaloHelper( databaseMeta, DefaultLogLevel.getLogLevel() );
  try {
    helper.connect();
    try {
      final RowMetaInterface rowMeta = helper.getCellRowMeta( this.cube, this.fields, this.cubeMeasure );
      row.addRowMeta( rowMeta );
    } finally {
      helper.disconnect();
    }
  } catch ( Exception e ) {
    throw new KettleStepException( e );
  }
}
Example 3
Source File: PaloDimInputMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public void getFields( final RowMetaInterface row, final String origin, final RowMetaInterface[] info,
  final StepMeta nextStep, final VariableSpace space, Repository repository, IMetaStore metaStore )
  throws KettleStepException {
  if ( databaseMeta == null ) {
    throw new KettleStepException( "There is no Palo database server connection defined" );
  }
  final PaloHelper helper = new PaloHelper( databaseMeta, DefaultLogLevel.getLogLevel() );
  try {
    helper.connect();
    try {
      final RowMetaInterface rowMeta =
        helper.getDimensionRowMeta( this.getDimension(), this.getLevels(), this.getBaseElementsOnly() );
      row.addRowMeta( rowMeta );
    } finally {
      helper.disconnect();
    }
  } catch ( Exception e ) {
    throw new KettleStepException( e );
  }
}
Example 4
Source File: OpenERPObjectInputMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public void getFields( final RowMetaInterface row, final String origin, final RowMetaInterface[] info,
  final StepMeta nextStep, final VariableSpace space, Repository repository, IMetaStore metaStore )
  throws KettleStepException {
  if ( databaseMeta == null ) {
    throw new KettleStepException( "There is no OpenERP database server connection defined" );
  }
  final OpenERPHelper helper = new OpenERPHelper( databaseMeta );
  try {
    helper.StartSession();
    final RowMetaInterface rowMeta = this.getRowMeta();
    row.addRowMeta( rowMeta );
  } catch ( Exception e ) {
    throw new KettleStepException( e );
  }
}
Example 5
Source File: FilesFromResultMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {

  // Add the fields from a ResultFile
  try {
    ResultFile resultFile =
      new ResultFile( ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject( "foo.bar", space ),
        "parentOrigin", "origin" );
    RowMetaAndData add = resultFile.getRow();

    // Set the origin on the fields...
    for ( int i = 0; i < add.size(); i++ ) {
      add.getValueMeta( i ).setOrigin( name );
    }

    r.addRowMeta( add.getRowMeta() );
  } catch ( KettleFileException e ) {
    throw new KettleStepException( e );
  }
}
Example 6
Source File: JoinRowsMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Override
public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  if ( space instanceof TransMeta ) {
    TransMeta transMeta = (TransMeta) space;
    StepMeta[] steps = transMeta.getPrevSteps( transMeta.findStep( origin ) );
    StepMeta mainStep = transMeta.findStep( getMainStepname() );
    rowMeta.clear();
    if ( mainStep != null ) {
      rowMeta.addRowMeta( transMeta.getStepFields( mainStep ) );
    }
    for ( StepMeta step : steps ) {
      if ( mainStep == null || !step.equals( mainStep ) ) {
        rowMeta.addRowMeta( transMeta.getStepFields( step ) );
      }
    }
  }
}
Example 7
Source File: SingleThreaderMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // First load some interesting data...
  //
  // Then see which fields get added to the row.
  //
  TransMeta mappingTransMeta = null;
  try {
    mappingTransMeta = loadSingleThreadedTransMeta( this, repository, space );
  } catch ( KettleException e ) {
    throw new KettleStepException( BaseMessages.getString( PKG,
      "SingleThreaderMeta.Exception.UnableToLoadMappingTransformation" ), e );
  }

  row.clear();

  // Let's keep it simple!
  //
  if ( !Utils.isEmpty( space.environmentSubstitute( retrieveStep ) ) ) {
    RowMetaInterface stepFields = mappingTransMeta.getStepFields( retrieveStep );
    row.addRowMeta( stepFields );
  }
}
Example 8
Source File: CPythonScriptExecutorMeta.java From pentaho-cpython-plugin with Apache License 2.0 | 6 votes |
/**
 * Given a fully defined output row metadata structure, determine which of the output fields are being copied from
 * the input fields and which must be the output of the script.
 *
 * @param fullOutputRowMeta    the fully defined output row metadata structure
 * @param scriptFields         row meta that will hold script only fields
 * @param inputPresentInOutput row meta that will hold input fields being copied
 * @param infos                the array of info row metas
 * @param stepName             the name of the step
 */
protected void determineInputFieldScriptFieldSplit( RowMetaInterface fullOutputRowMeta, RowMetaInterface scriptFields,
  RowMetaInterface inputPresentInOutput, RowMetaInterface[] infos, String stepName ) {

  scriptFields.clear();
  inputPresentInOutput.clear();
  RowMetaInterface consolidatedInputFields = new RowMeta();
  for ( RowMetaInterface r : infos ) {
    consolidatedInputFields.addRowMeta( r );
  }

  for ( ValueMetaInterface vm : fullOutputRowMeta.getValueMetaList() ) {
    int index = consolidatedInputFields.indexOfValue( vm.getName() );
    if ( index >= 0 ) {
      inputPresentInOutput.addValueMeta( vm );
    } else {
      // must be a script output (either a variable name field or data frame column name)
      scriptFields.addValueMeta( vm );
    }
  }
}
Example 9
Source File: XBaseInputMeta.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@Override
public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  FileInputList fileList = getTextFileList( space );
  if ( fileList.nrOfFiles() == 0 ) {
    throw new KettleStepException( BaseMessages.getString( PKG, "XBaseInputMeta.Exception.NoFilesFoundToProcess" ) );
  }

  row.addRowMeta( getOutputFields( fileList, name ) );
}
Example 10
Source File: SelectValuesMeta.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@Override
public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  try {
    RowMetaInterface rowMeta = inputRowMeta.clone();
    inputRowMeta.clear();
    inputRowMeta.addRowMeta( rowMeta );

    getSelectFields( inputRowMeta, name );
    getDeleteFields( inputRowMeta );
    getMetadataFields( inputRowMeta, name );
  } catch ( Exception e ) {
    throw new KettleStepException( e );
  }
}
Example 11
Source File: BeamInputMeta.java From kettle-beam with Apache License 2.0 | 5 votes |
@Override
public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {

  if ( metaStore != null ) {
    FileDefinition fileDefinition = loadFileDefinition( metaStore );

    try {
      inputRowMeta.clear();
      inputRowMeta.addRowMeta( fileDefinition.getRowMeta() );
    } catch ( KettlePluginException e ) {
      throw new KettleStepException( "Unable to get row layout of file definition '"
        + fileDefinition.getName() + "'", e );
    }
  }
}
Example 12
Source File: TableInput.java From pentaho-kettle with Apache License 2.0 | 5 votes |
private RowMetaAndData readStartDate() throws KettleException {
  if ( log.isDetailed() ) {
    logDetailed( "Reading from step [" + data.infoStream.getStepname() + "]" );
  }

  RowMetaInterface parametersMeta = new RowMeta();
  Object[] parametersData = new Object[] {};

  RowSet rowSet = findInputRowSet( data.infoStream.getStepname() );
  if ( rowSet != null ) {
    Object[] rowData = getRowFrom( rowSet ); // rows are originating from "lookup_from"
    while ( rowData != null ) {
      parametersData = RowDataUtil.addRowData( parametersData, parametersMeta.size(), rowData );
      parametersMeta.addRowMeta( rowSet.getRowMeta() );

      rowData = getRowFrom( rowSet ); // take all input rows if needed!
    }

    if ( parametersMeta.size() == 0 ) {
      throw new KettleException( "Expected to read parameters from step ["
        + data.infoStream.getStepname() + "] but none were found." );
    }
  } else {
    throw new KettleException( "Unable to find rowset to read from, perhaps step ["
      + data.infoStream.getStepname() + "] doesn't exist. (or perhaps you are trying a preview?)" );
  }

  RowMetaAndData parameters = new RowMetaAndData( parametersMeta, parametersData );

  return parameters;
}
Example 13
Source File: MondrianInputMeta.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  if ( databaseMeta == null ) {
    return; // TODO: throw an exception here
  }

  RowMetaInterface add = null;
  try {
    String mdx = getSQL();
    if ( isVariableReplacementActive() ) {
      mdx = space.environmentSubstitute( mdx );
    }
    MondrianHelper helper = new MondrianHelper( databaseMeta, catalog, mdx, space );
    add = helper.getCachedRowMeta();
    if ( add == null ) {
      helper.openQuery();
      helper.createRectangularOutput();
      add = helper.getOutputRowMeta();
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleStepException( "Unable to get query result for MDX query: " + Const.CR + sql, dbe );
  }

  // Set the origin
  //
  for ( int i = 0; i < add.size(); i++ ) {
    ValueMetaInterface v = add.getValueMeta( i );
    v.setOrigin( origin );
  }

  row.addRowMeta( add );
}
Example 14
Source File: AnalyticQueryMeta.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public void getFields( RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // re-assemble a new row of metadata
  //
  RowMetaInterface fields = new RowMeta();

  // Add existing values
  fields.addRowMeta( r );

  // add analytic values
  for ( int i = 0; i < number_of_fields; i++ ) {

    int index_of_subject = -1;
    index_of_subject = r.indexOfValue( subjectField[i] );

    // if we found the subjectField in the RowMetaInterface, and we should....
    if ( index_of_subject > -1 ) {
      ValueMetaInterface vmi = r.getValueMeta( index_of_subject ).clone();
      vmi.setOrigin( origin );
      vmi.setName( aggregateField[i] );
      fields.addValueMeta( r.size() + i, vmi );
    } else {
      // we have a condition where the subjectField can't be found from the rowMetaInterface
      StringBuilder sbfieldNames = new StringBuilder();
      String[] fieldNames = r.getFieldNames();
      for ( int j = 0; j < fieldNames.length; j++ ) {
        sbfieldNames.append( "[" + fieldNames[j] + "]" + ( j < fieldNames.length - 1 ? ", " : "" ) );
      }
      throw new KettleStepException( BaseMessages.getString( PKG,
        "AnalyticQueryMeta.Exception.SubjectFieldNotFound", getParentStepMeta().getName(),
        subjectField[i], sbfieldNames.toString() ) );
    }
  }

  r.clear();
  // Add back to Row Meta
  r.addRowMeta( fields );
}
Example 15
Source File: ClosureGeneratorMeta.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@Override
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // The output for the closure table is:
  //
  // - parentId
  // - childId
  // - distance
  //
  // Nothing else.
  //
  RowMetaInterface result = new RowMeta();

  ValueMetaInterface parentValueMeta = row.searchValueMeta( parentIdFieldName );
  if ( parentValueMeta != null ) {
    result.addValueMeta( parentValueMeta );
  }

  ValueMetaInterface childValueMeta = row.searchValueMeta( childIdFieldName );
  if ( childValueMeta != null ) {
    result.addValueMeta( childValueMeta );
  }

  ValueMetaInterface distanceValueMeta = new ValueMetaInteger( distanceFieldName );
  distanceValueMeta.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH );
  result.addValueMeta( distanceValueMeta );

  row.clear();
  row.addRowMeta( result );
}
Example 16
Source File: ExecuteTestsMeta.java From pentaho-pdi-dataset with Apache License 2.0 | 5 votes |
@Override
public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  RowMetaInterface rowMeta = UnitTestResult.getRowMeta();

  int index = 0;
  rowMeta.getValueMeta( index++ ).setName( space.environmentSubstitute( transformationNameField ) );
  rowMeta.getValueMeta( index++ ).setName( space.environmentSubstitute( unitTestNameField ) );
  rowMeta.getValueMeta( index++ ).setName( space.environmentSubstitute( dataSetNameField ) );
  rowMeta.getValueMeta( index++ ).setName( space.environmentSubstitute( stepNameField ) );
  rowMeta.getValueMeta( index++ ).setName( space.environmentSubstitute( errorField ) );
  rowMeta.getValueMeta( index++ ).setName( space.environmentSubstitute( commentField ) );

  inputRowMeta.clear();
  inputRowMeta.addRowMeta( rowMeta );
}
Example 17
Source File: TableInputMeta.java From pentaho-kettle with Apache License 2.0 | 4 votes |
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  if ( databaseMeta == null ) {
    return; // TODO: throw an exception here
  }

  if ( cachedRowMetaActive ) {
    row.addRowMeta( cachedRowMeta );
    return;
  }

  boolean param = false;

  Database db = getDatabase();
  super.databases = new Database[] { db }; // keep track of it for canceling purposes...

  // First try without connecting to the database... (can be S L O W)
  String sNewSQL = sql;
  if ( isVariableReplacementActive() ) {
    sNewSQL = db.environmentSubstitute( sql );
    if ( space != null ) {
      sNewSQL = space.environmentSubstitute( sNewSQL );
    }
  }

  RowMetaInterface add = null;
  try {
    add = db.getQueryFields( sNewSQL, param );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleStepException( "Unable to get queryfields for SQL: " + Const.CR + sNewSQL, dbe );
  }

  if ( add != null ) {
    attachOrigin( add, origin );
    row.addRowMeta( add );
  } else {
    try {
      db.connect();

      RowMetaInterface paramRowMeta = null;
      Object[] paramData = null;

      StreamInterface infoStream = getStepIOMeta().getInfoStreams().get( 0 );
      if ( !Utils.isEmpty( infoStream.getStepname() ) ) {
        param = true;
        if ( info.length > 0 && info[ 0 ] != null ) {
          paramRowMeta = info[ 0 ];
          paramData = RowDataUtil.allocateRowData( paramRowMeta.size() );
        }
      }

      add = db.getQueryFields( sNewSQL, param, paramRowMeta, paramData );

      if ( add == null ) {
        return;
      }
      attachOrigin( add, origin );
      row.addRowMeta( add );
    } catch ( KettleException ke ) {
      throw new KettleStepException( "Unable to get queryfields for SQL: " + Const.CR + sNewSQL, ke );
    } finally {
      db.disconnect();
    }
  }

  if ( isLazyConversionActive() ) {
    for ( int i = 0; i < row.size(); i++ ) {
      ValueMetaInterface v = row.getValueMeta( i );
      try {
        if ( v.getType() == ValueMetaInterface.TYPE_STRING ) {
          ValueMetaInterface storageMeta = ValueMetaFactory.cloneValueMeta( v );
          storageMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
          v.setStorageMetadata( storageMeta );
          v.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
        }
      } catch ( KettlePluginException e ) {
        throw new KettleStepException( "Unable to clone meta for lazy conversion: " + Const.CR + v, e );
      }
    }
  }
}
Example 18
Source File: SimpleMappingMeta.java From pentaho-kettle with Apache License 2.0 | 4 votes |
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // First load some interesting data...
  // Then see which fields get added to the row.
  //
  TransMeta mappingTransMeta = null;
  try {
    mappingTransMeta =
      loadMappingMeta( this, repository, metaStore, space, mappingParameters.isInheritingAllVariables() );
  } catch ( KettleException e ) {
    throw new KettleStepException( BaseMessages.getString( PKG,
      "SimpleMappingMeta.Exception.UnableToLoadMappingTransformation" ), e );
  }

  // The field structure may depend on the input parameters as well (think of parameter replacements in MDX queries
  // for instance)
  if ( mappingParameters != null && mappingTransMeta != null ) {

    // Just set the variables in the transformation statically.
    // This just means: set a number of variables or parameter values:
    //
    StepWithMappingMeta.activateParams( mappingTransMeta, mappingTransMeta, space, mappingTransMeta.listParameters(),
      mappingParameters.getVariable(), mappingParameters.getInputField(),
      mappingParameters.isInheritingAllVariables() );
  }

  // Keep track of all the fields that need renaming...
  //
  List<MappingValueRename> inputRenameList = new ArrayList<MappingValueRename>();

  //
  // Before we ask the mapping outputs anything, we should teach the mapping
  // input steps in the sub-transformation about the data coming in...
  //
  RowMetaInterface inputRowMeta;

  // The row metadata, what we pass to the mapping input step
  // definition.getOutputStep(), is "row"
  // However, we do need to re-map some fields...
  //
  inputRowMeta = row.clone();
  if ( !inputRowMeta.isEmpty() ) {
    for ( MappingValueRename valueRename : inputMapping.getValueRenames() ) {
      ValueMetaInterface valueMeta = inputRowMeta.searchValueMeta( valueRename.getSourceValueName() );
      if ( valueMeta == null ) {
        throw new KettleStepException( BaseMessages.getString( PKG,
          "SimpleMappingMeta.Exception.UnableToFindField", valueRename.getSourceValueName() ) );
      }
      valueMeta.setName( valueRename.getTargetValueName() );
    }
  }

  // What is this mapping input step?
  //
  StepMeta mappingInputStep = mappingTransMeta.findMappingInputStep( null );

  // We're certain it's a MappingInput step...
  //
  MappingInputMeta mappingInputMeta = (MappingInputMeta) mappingInputStep.getStepMetaInterface();

  // Inform the mapping input step about what it's going to receive...
  //
  mappingInputMeta.setInputRowMeta( inputRowMeta );

  // What values are we changing names for: already done!
  //
  mappingInputMeta.setValueRenames( null );

  // Keep a list of the input rename values that need to be changed back at
  // the output
  //
  if ( inputMapping.isRenamingOnOutput() ) {
    SimpleMapping.addInputRenames( inputRenameList, inputMapping.getValueRenames() );
  }

  StepMeta mappingOutputStep = mappingTransMeta.findMappingOutputStep( null );

  // We know it's a mapping output step...
  MappingOutputMeta mappingOutputMeta = (MappingOutputMeta) mappingOutputStep.getStepMetaInterface();

  // Change a few columns.
  mappingOutputMeta.setOutputValueRenames( outputMapping.getValueRenames() );

  // Perhaps we need to change a few input columns back to the original?
  //
  mappingOutputMeta.setInputValueRenames( inputRenameList );

  // Now we know wat's going to come out of there...
  // This is going to be the full row, including all the remapping, etc.
  //
  RowMetaInterface mappingOutputRowMeta = mappingTransMeta.getStepFields( mappingOutputStep );

  row.clear();
  row.addRowMeta( mappingOutputRowMeta );
}
Example 19
Source File: CPythonScriptExecutorMeta.java From pentaho-cpython-plugin with Apache License 2.0 | 3 votes |
/**
 * Add all incoming fields to the output row meta in the case where no output fields have been defined/edited by
 * the user
 *
 * @param rowMeta
 * @param stepName
 * @param info
 */
private void addAllIncomingFieldsToOutput( RowMetaInterface rowMeta, String stepName, RowMetaInterface[] info ) {
  if ( getIncludeInputAsOutput() ) {
    for ( RowMetaInterface r : info ) {
      rowMeta.addRowMeta( r );
    }
  }
}