Java Code Examples for org.pentaho.di.core.row.RowMetaInterface#searchValueMeta()
The following examples show how to use org.pentaho.di.core.row.RowMetaInterface#searchValueMeta(). The method looks up a field's metadata (a ValueMetaInterface) in a row layout by field name and returns null when no field with that name exists, so callers are expected to null-check the result. The examples below are taken from open source projects; the source file and project are noted above each example.
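Before the project examples, here is a minimal, self-contained sketch of the lookup-and-null-check pattern they all rely on. It assumes only that the Kettle core classes (RowMeta, ValueMetaString) are on the classpath; the class name SearchValueMetaSketch and the field name customer_id are illustrative, not taken from any project below.

import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;

public class SearchValueMetaSketch {
  public static void main( String[] args ) {
    // Describe a row layout with a single String field named "customer_id".
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta( new ValueMetaString( "customer_id" ) );

    // searchValueMeta() returns the field's metadata, or null on a miss,
    // which is why every example below null-checks the result.
    ValueMetaInterface v = rowMeta.searchValueMeta( "customer_id" );
    if ( v != null ) {
      System.out.println( "Found field '" + v.getName() + "' (type code " + v.getType() + ")" );
    } else {
      System.out.println( "No such field in the row metadata" );
    }
  }
}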
Example 1
Source File: DeleteMeta.java, from pentaho-kettle (Apache License 2.0)
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    // Lookup: we do a lookup on the natural keys
    for ( int i = 0; i < keyLookup.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( keyStream[i] );

      // Note: v is null-checked before getOrigin() below, but not before toStringMeta(),
      // which throws a NullPointerException when the key field is missing from prev.
      DatabaseImpact ii =
        new DatabaseImpact( DatabaseImpact.TYPE_IMPACT_DELETE, transMeta.getName(), stepMeta.getName(),
          databaseMeta.getDatabaseName(), tableName, keyLookup[i], keyStream[i],
          v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
Example 2
Source File: MySQLBulkLoaderMeta.java, from pentaho-kettle (Apache License 2.0)
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
        new DatabaseImpact( DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(),
          databaseMeta.getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
          fieldStream[i], v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
Example 3
Source File: OraBulkLoaderMeta.java, from pentaho-kettle (Apache License 2.0)
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
        new DatabaseImpact( DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(),
          databaseMeta.getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
          fieldStream[i], v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
Example 4
Source File: MonetDBBulkLoaderMeta.java, from pentaho-kettle (Apache License 2.0)
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
        new DatabaseImpact( DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(),
          databaseMeta.getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
          fieldStream[i], v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
Example 5
Source File: GPLoadMeta.java, from pentaho-kettle (Apache License 2.0)
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
        new DatabaseImpact( DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(),
          databaseMeta.getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
          fieldStream[i], v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
Example 6
Source File: DatabaseLookup.java, from pentaho-kettle (Apache License 2.0)
@VisibleForTesting
void determineFieldsTypesQueryingDb() throws KettleException {
  final String[] keyFields = meta.getTableKeyField();
  data.keytypes = new int[ keyFields.length ];

  String schemaTable =
    meta.getDatabaseMeta().getQuotedSchemaTableCombination(
      environmentSubstitute( meta.getSchemaName() ),
      environmentSubstitute( meta.getTablename() ) );

  RowMetaInterface fields = data.db.getTableFields( schemaTable );
  if ( fields != null ) {
    // Fill in the types...
    for ( int i = 0; i < keyFields.length; i++ ) {
      ValueMetaInterface key = fields.searchValueMeta( keyFields[ i ] );
      if ( key != null ) {
        data.keytypes[ i ] = key.getType();
      } else {
        throw new KettleStepException(
          BaseMessages.getString( PKG, "DatabaseLookup.ERROR0001.FieldRequired5.Exception" )
            + keyFields[ i ]
            + BaseMessages.getString( PKG, "DatabaseLookup.ERROR0001.FieldRequired6.Exception" ) );
      }
    }

    if ( shouldDatabaseReturnValueTypeBeUsed() ) {
      useReturnValueTypeFromDatabase( fields );
    }
  } else {
    throw new KettleStepException(
      BaseMessages.getString( PKG, "DatabaseLookup.ERROR0002.UnableToDetermineFieldsOfTable" )
        + schemaTable + "]" );
  }
}
Example 7
Source File: BeamTimestampStepHandler.java, from kettle-beam (Apache License 2.0)
@Override
public void handleStep( LogChannelInterface log, StepMeta stepMeta,
    Map<String, PCollection<KettleRow>> stepCollectionMap, Pipeline pipeline, RowMetaInterface rowMeta,
    List<StepMeta> previousSteps, PCollection<KettleRow> input ) throws KettleException {

  BeamTimestampMeta beamTimestampMeta = (BeamTimestampMeta) stepMeta.getStepMetaInterface();

  if ( !beamTimestampMeta.isReadingTimestamp() && StringUtils.isNotEmpty( beamTimestampMeta.getFieldName() ) ) {
    if ( rowMeta.searchValueMeta( beamTimestampMeta.getFieldName() ) == null ) {
      throw new KettleException( "Please specify a valid field name '" + stepMeta.getName() + "'" );
    }
  }

  PCollection<KettleRow> stepPCollection = input.apply( ParDo.of(
    new TimestampFn(
      stepMeta.getName(),
      JsonRowMeta.toJson( rowMeta ),
      transMeta.environmentSubstitute( beamTimestampMeta.getFieldName() ),
      beamTimestampMeta.isReadingTimestamp(),
      stepPluginClasses,
      xpPluginClasses
    ) ) );

  // Save this in the map
  //
  stepCollectionMap.put( stepMeta.getName(), stepPCollection );

  log.logBasic( "Handled step (TIMESTAMP) : " + stepMeta.getName() + ", gets data from "
    + previousSteps.size() + " previous step(s)" );
}
Example 8
Source File: FieldHelper.java, from pentaho-kettle (Apache License 2.0)
public FieldHelper( RowMetaInterface rowMeta, String fieldName ) {
  this.meta = rowMeta.searchValueMeta( fieldName );
  this.index = rowMeta.indexOfValue( fieldName );
  if ( this.index == -1 ) {
    throw new IllegalArgumentException( String.format(
      "FieldHelper could not be initialized. The field named '%s' not found in RowMeta: %s",
      fieldName, rowMeta.toStringMeta() ) );
  }
}
Example 9
Source File: TextFileOutputMeta.java, from pentaho-kettle (Apache License 2.0)
@Override
public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // No values are added to the row in this type of step
  // However, in case of Fixed length records,
  // the field precisions and lengths are altered!

  for ( int i = 0; i < outputFields.length; i++ ) {
    TextFileField field = outputFields[i];
    ValueMetaInterface v = row.searchValueMeta( field.getName() );
    if ( v != null ) {
      v.setLength( field.getLength() );
      v.setPrecision( field.getPrecision() );
      if ( field.getFormat() != null ) {
        v.setConversionMask( field.getFormat() );
      }
      v.setDecimalSymbol( field.getDecimalSymbol() );
      v.setGroupingSymbol( field.getGroupingSymbol() );
      v.setCurrencySymbol( field.getCurrencySymbol() );
      v.setOutputPaddingEnabled( isPadded() );
      v.setTrimType( field.getTrimType() );
      if ( !Utils.isEmpty( getEncoding() ) ) {
        v.setStringEncoding( getEncoding() );
      }

      // enable output padding by default to be compatible with v2.5.x
      //
      v.setOutputPaddingEnabled( true );
    }
  }
}
Example 10
Source File: MergeJoin.java, from pentaho-kettle (Apache License 2.0)
/**
 * Checks whether incoming rows are join compatible. This essentially means that the keys being compared should be of
 * the same datatype and both rows should have the same number of keys specified
 *
 * @param row1
 *          Reference row
 * @param row2
 *          Row to compare to
 *
 * @return true when templates are compatible.
 */
protected boolean isInputLayoutValid( RowMetaInterface row1, RowMetaInterface row2 ) {
  if ( row1 != null && row2 != null ) {
    // Compare the key types
    String[] keyFields1 = meta.getKeyFields1();
    int nrKeyFields1 = keyFields1.length;
    String[] keyFields2 = meta.getKeyFields2();
    int nrKeyFields2 = keyFields2.length;

    if ( nrKeyFields1 != nrKeyFields2 ) {
      logError( "Number of keys do not match " + nrKeyFields1 + " vs " + nrKeyFields2 );
      return false;
    }

    for ( int i = 0; i < nrKeyFields1; i++ ) {
      ValueMetaInterface v1 = row1.searchValueMeta( keyFields1[i] );
      if ( v1 == null ) {
        return false;
      }
      ValueMetaInterface v2 = row2.searchValueMeta( keyFields2[i] );
      if ( v2 == null ) {
        return false;
      }
      if ( v1.getType() != v2.getType() ) {
        return false;
      }
    }
  }
  // we got here, all seems to be ok.
  return true;
}
Example 11
Source File: DataSet.java, from pentaho-pdi-dataset (Apache License 2.0)
/**
 * Calculate the row metadata for the data set fields needed for the given location.
 *
 * @param location
 * @return The fields metadata for those fields that are mapped against a certain step (location)
 */
public RowMetaInterface getMappedDataSetFieldsRowMeta( TransUnitTestSetLocation location ) throws KettlePluginException {
  RowMetaInterface setRowMeta = getSetRowMeta( false );
  RowMetaInterface rowMeta = new RowMeta();
  for ( TransUnitTestFieldMapping fieldMapping : location.getFieldMappings() ) {
    ValueMetaInterface valueMeta = setRowMeta.searchValueMeta( fieldMapping.getDataSetFieldName() );
    rowMeta.addValueMeta( valueMeta );
  }
  return rowMeta;
}
Example 12
Source File: ClosureGeneratorMeta.java, from pentaho-kettle (Apache License 2.0)
@Override
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // The output for the closure table is:
  //
  // - parentId
  // - childId
  // - distance
  //
  // Nothing else.
  //
  RowMetaInterface result = new RowMeta();

  ValueMetaInterface parentValueMeta = row.searchValueMeta( parentIdFieldName );
  if ( parentValueMeta != null ) {
    result.addValueMeta( parentValueMeta );
  }

  ValueMetaInterface childValueMeta = row.searchValueMeta( childIdFieldName );
  if ( childValueMeta != null ) {
    result.addValueMeta( childValueMeta );
  }

  ValueMetaInterface distanceValueMeta = new ValueMetaInteger( distanceFieldName );
  distanceValueMeta.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH );
  result.addValueMeta( distanceValueMeta );

  row.clear();
  row.addRowMeta( result );
}
Example 13
Source File: NormaliserMeta.java, from pentaho-kettle (Apache License 2.0)
@Override
public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // Get a unique list of the occurrences of the type
  //
  List<String> norm_occ = new ArrayList<>();
  List<String> field_occ = new ArrayList<>();
  int maxlen = 0;
  for ( int i = 0; i < normaliserFields.length; i++ ) {
    if ( !norm_occ.contains( normaliserFields[i].getNorm() ) ) {
      norm_occ.add( normaliserFields[i].getNorm() );
      field_occ.add( normaliserFields[i].getName() );
    }

    if ( normaliserFields[i].getValue().length() > maxlen ) {
      maxlen = normaliserFields[i].getValue().length();
    }
  }

  // Then add the type field!
  //
  ValueMetaInterface typefield_value = new ValueMetaString( typeField );
  typefield_value.setOrigin( name );
  typefield_value.setLength( maxlen );
  row.addValueMeta( typefield_value );

  // Loop over the distinct list of fieldNorm[i]
  // Add the new fields that need to be created.
  // Use the same data type as the original fieldname...
  //
  for ( int i = 0; i < norm_occ.size(); i++ ) {
    String normname = norm_occ.get( i );
    String fieldname = field_occ.get( i );
    ValueMetaInterface v = row.searchValueMeta( fieldname );
    if ( v != null ) {
      v = v.clone();
    } else {
      throw new KettleStepException( BaseMessages.getString(
        PKG, "NormaliserMeta.Exception.UnableToFindField", fieldname ) );
    }
    v.setName( normname );
    v.setOrigin( name );
    row.addValueMeta( v );
  }

  // Now remove all the normalized fields...
  //
  for ( int i = 0; i < normaliserFields.length; i++ ) {
    int idx = row.indexOfValue( normaliserFields[i].getName() );
    if ( idx >= 0 ) {
      row.removeValueMeta( idx );
    }
  }
}
Example 14
Source File: LucidDBStreamingLoaderMeta.java, from pentaho-kettle (Apache License 2.0)
private String buildRemoteRowsCursorFromInput( RowMetaInterface prev ) throws KettleStepException {
  boolean suppress_comma = true;
  StringBuffer sb = new StringBuffer( 300 );

  // Iterate over fieldStreamForKeys[]
  for ( int i = 0; i < fieldStreamForKeys.length; i++ ) {
    // Add comma to all except the first row
    if ( suppress_comma == true ) {
      suppress_comma = false;
    } else {
      sb.append( "," );
    }

    String keyStreamFieldName = fieldStreamForKeys[i];
    ValueMetaInterface keyStreamField = prev.searchValueMeta( fieldStreamForKeys[i] );
    if ( keyStreamField == null ) {
      throw new KettleStepException( "Unable to find key field '" + keyStreamFieldName + "' in the input fields" );
    }
    sb.append( buildFakeCursorRowString( keyStreamField, keyStreamFieldName ) ).append( Const.CR );
  }

  // Iterate over fieldStreamForFields[] (dedup)
  for ( int i = 0; i < fieldStreamForFields.length; i++ ) {
    // Do not add if it's already in from keys
    if ( !isInKeys( fieldStreamForFields[i] ) ) {
      // Add comma to all except the first row
      if ( suppress_comma == true ) {
        suppress_comma = false;
      } else {
        sb.append( "," );
      }
      sb.append( buildFakeCursorRowString( prev.searchValueMeta( fieldStreamForFields[i] ),
        fieldStreamForFields[i] ) + Const.CR );
    }
  }

  return sb.toString();
}
Example 15
Source File: LucidDBBulkLoaderMeta.java, from pentaho-kettle (Apache License 2.0)
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
    Repository repository, IMetaStore metaStore ) throws KettleStepException {
  SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default: nothing to do!

  if ( databaseMeta != null ) {
    if ( prev != null && prev.size() > 0 ) {
      // Copy the row
      RowMetaInterface tableFields = new RowMeta();

      // Now change the field names
      for ( int i = 0; i < fieldTable.length; i++ ) {
        ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );
        if ( v != null ) {
          ValueMetaInterface tableField = v.clone();
          tableField.setName( fieldTable[i] );
          tableFields.addValueMeta( tableField );
        } else {
          throw new KettleStepException( "Unable to find field [" + fieldStream[i] + "] in the input rows" );
        }
      }

      if ( !Utils.isEmpty( tableName ) ) {
        Database db = new Database( loggingObject, databaseMeta );
        db.shareVariablesWith( transMeta );
        try {
          db.connect();

          String schemaTable =
            databaseMeta.getQuotedSchemaTableCombination( transMeta.environmentSubstitute( schemaName ),
              transMeta.environmentSubstitute( tableName ) );
          String sql = db.getDDL( schemaTable, tableFields, null, false, null, true );

          if ( Utils.isEmpty( sql ) ) {
            retval.setSQL( null );
          } else {
            retval.setSQL( sql );
          }
        } catch ( KettleException e ) {
          retval.setError( BaseMessages.getString( PKG, "LucidDBBulkLoaderMeta.GetSQL.ErrorOccurred" )
            + e.getMessage() );
        }
      } else {
        retval.setError( BaseMessages.getString( PKG, "LucidDBBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection" ) );
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "LucidDBBulkLoaderMeta.GetSQL.NotReceivingAnyFields" ) );
    }
  } else {
    retval.setError( BaseMessages.getString( PKG, "LucidDBBulkLoaderMeta.GetSQL.NoConnectionDefined" ) );
  }

  return retval;
}
Example 16
Source File: GPLoadMeta.java, from pentaho-kettle (Apache License 2.0)
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
    Repository repository, IMetaStore metaStore ) throws KettleStepException {
  SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default: nothing to do!

  if ( databaseMeta != null ) {
    if ( prev != null && prev.size() > 0 ) {
      // Copy the row
      RowMetaInterface tableFields = new RowMeta();

      // Now change the field names
      for ( int i = 0; i < fieldTable.length; i++ ) {
        ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );
        if ( v != null ) {
          ValueMetaInterface tableField = v.clone();
          tableField.setName( fieldTable[i] );
          tableFields.addValueMeta( tableField );
        } else {
          throw new KettleStepException( "Unable to find field [" + fieldStream[i] + "] in the input rows" );
        }
      }

      if ( !Utils.isEmpty( tableName ) ) {
        Database db = new Database( loggingObject, databaseMeta );
        db.shareVariablesWith( transMeta );
        try {
          db.connect();

          String schemaTable =
            databaseMeta.getQuotedSchemaTableCombination( transMeta.environmentSubstitute( schemaName ),
              transMeta.environmentSubstitute( tableName ) );
          String sql = db.getDDL( schemaTable, tableFields, null, false, null, true );

          if ( sql.length() == 0 ) {
            retval.setSQL( null );
          } else {
            retval.setSQL( sql );
          }
        } catch ( KettleException e ) {
          retval.setError( BaseMessages.getString( PKG, "GPLoadMeta.GetSQL.ErrorOccurred" ) + e.getMessage() );
        }
      } else {
        retval.setError( BaseMessages.getString( PKG, "GPLoadMeta.GetSQL.NoTableDefinedOnConnection" ) );
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "GPLoadMeta.GetSQL.NotReceivingAnyFields" ) );
    }
  } else {
    retval.setError( BaseMessages.getString( PKG, "GPLoadMeta.GetSQL.NoConnectionDefined" ) );
  }

  return retval;
}
Example 17
Source File: ScriptMeta.java, from pentaho-kettle (Apache License 2.0)
public void getFields( RowMetaInterface row, String originStepname, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  for ( int i = 0; i < fieldname.length; i++ ) {
    if ( !Utils.isEmpty( fieldname[i] ) ) {
      String fieldName;
      int replaceIndex;
      int fieldType;

      if ( replace[i] ) {
        // Look up the field to replace...
        //
        if ( row.searchValueMeta( fieldname[i] ) == null && Utils.isEmpty( rename[i] ) ) {
          throw new KettleStepException( BaseMessages.getString(
            PKG, "ScriptMeta.Exception.FieldToReplaceNotFound", fieldname[i] ) );
        }
        replaceIndex = row.indexOfValue( rename[i] );

        // Change the data type to match what's specified...
        //
        fieldType = type[i];
        fieldName = rename[i];
      } else {
        replaceIndex = -1;
        fieldType = type[i];
        if ( rename[i] != null && rename[i].length() != 0 ) {
          fieldName = rename[i];
        } else {
          fieldName = fieldname[i];
        }
      }

      try {
        ValueMetaInterface v = ValueMetaFactory.createValueMeta( fieldName, fieldType );
        v.setLength( length[i] );
        v.setPrecision( precision[i] );
        v.setOrigin( originStepname );
        if ( replace[i] && replaceIndex >= 0 ) {
          row.setValueMeta( replaceIndex, v );
        } else {
          row.addValueMeta( v );
        }
      } catch ( KettlePluginException e ) {
        // Ignore errors
      }
    }
  }
}
Example 18
Source File: SimpleMappingMeta.java, from pentaho-kettle (Apache License 2.0)
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // First load some interesting data...
  // Then see which fields get added to the row.
  //
  TransMeta mappingTransMeta = null;
  try {
    mappingTransMeta =
      loadMappingMeta( this, repository, metaStore, space, mappingParameters.isInheritingAllVariables() );
  } catch ( KettleException e ) {
    throw new KettleStepException( BaseMessages.getString(
      PKG, "SimpleMappingMeta.Exception.UnableToLoadMappingTransformation" ), e );
  }

  // The field structure may depend on the input parameters as well (think of parameter replacements in MDX queries
  // for instance)
  if ( mappingParameters != null && mappingTransMeta != null ) {
    // Just set the variables in the transformation statically.
    // This just means: set a number of variables or parameter values:
    //
    StepWithMappingMeta.activateParams( mappingTransMeta, mappingTransMeta, space,
      mappingTransMeta.listParameters(), mappingParameters.getVariable(), mappingParameters.getInputField(),
      mappingParameters.isInheritingAllVariables() );
  }

  // Keep track of all the fields that need renaming...
  //
  List<MappingValueRename> inputRenameList = new ArrayList<MappingValueRename>();

  //
  // Before we ask the mapping outputs anything, we should teach the mapping
  // input steps in the sub-transformation about the data coming in...
  //
  RowMetaInterface inputRowMeta;

  // The row metadata, what we pass to the mapping input step
  // definition.getOutputStep(), is "row"
  // However, we do need to re-map some fields...
  //
  inputRowMeta = row.clone();
  if ( !inputRowMeta.isEmpty() ) {
    for ( MappingValueRename valueRename : inputMapping.getValueRenames() ) {
      ValueMetaInterface valueMeta = inputRowMeta.searchValueMeta( valueRename.getSourceValueName() );
      if ( valueMeta == null ) {
        throw new KettleStepException( BaseMessages.getString(
          PKG, "SimpleMappingMeta.Exception.UnableToFindField", valueRename.getSourceValueName() ) );
      }
      valueMeta.setName( valueRename.getTargetValueName() );
    }
  }

  // What is this mapping input step?
  //
  StepMeta mappingInputStep = mappingTransMeta.findMappingInputStep( null );

  // We're certain it's a MappingInput step...
  //
  MappingInputMeta mappingInputMeta = (MappingInputMeta) mappingInputStep.getStepMetaInterface();

  // Inform the mapping input step about what it's going to receive...
  //
  mappingInputMeta.setInputRowMeta( inputRowMeta );

  // What values are we changing names for: already done!
  //
  mappingInputMeta.setValueRenames( null );

  // Keep a list of the input rename values that need to be changed back at
  // the output
  //
  if ( inputMapping.isRenamingOnOutput() ) {
    SimpleMapping.addInputRenames( inputRenameList, inputMapping.getValueRenames() );
  }

  StepMeta mappingOutputStep = mappingTransMeta.findMappingOutputStep( null );

  // We know it's a mapping output step...
  MappingOutputMeta mappingOutputMeta = (MappingOutputMeta) mappingOutputStep.getStepMetaInterface();

  // Change a few columns.
  mappingOutputMeta.setOutputValueRenames( outputMapping.getValueRenames() );

  // Perhaps we need to change a few input columns back to the original?
  //
  mappingOutputMeta.setInputValueRenames( inputRenameList );

  // Now we know what's going to come out of there...
  // This is going to be the full row, including all the remapping, etc.
  //
  RowMetaInterface mappingOutputRowMeta = mappingTransMeta.getStepFields( mappingOutputStep );

  row.clear();
  row.addRowMeta( mappingOutputRowMeta );
}
Example 19
Source File: OraBulkLoaderMeta.java, from pentaho-kettle (Apache License 2.0)
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
    Repository repository, IMetaStore metaStore ) throws KettleStepException {
  SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default: nothing to do!

  if ( databaseMeta != null ) {
    if ( prev != null && prev.size() > 0 ) {
      // Copy the row
      RowMetaInterface tableFields = new RowMeta();

      // Now change the field names
      for ( int i = 0; i < fieldTable.length; i++ ) {
        ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );
        if ( v != null ) {
          ValueMetaInterface tableField = v.clone();
          tableField.setName( fieldTable[i] );
          tableFields.addValueMeta( tableField );
        } else {
          throw new KettleStepException( "Unable to find field [" + fieldStream[i] + "] in the input rows" );
        }
      }

      if ( !Utils.isEmpty( tableName ) ) {
        Database db = new Database( loggingObject, databaseMeta );
        db.shareVariablesWith( transMeta );
        try {
          db.connect();

          String schemaTable =
            databaseMeta.getQuotedSchemaTableCombination( transMeta.environmentSubstitute( schemaName ),
              transMeta.environmentSubstitute( tableName ) );
          String sql = db.getDDL( schemaTable, tableFields, null, false, null, true );

          if ( sql.length() == 0 ) {
            retval.setSQL( null );
          } else {
            retval.setSQL( sql );
          }
        } catch ( KettleException e ) {
          retval.setError( BaseMessages.getString( PKG, "OraBulkLoaderMeta.GetSQL.ErrorOccurred" )
            + e.getMessage() );
        }
      } else {
        retval.setError( BaseMessages.getString( PKG, "OraBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection" ) );
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "OraBulkLoaderMeta.GetSQL.NotReceivingAnyFields" ) );
    }
  } else {
    retval.setError( BaseMessages.getString( PKG, "OraBulkLoaderMeta.GetSQL.NoConnectionDefined" ) );
  }

  return retval;
}
Example 20
Source File: MySQLBulkLoaderMeta.java, from pentaho-kettle (Apache License 2.0)
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
    Repository repository, IMetaStore metaStore ) throws KettleStepException {
  SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default: nothing to do!

  if ( databaseMeta != null ) {
    if ( prev != null && prev.size() > 0 ) {
      // Copy the row
      RowMetaInterface tableFields = new RowMeta();

      // Now change the field names
      for ( int i = 0; i < fieldTable.length; i++ ) {
        ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );
        if ( v != null ) {
          ValueMetaInterface tableField = v.clone();
          tableField.setName( fieldTable[i] );
          tableFields.addValueMeta( tableField );
        } else {
          throw new KettleStepException( "Unable to find field [" + fieldStream[i] + "] in the input rows" );
        }
      }

      if ( !Utils.isEmpty( tableName ) ) {
        Database db = new Database( loggingObject, databaseMeta );
        db.shareVariablesWith( transMeta );
        try {
          db.connect();

          String schemaTable =
            databaseMeta.getQuotedSchemaTableCombination( transMeta.environmentSubstitute( schemaName ),
              transMeta.environmentSubstitute( tableName ) );
          String cr_table = db.getDDL( schemaTable, tableFields, null, false, null, true );

          String sql = cr_table;
          if ( sql.length() == 0 ) {
            retval.setSQL( null );
          } else {
            retval.setSQL( sql );
          }
        } catch ( KettleException e ) {
          retval.setError( BaseMessages.getString( PKG, "MySQLBulkLoaderMeta.GetSQL.ErrorOccurred" )
            + e.getMessage() );
        }
      } else {
        retval.setError( BaseMessages.getString( PKG, "MySQLBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection" ) );
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "MySQLBulkLoaderMeta.GetSQL.NotReceivingAnyFields" ) );
    }
  } else {
    retval.setError( BaseMessages.getString( PKG, "MySQLBulkLoaderMeta.GetSQL.NoConnectionDefined" ) );
  }

  return retval;
}