Java Code Examples for org.pentaho.di.core.row.RowMetaInterface#clear()
The following examples show how to use org.pentaho.di.core.row.RowMetaInterface#clear(). Each example is taken from an open-source project; the source file, project, and license are noted above the code.
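In most of the examples below, clear() is called inside a step's getFields() implementation to discard the incoming row layout before the step describes its own output fields. The following is a minimal sketch of that clear-and-rebuild pattern; the class name ClearExample and the field names "incoming_field" and "customer_id" are made up for illustration and do not come from any of the projects below.

import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaInteger;
import org.pentaho.di.core.row.value.ValueMetaString;

public class ClearExample {
  public static void main( String[] args ) {
    // Simulate an incoming row layout with one field.
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta( new ValueMetaString( "incoming_field" ) );

    // clear() removes every value metadata entry, leaving an empty layout...
    rowMeta.clear();
    System.out.println( rowMeta.size() ); // prints 0

    // ...which is then rebuilt to describe the step's own output.
    ValueMetaInterfaceIsNotNeededHere: ;
    rowMeta.addValueMeta( new ValueMetaInteger( "customer_id" ) );
    System.out.println( rowMeta.size() ); // prints 1
  }
}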
Example 1
Source File: JoinRowsMeta.java from pentaho-kettle, Apache License 2.0
@Override
public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  if ( space instanceof TransMeta ) {
    TransMeta transMeta = (TransMeta) space;
    StepMeta[] steps = transMeta.getPrevSteps( transMeta.findStep( origin ) );
    StepMeta mainStep = transMeta.findStep( getMainStepname() );
    rowMeta.clear();
    if ( mainStep != null ) {
      rowMeta.addRowMeta( transMeta.getStepFields( mainStep ) );
    }
    for ( StepMeta step : steps ) {
      if ( mainStep == null || !step.equals( mainStep ) ) {
        rowMeta.addRowMeta( transMeta.getStepFields( step ) );
      }
    }
  }
}
Example 2
Source File: SingleThreaderMeta.java from pentaho-kettle, Apache License 2.0
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // First load some interesting data...
  //
  // Then see which fields get added to the row.
  //
  TransMeta mappingTransMeta = null;
  try {
    mappingTransMeta = loadSingleThreadedTransMeta( this, repository, space );
  } catch ( KettleException e ) {
    throw new KettleStepException( BaseMessages.getString( PKG,
      "SingleThreaderMeta.Exception.UnableToLoadMappingTransformation" ), e );
  }

  row.clear();

  // Let's keep it simple!
  //
  if ( !Utils.isEmpty( space.environmentSubstitute( retrieveStep ) ) ) {
    RowMetaInterface stepFields = mappingTransMeta.getStepFields( retrieveStep );
    row.addRowMeta( stepFields );
  }
}
Example 3
Source File: TransformClassBase.java from pentaho-kettle, Apache License 2.0
@SuppressWarnings( "unchecked" )
public static void getFields( boolean clearResultFields, RowMetaInterface row, String originStepname,
  RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, List<?> fields ) throws KettleStepException {
  if ( clearResultFields ) {
    row.clear();
  }
  for ( FieldInfo fi : (List<FieldInfo>) fields ) {
    try {
      ValueMetaInterface v = ValueMetaFactory.createValueMeta( fi.name, fi.type );
      v.setLength( fi.length );
      v.setPrecision( fi.precision );
      v.setOrigin( originStepname );
      row.addValueMeta( v );
    } catch ( Exception e ) {
      throw new KettleStepException( e );
    }
  }
}
Example 4
Source File: BeamBQOutputMeta.java from kettle-beam, Apache License 2.0
@Override
public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // This is an endpoint in Beam, produces no further output
  //
  inputRowMeta.clear();
}
Example 5
Source File: AnalyticQueryMeta.java from pentaho-kettle, Apache License 2.0
public void getFields( RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // re-assemble a new row of metadata
  //
  RowMetaInterface fields = new RowMeta();

  // Add existing values
  fields.addRowMeta( r );

  // add analytic values
  for ( int i = 0; i < number_of_fields; i++ ) {
    int index_of_subject = -1;
    index_of_subject = r.indexOfValue( subjectField[i] );

    // if we found the subjectField in the RowMetaInterface, and we should....
    if ( index_of_subject > -1 ) {
      ValueMetaInterface vmi = r.getValueMeta( index_of_subject ).clone();
      vmi.setOrigin( origin );
      vmi.setName( aggregateField[i] );
      fields.addValueMeta( r.size() + i, vmi );
    } else {
      // we have a condition where the subjectField can't be found from the rowMetaInterface
      StringBuilder sbfieldNames = new StringBuilder();
      String[] fieldNames = r.getFieldNames();
      for ( int j = 0; j < fieldNames.length; j++ ) {
        sbfieldNames.append( "[" + fieldNames[j] + "]" + ( j < fieldNames.length - 1 ? ", " : "" ) );
      }
      throw new KettleStepException( BaseMessages.getString( PKG,
        "AnalyticQueryMeta.Exception.SubjectFieldNotFound", getParentStepMeta().getName(),
        subjectField[i], sbfieldNames.toString() ) );
    }
  }

  r.clear();

  // Add back to Row Meta
  r.addRowMeta( fields );
}
Example 6
Source File: MetaInjectMeta.java from pentaho-kettle, Apache License 2.0
@Override
public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  rowMeta.clear(); // No defined output is expected from this step.
  if ( !Utils.isEmpty( sourceStepName ) ) {
    for ( MetaInjectOutputField field : sourceOutputFields ) {
      try {
        rowMeta.addValueMeta( field.createValueMeta() );
      } catch ( KettlePluginException e ) {
        throw new KettleStepException( "Error creating value meta for output field '" + field.getName() + "'", e );
      }
    }
  }
}
Example 7
Source File: ConcatFieldsMeta.java from pentaho-kettle, Apache License 2.0
@Override
public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // do not call the super class from TextFileOutputMeta since it modifies the source meta data
  // see getFieldsModifyInput() instead

  // remove selected fields from the stream when true
  if ( removeSelectedFields ) {
    if ( getOutputFields().length > 0 ) {
      for ( int i = 0; i < getOutputFields().length; i++ ) {
        TextFileField field = getOutputFields()[ i ];
        try {
          row.removeValueMeta( field.getName() );
        } catch ( KettleValueException e ) {
          // just ignore exceptions since missing fields are handled in the ConcatFields class
        }
      }
    } else {
      // no output fields selected, take them all, remove them all
      row.clear();
    }
  }

  // Check Target Field Name
  if ( Utils.isEmpty( targetFieldName ) ) {
    throw new KettleStepException( BaseMessages.getString( PKG,
      "ConcatFieldsMeta.CheckResult.TargetFieldNameMissing" ) );
  }

  // add targetFieldName
  ValueMetaInterface vValue = new ValueMetaString( targetFieldName );
  vValue.setLength( targetFieldLength, 0 );
  vValue.setOrigin( name );
  if ( !Utils.isEmpty( getEncoding() ) ) {
    vValue.setStringEncoding( getEncoding() );
  }
  row.addValueMeta( vValue );
}
Example 8
Source File: SalesforceUpdateMetaTest.java from pentaho-kettle, Apache License 2.0
@Test
public void testGetFields() throws KettleStepException {
  SalesforceUpdateMeta meta = new SalesforceUpdateMeta();
  meta.setDefault();

  RowMetaInterface r = new RowMeta();
  meta.getFields( r, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 0, r.size() );

  r.clear();
  r.addValueMeta( new ValueMetaString( "testString" ) );
  meta.getFields( r, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, r.size() );
  assertEquals( ValueMetaInterface.TYPE_STRING, r.getValueMeta( 0 ).getType() );
  assertEquals( "testString", r.getValueMeta( 0 ).getName() );
}
Example 9
Source File: RulesExecutorMeta.java from pentaho-kettle, Apache License 2.0
@Override
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  if ( !keepInputFields ) {
    row.clear();
  }

  if ( ruleResultColumns != null ) {
    for ( int i = 0; i < ruleResultColumns.size(); i++ ) {
      ruleResultColumns.get( i ).setOrigin( origin );
      row.addValueMeta( ruleResultColumns.get( i ) );
    }
  }
}
Example 10
Source File: ClosureGeneratorMeta.java from pentaho-kettle, Apache License 2.0
@Override
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // The output for the closure table is:
  //
  // - parentId
  // - childId
  // - distance
  //
  // Nothing else.
  //
  RowMetaInterface result = new RowMeta();

  ValueMetaInterface parentValueMeta = row.searchValueMeta( parentIdFieldName );
  if ( parentValueMeta != null ) {
    result.addValueMeta( parentValueMeta );
  }

  ValueMetaInterface childValueMeta = row.searchValueMeta( childIdFieldName );
  if ( childValueMeta != null ) {
    result.addValueMeta( childValueMeta );
  }

  ValueMetaInterface distanceValueMeta = new ValueMetaInteger( distanceFieldName );
  distanceValueMeta.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH );
  result.addValueMeta( distanceValueMeta );

  row.clear();
  row.addRowMeta( result );
}
Example 11
Source File: ExecuteTestsMeta.java from pentaho-pdi-dataset, Apache License 2.0
@Override
public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  RowMetaInterface rowMeta = UnitTestResult.getRowMeta();
  int index = 0;
  rowMeta.getValueMeta( index++ ).setName( space.environmentSubstitute( transformationNameField ) );
  rowMeta.getValueMeta( index++ ).setName( space.environmentSubstitute( unitTestNameField ) );
  rowMeta.getValueMeta( index++ ).setName( space.environmentSubstitute( dataSetNameField ) );
  rowMeta.getValueMeta( index++ ).setName( space.environmentSubstitute( stepNameField ) );
  rowMeta.getValueMeta( index++ ).setName( space.environmentSubstitute( errorField ) );
  rowMeta.getValueMeta( index++ ).setName( space.environmentSubstitute( commentField ) );

  inputRowMeta.clear();
  inputRowMeta.addRowMeta( rowMeta );
}
Example 12
Source File: SalesforceInsertMetaTest.java from pentaho-kettle, Apache License 2.0
@Test
public void testGetFields() throws KettleStepException {
  SalesforceInsertMeta meta = new SalesforceInsertMeta();
  meta.setDefault();
  RowMetaInterface r = new RowMeta();
  meta.getFields( r, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, r.size() );
  assertEquals( "Id", r.getFieldNames()[0] );

  meta.setSalesforceIDFieldName( "id_field" );
  r.clear();
  meta.getFields( r, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, r.size() );
  assertEquals( "id_field", r.getFieldNames()[0] );
}
Example 13
Source File: RulesAccumulatorMeta.java from pentaho-kettle, Apache License 2.0
@Override
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  if ( !keepInputFields ) {
    row.clear();
  }

  if ( ruleResultColumns != null ) {
    for ( int i = 0; i < ruleResultColumns.size(); i++ ) {
      ruleResultColumns.get( i ).setOrigin( origin );
      row.addValueMeta( ruleResultColumns.get( i ) );
    }
  }
}
Example 14
Source File: BeamInputMeta.java from kettle-beam, Apache License 2.0
@Override
public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  if ( metaStore != null ) {
    FileDefinition fileDefinition = loadFileDefinition( metaStore );

    try {
      inputRowMeta.clear();
      inputRowMeta.addRowMeta( fileDefinition.getRowMeta() );
    } catch ( KettlePluginException e ) {
      throw new KettleStepException( "Unable to get row layout of file definition '" + fileDefinition.getName() + "'", e );
    }
  }
}
Example 15
Source File: SalesforceInputMetaTest.java from pentaho-kettle, Apache License 2.0
@Test
public void testGetFields() throws KettleStepException {
  SalesforceInputMeta meta = new SalesforceInputMeta();
  meta.setDefault();
  RowMetaInterface r = new RowMeta();
  meta.getFields( r, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 0, r.size() );

  meta.setInputFields( new SalesforceInputField[]{ new SalesforceInputField( "field1" ) } );
  r.clear();
  meta.getFields( r, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, r.size() );

  meta.setIncludeDeletionDate( true );
  meta.setDeletionDateField( "DeletionDate" );
  meta.setIncludeModule( true );
  meta.setModuleField( "ModuleName" );
  meta.setIncludeRowNumber( true );
  meta.setRowNumberField( "RN" );
  meta.setIncludeSQL( true );
  meta.setSQLField( "sqlField" );
  meta.setIncludeTargetURL( true );
  meta.setTargetURLField( "Target" );
  meta.setIncludeTimestamp( true );
  meta.setTimestampField( "TS" );
  r.clear();
  meta.getFields( r, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 7, r.size() );
  assertTrue( r.indexOfValue( "field1" ) >= 0 );
  assertTrue( r.indexOfValue( "DeletionDate" ) >= 0 );
  assertTrue( r.indexOfValue( "ModuleName" ) >= 0 );
  assertTrue( r.indexOfValue( "RN" ) >= 0 );
  assertTrue( r.indexOfValue( "sqlField" ) >= 0 );
  assertTrue( r.indexOfValue( "Target" ) >= 0 );
  assertTrue( r.indexOfValue( "TS" ) >= 0 );
}
Example 16
Source File: SalesforceDeleteMetaTest.java from pentaho-kettle, Apache License 2.0
@Test
public void testGetFields() throws KettleStepException {
  SalesforceDeleteMeta meta = new SalesforceDeleteMeta();
  meta.setDefault();

  RowMetaInterface r = new RowMeta();
  meta.getFields( r, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 0, r.size() );

  r.clear();
  r.addValueMeta( new ValueMetaString( "testString" ) );
  meta.getFields( r, "thisStep", null, null, new Variables(), null, null );
  assertEquals( 1, r.size() );
  assertEquals( ValueMetaInterface.TYPE_STRING, r.getValueMeta( 0 ).getType() );
  assertEquals( "testString", r.getValueMeta( 0 ).getName() );
}
Example 17
Source File: ParGzipCsvInputMeta.java from pentaho-kettle, Apache License 2.0
@Override
public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  try {
    rowMeta.clear(); // Start with a clean slate, eats the input

    for ( int i = 0; i < inputFields.length; i++ ) {
      TextFileInputField field = inputFields[i];

      ValueMetaInterface valueMeta = ValueMetaFactory.createValueMeta( field.getName(), field.getType() );
      valueMeta.setConversionMask( field.getFormat() );
      valueMeta.setLength( field.getLength() );
      valueMeta.setPrecision( field.getPrecision() );
      valueMeta.setConversionMask( field.getFormat() );
      valueMeta.setDecimalSymbol( field.getDecimalSymbol() );
      valueMeta.setGroupingSymbol( field.getGroupSymbol() );
      valueMeta.setCurrencySymbol( field.getCurrencySymbol() );
      valueMeta.setTrimType( field.getTrimType() );
      if ( lazyConversionActive ) {
        valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
      }
      valueMeta.setStringEncoding( space.environmentSubstitute( encoding ) );

      // In case we want to convert Strings...
      // Using a copy of the valueMeta object means that the inner and outer representation format is the same.
      // Preview will show the data the same way as we read it.
      // This layout is then taken further down the road by the metadata through the transformation.
      //
      ValueMetaInterface storageMetadata =
        ValueMetaFactory.cloneValueMeta( valueMeta, ValueMetaInterface.TYPE_STRING );
      storageMetadata.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
      storageMetadata.setLength( -1, -1 ); // we don't really know the lengths of the strings read in advance.
      valueMeta.setStorageMetadata( storageMetadata );

      valueMeta.setOrigin( origin );

      rowMeta.addValueMeta( valueMeta );
    }

    if ( !Utils.isEmpty( filenameField ) && includingFilename ) {
      ValueMetaInterface filenameMeta = new ValueMetaString( filenameField );
      filenameMeta.setOrigin( origin );
      if ( lazyConversionActive ) {
        filenameMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
        filenameMeta.setStorageMetadata( new ValueMetaString( filenameField ) );
      }
      rowMeta.addValueMeta( filenameMeta );
    }

    if ( !Utils.isEmpty( rowNumField ) ) {
      ValueMetaInterface rowNumMeta = new ValueMetaInteger( rowNumField );
      rowNumMeta.setLength( 10 );
      rowNumMeta.setOrigin( origin );
      rowMeta.addValueMeta( rowNumMeta );
    }
  } catch ( Exception e ) {
    throw new KettleStepException( e );
  }
}
Example 18
Source File: StepMetastructureMeta.java from pentaho-kettle, Apache License 2.0
@Override
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // we create a new output row structure - clear r
  r.clear();

  this.setDefault();
  // create the new fields
  // Position
  ValueMetaInterface positionFieldValue = new ValueMetaInteger( positionName );
  positionFieldValue.setOrigin( name );
  r.addValueMeta( positionFieldValue );
  // field name
  ValueMetaInterface nameFieldValue = new ValueMetaString( fieldName );
  nameFieldValue.setOrigin( name );
  r.addValueMeta( nameFieldValue );
  // comments
  ValueMetaInterface commentsFieldValue = new ValueMetaString( comments );
  commentsFieldValue.setOrigin( name ); // note: the original source sets the origin on nameFieldValue here, which looks like a typo
  r.addValueMeta( commentsFieldValue );
  // Type
  ValueMetaInterface typeFieldValue = new ValueMetaString( typeName );
  typeFieldValue.setOrigin( name );
  r.addValueMeta( typeFieldValue );
  // Length
  ValueMetaInterface lengthFieldValue = new ValueMetaInteger( lengthName );
  lengthFieldValue.setOrigin( name );
  r.addValueMeta( lengthFieldValue );
  // Precision
  ValueMetaInterface precisionFieldValue = new ValueMetaInteger( precisionName );
  precisionFieldValue.setOrigin( name );
  r.addValueMeta( precisionFieldValue );
  // Origin
  ValueMetaInterface originFieldValue = new ValueMetaString( originName );
  originFieldValue.setOrigin( name );
  r.addValueMeta( originFieldValue );

  if ( isOutputRowcount() ) {
    // RowCount
    ValueMetaInterface v = new ValueMetaInteger( this.getRowcountField() );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
}
Example 19
Source File: SimpleMappingMeta.java from pentaho-kettle, Apache License 2.0
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // First load some interesting data...
  // Then see which fields get added to the row.
  //
  TransMeta mappingTransMeta = null;
  try {
    mappingTransMeta =
      loadMappingMeta( this, repository, metaStore, space, mappingParameters.isInheritingAllVariables() );
  } catch ( KettleException e ) {
    throw new KettleStepException( BaseMessages.getString( PKG,
      "SimpleMappingMeta.Exception.UnableToLoadMappingTransformation" ), e );
  }

  // The field structure may depend on the input parameters as well (think of parameter replacements in MDX queries
  // for instance)
  if ( mappingParameters != null && mappingTransMeta != null ) {
    // Just set the variables in the transformation statically.
    // This just means: set a number of variables or parameter values:
    //
    StepWithMappingMeta.activateParams( mappingTransMeta, mappingTransMeta, space,
      mappingTransMeta.listParameters(), mappingParameters.getVariable(), mappingParameters.getInputField(),
      mappingParameters.isInheritingAllVariables() );
  }

  // Keep track of all the fields that need renaming...
  //
  List<MappingValueRename> inputRenameList = new ArrayList<MappingValueRename>();

  //
  // Before we ask the mapping outputs anything, we should teach the mapping
  // input steps in the sub-transformation about the data coming in...
  //
  RowMetaInterface inputRowMeta;

  // The row metadata, what we pass to the mapping input step
  // definition.getOutputStep(), is "row"
  // However, we do need to re-map some fields...
  //
  inputRowMeta = row.clone();
  if ( !inputRowMeta.isEmpty() ) {
    for ( MappingValueRename valueRename : inputMapping.getValueRenames() ) {
      ValueMetaInterface valueMeta = inputRowMeta.searchValueMeta( valueRename.getSourceValueName() );
      if ( valueMeta == null ) {
        throw new KettleStepException( BaseMessages.getString( PKG,
          "SimpleMappingMeta.Exception.UnableToFindField", valueRename.getSourceValueName() ) );
      }
      valueMeta.setName( valueRename.getTargetValueName() );
    }
  }

  // What is this mapping input step?
  //
  StepMeta mappingInputStep = mappingTransMeta.findMappingInputStep( null );

  // We're certain it's a MappingInput step...
  //
  MappingInputMeta mappingInputMeta = (MappingInputMeta) mappingInputStep.getStepMetaInterface();

  // Inform the mapping input step about what it's going to receive...
  //
  mappingInputMeta.setInputRowMeta( inputRowMeta );

  // What values are we changing names for: already done!
  //
  mappingInputMeta.setValueRenames( null );

  // Keep a list of the input rename values that need to be changed back at
  // the output
  //
  if ( inputMapping.isRenamingOnOutput() ) {
    SimpleMapping.addInputRenames( inputRenameList, inputMapping.getValueRenames() );
  }

  StepMeta mappingOutputStep = mappingTransMeta.findMappingOutputStep( null );

  // We know it's a mapping output step...
  MappingOutputMeta mappingOutputMeta = (MappingOutputMeta) mappingOutputStep.getStepMetaInterface();

  // Change a few columns.
  mappingOutputMeta.setOutputValueRenames( outputMapping.getValueRenames() );

  // Perhaps we need to change a few input columns back to the original?
  //
  mappingOutputMeta.setInputValueRenames( inputRenameList );

  // Now we know what's going to come out of there...
  // This is going to be the full row, including all the remapping, etc.
  //
  RowMetaInterface mappingOutputRowMeta = mappingTransMeta.getStepFields( mappingOutputStep );

  row.clear();
  row.addRowMeta( mappingOutputRowMeta );
}
Example 20
Source File: CsvInputMeta.java from pentaho-kettle, Apache License 2.0
@Override
public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  try {
    rowMeta.clear(); // Start with a clean slate, eats the input

    for ( int i = 0; i < inputFields.length; i++ ) {
      TextFileInputField field = inputFields[i];

      ValueMetaInterface valueMeta = ValueMetaFactory.createValueMeta( field.getName(), field.getType() );
      valueMeta.setConversionMask( field.getFormat() );
      valueMeta.setLength( field.getLength() );
      valueMeta.setPrecision( field.getPrecision() );
      valueMeta.setConversionMask( field.getFormat() );
      valueMeta.setDecimalSymbol( field.getDecimalSymbol() );
      valueMeta.setGroupingSymbol( field.getGroupSymbol() );
      valueMeta.setCurrencySymbol( field.getCurrencySymbol() );
      valueMeta.setTrimType( field.getTrimType() );
      if ( lazyConversionActive ) {
        valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
      }
      valueMeta.setStringEncoding( space.environmentSubstitute( encoding ) );

      // In case we want to convert Strings...
      // Using a copy of the valueMeta object means that the inner and outer representation format is the same.
      // Preview will show the data the same way as we read it.
      // This layout is then taken further down the road by the metadata through the transformation.
      //
      ValueMetaInterface storageMetadata =
        ValueMetaFactory.cloneValueMeta( valueMeta, ValueMetaInterface.TYPE_STRING );
      storageMetadata.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
      storageMetadata.setLength( -1, -1 ); // we don't really know the lengths of the strings read in advance.
      valueMeta.setStorageMetadata( storageMetadata );

      valueMeta.setOrigin( origin );

      rowMeta.addValueMeta( valueMeta );
    }

    if ( !Utils.isEmpty( filenameField ) && includingFilename ) {
      ValueMetaInterface filenameMeta = new ValueMetaString( filenameField );
      filenameMeta.setOrigin( origin );
      if ( lazyConversionActive ) {
        filenameMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
        filenameMeta.setStorageMetadata( new ValueMetaString( filenameField ) );
      }
      rowMeta.addValueMeta( filenameMeta );
    }

    if ( !Utils.isEmpty( rowNumField ) ) {
      ValueMetaInterface rowNumMeta = new ValueMetaInteger( rowNumField );
      rowNumMeta.setLength( 10 );
      rowNumMeta.setOrigin( origin );
      rowMeta.addValueMeta( rowNumMeta );
    }
  } catch ( Exception e ) {
    throw new KettleStepException( e );
  }
}