Java Code Examples for org.pentaho.di.core.row.ValueMetaInterface#setStorageType()
The following examples show how to use org.pentaho.di.core.row.ValueMetaInterface#setStorageType(). The storage type controls how a field's value is physically held in a row: as a normal (native) Java object (STORAGE_TYPE_NORMAL), as a raw byte array that is only converted on demand (STORAGE_TYPE_BINARY_STRING, used for lazy conversion), or as an index into a list of distinct values (STORAGE_TYPE_INDEXED). The examples are taken from Pentaho Kettle (PDI) and related plugins; each one names its source file and license.
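Most of the examples below follow the same lazy-conversion pattern: an input step marks a String field as STORAGE_TYPE_BINARY_STRING so the raw bytes travel through the transformation unconverted, and attaches storage metadata that describes how to interpret those bytes; a consuming step later switches the field back to STORAGE_TYPE_NORMAL when it needs the native value. The sketch below illustrates that pattern in isolation. It is not taken from any of the projects listed here; the class name, field name, and sample bytes are illustrative only.

import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;

public class LazyConversionSketch {
  public static void main( String[] args ) throws Exception {
    // Describe a String field whose data is still in raw (lazy) binary form.
    ValueMetaInterface valueMeta = new ValueMetaString( "field1" ); // hypothetical field name
    valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );

    // The storage metadata describes how the raw bytes should be interpreted
    // (type, format, encoding) once conversion actually happens.
    ValueMetaInterface storageMeta = new ValueMetaString( "field1" );
    storageMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
    valueMeta.setStorageMetadata( storageMeta );

    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta( valueMeta );

    // The row still carries the raw bytes; getString() converts them on demand.
    Object[] row = new Object[] { "hello".getBytes( "UTF-8" ) };
    System.out.println( rowMeta.getString( row, 0 ) ); // expected to print "hello"
  }
}

Example 1 (SetValueConstant) and Example 8 (SortRowsMeta) show the other half of the pattern: calling setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ) and clearing or replacing the storage metadata once a value has been converted to its native form.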
Example 1
Source File: SetValueConstant.java From pentaho-kettle with Apache License 2.0
private void updateField( Object[] r ) throws Exception {
  // Loop through fields
  for ( int i = 0; i < data.getFieldnr(); i++ ) {
    // DO CONVERSION OF THE DEFAULT VALUE ...
    // Entered by user
    ValueMetaInterface targetValueMeta = data.getOutputRowMeta().getValueMeta( data.getFieldnrs()[i] );
    ValueMetaInterface sourceValueMeta = data.getConvertRowMeta().getValueMeta( data.getFieldnrs()[i] );

    if ( !Utils.isEmpty( meta.getField( i ).getReplaceMask() ) ) {
      sourceValueMeta.setConversionMask( meta.getField( i ).getReplaceMask() );
    }

    sourceValueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
    r[data.getFieldnrs()[i]] = targetValueMeta.convertData( sourceValueMeta, data.getRealReplaceByValues()[i] );
    targetValueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
  }
}
Example 2
Source File: TableInputMetaTest.java From pentaho-kettle with Apache License 2.0
@Test
public void testGetFields() throws Exception {
  TableInputMetaHandler meta = new TableInputMetaHandler();
  meta.setLazyConversionActive( true );
  DatabaseMeta dbMeta = mock( DatabaseMeta.class );
  meta.setDatabaseMeta( dbMeta );
  Database mockDB = meta.getDatabase();
  when( mockDB.getQueryFields( anyString(), anyBoolean() ) ).thenReturn( createMockFields() );

  RowMetaInterface expectedRowMeta = new RowMeta();
  ValueMetaInterface valueMeta = new ValueMetaString( "field1" );
  valueMeta.setStorageMetadata( new ValueMetaString( "field1" ) );
  valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
  expectedRowMeta.addValueMeta( valueMeta );

  VariableSpace space = mock( VariableSpace.class );
  RowMetaInterface rowMetaInterface = new RowMeta();
  meta.getFields( rowMetaInterface, "TABLE_INPUT_META", null, null, space, null, null );

  assertEquals( expectedRowMeta.toString(), rowMetaInterface.toString() );
}
Example 3
Source File: StringOperationsMetaTest.java From pentaho-kettle with Apache License 2.0
@Test
public void testGetFields() throws Exception {
  StringOperationsMeta meta = new StringOperationsMeta();
  meta.allocate( 1 );
  meta.setFieldInStream( new String[] { "field1" } );

  RowMetaInterface rowMetaInterface = new RowMeta();
  ValueMetaInterface valueMeta = new ValueMetaString( "field1" );
  valueMeta.setStorageMetadata( new ValueMetaString( "field1" ) );
  valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
  rowMetaInterface.addValueMeta( valueMeta );

  VariableSpace space = mock( VariableSpace.class );
  meta.getFields( rowMetaInterface, "STRING_OPERATIONS", null, null, space, null, null );

  RowMetaInterface expectedRowMeta = new RowMeta();
  expectedRowMeta.addValueMeta( new ValueMetaString( "field1" ) );
  assertEquals( expectedRowMeta.toString(), rowMetaInterface.toString() );
}
Example 4
Source File: GoogleSpreadsheetInputMeta.java From pdi-google-spreadsheet-plugin with BSD 3-Clause "New" or "Revised" License
@Override
public void getFields(RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep,
                      VariableSpace space, Repository repository, IMetaStore metaStore) throws KettleStepException {
  try {
    inputRowMeta.clear(); // Start with a clean slate, eats the input

    for (TextFileInputField field : inputFields) {
      ValueMetaInterface valueMeta = ValueMetaFactory.createValueMeta(field.getName(), field.getType());
      valueMeta.setConversionMask(field.getFormat());
      valueMeta.setLength(field.getLength());
      valueMeta.setPrecision(field.getPrecision());
      valueMeta.setConversionMask(field.getFormat());
      valueMeta.setDecimalSymbol(field.getDecimalSymbol());
      valueMeta.setGroupingSymbol(field.getGroupSymbol());
      valueMeta.setCurrencySymbol(field.getCurrencySymbol());
      valueMeta.setTrimType(field.getTrimType());
      valueMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
      valueMeta.setDateFormatLenient(true);
      valueMeta.setStringEncoding("UTF-8");

      ValueMetaInterface storageMetadata =
          ValueMetaFactory.cloneValueMeta(valueMeta, ValueMetaInterface.TYPE_STRING);
      storageMetadata.setStorageType(ValueMetaInterface.STORAGE_TYPE_NORMAL);
      storageMetadata.setLength(-1, -1); // we don't really know the lengths of the strings read in advance.
      valueMeta.setStorageMetadata(storageMetadata);

      valueMeta.setOrigin(name);

      inputRowMeta.addValueMeta(valueMeta);
    }
  } catch (Exception e) {
  }
}
Example 5
Source File: FixedInputMeta.java From pentaho-kettle with Apache License 2.0
public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  try {
    for ( int i = 0; i < fieldDefinition.length; i++ ) {
      FixedFileInputField field = fieldDefinition[i];

      ValueMetaInterface valueMeta = ValueMetaFactory.createValueMeta( field.getName(), field.getType() );
      valueMeta.setConversionMask( field.getFormat() );
      valueMeta.setTrimType( field.getTrimType() );
      valueMeta.setLength( field.getLength() );
      valueMeta.setPrecision( field.getPrecision() );
      valueMeta.setConversionMask( field.getFormat() );
      valueMeta.setDecimalSymbol( field.getDecimal() );
      valueMeta.setGroupingSymbol( field.getGrouping() );
      valueMeta.setCurrencySymbol( field.getCurrency() );
      valueMeta.setStringEncoding( space.environmentSubstitute( encoding ) );
      if ( lazyConversionActive ) {
        valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
      }

      // In case we want to convert Strings...
      //
      ValueMetaInterface storageMetadata =
        ValueMetaFactory.cloneValueMeta( valueMeta, ValueMetaInterface.TYPE_STRING );
      storageMetadata.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );

      valueMeta.setStorageMetadata( storageMetadata );

      valueMeta.setOrigin( origin );

      rowMeta.addValueMeta( valueMeta );
    }
  } catch ( Exception e ) {
    throw new KettleStepException( e );
  }
}
Example 6
Source File: FixedInput.java From pentaho-kettle with Apache License 2.0
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  meta = (FixedInputMeta) smi;
  data = (FixedInputData) sdi;

  if ( first ) {
    first = false;

    data.outputRowMeta = new RowMeta();
    meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );

    // The conversion logic for when the lazy conversion is turned off is simple:
    // Pretend it's a lazy conversion object anyway and get the native type during conversion.
    //
    data.convertRowMeta = data.outputRowMeta.clone();
    for ( ValueMetaInterface valueMeta : data.convertRowMeta.getValueMetaList() ) {
      valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
    }

    if ( meta.isHeaderPresent() ) {
      readOneRow( false ); // skip this row.
    }
  }

  Object[] outputRowData = readOneRow( true );
  if ( outputRowData == null ) { // no more input to be expected...
    setOutputDone();
    return false;
  }

  putRow( data.outputRowMeta, outputRowData ); // copy row to possible alternate rowset(s).

  if ( checkFeedback( getLinesInput() ) ) {
    logBasic( BaseMessages.getString( PKG, "FixedInput.Log.LineNumber", Long.toString( getLinesInput() ) ) );
  }

  return true;
}
Example 7
Source File: MemoryGroupBy.java From pentaho-kettle with Apache License 2.0
private void initGroupMeta( RowMetaInterface previousRowMeta ) throws KettleValueException {
  data.groupMeta = new RowMeta();
  data.entryMeta = new RowMeta();

  for ( int i = 0; i < data.groupnrs.length; i++ ) {
    ValueMetaInterface valueMeta = previousRowMeta.getValueMeta( data.groupnrs[i] );
    data.groupMeta.addValueMeta( valueMeta );

    ValueMetaInterface normalMeta = valueMeta.clone();
    normalMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
  }

  return;
}
Example 8
Source File: SortRowsMeta.java From pentaho-kettle with Apache License 2.0
@SuppressWarnings( "WeakerAccess" )
public void assignSortingCriteria( RowMetaInterface inputRowMeta ) {
  for ( int i = 0; i < fieldName.length; i++ ) {
    int idx = inputRowMeta.indexOfValue( fieldName[i] );
    if ( idx >= 0 ) {
      ValueMetaInterface valueMeta = inputRowMeta.getValueMeta( idx );
      // On all these valueMetas, check to see if the value actually exists before we try to
      // set them.
      if ( ascending.length > i ) {
        valueMeta.setSortedDescending( !ascending[i] );
      }
      if ( caseSensitive.length > i ) {
        valueMeta.setCaseInsensitive( !caseSensitive[i] );
      }
      if ( collatorEnabled.length > i ) {
        valueMeta.setCollatorDisabled( !collatorEnabled[i] );
      }
      if ( collatorStrength.length > i ) {
        valueMeta.setCollatorStrength( collatorStrength[i] );
      }
      // Also see if lazy conversion is active on these key fields.
      // If so we want to automatically convert them to the normal storage type.
      // This will improve performance, see also: PDI-346
      //
      valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
      valueMeta.setStorageMetadata( null );
    }
  }
}
Example 9
Source File: ValueMetaFactory.java From pentaho-kettle with Apache License 2.0
public static void cloneInfo( ValueMetaInterface source, ValueMetaInterface target ) throws KettlePluginException {
  target.setConversionMask( source.getConversionMask() );
  target.setDecimalSymbol( source.getDecimalSymbol() );
  target.setGroupingSymbol( source.getGroupingSymbol() );
  target.setStorageType( source.getStorageType() );
  if ( source.getStorageMetadata() != null ) {
    target.setStorageMetadata( cloneValueMeta( source.getStorageMetadata(), source
      .getStorageMetadata().getType() ) );
  }
  target.setStringEncoding( source.getStringEncoding() );
  target.setTrimType( source.getTrimType() );
  target.setDateFormatLenient( source.isDateFormatLenient() );
  target.setDateFormatLocale( source.getDateFormatLocale() );
  target.setDateFormatTimeZone( source.getDateFormatTimeZone() );
  target.setLenientStringToNumber( source.isLenientStringToNumber() );
  target.setLargeTextField( source.isLargeTextField() );
  target.setComments( source.getComments() );
  target.setCaseInsensitive( source.isCaseInsensitive() );
  target.setCollatorDisabled( source.isCollatorDisabled() );
  target.setCollatorStrength( source.getCollatorStrength() );
  target.setIndex( source.getIndex() );
  target.setOrigin( source.getOrigin() );
  target.setOriginalAutoIncrement( source.isOriginalAutoIncrement() );
  target.setOriginalColumnType( source.getOriginalColumnType() );
  target.setOriginalColumnTypeName( source.getOriginalColumnTypeName() );
  target.setOriginalNullable( source.isOriginalNullable() );
  target.setOriginalPrecision( source.getOriginalPrecision() );
  target.setOriginalScale( source.getOriginalScale() );
  target.setOriginalSigned( source.isOriginalSigned() );
}
Example 10
Source File: DatabaseLookupUTest.java From pentaho-kettle with Apache License 2.0
private Database createVirtualDb( DatabaseMeta meta ) throws Exception {
  ResultSet rs = mock( ResultSet.class );
  when( rs.getMetaData() ).thenReturn( mock( ResultSetMetaData.class ) );

  PreparedStatement ps = mock( PreparedStatement.class );
  when( ps.executeQuery() ).thenReturn( rs );

  Connection connection = mock( Connection.class );
  when( connection.prepareStatement( anyString() ) ).thenReturn( ps );

  Database db = new Database( mock( LoggingObjectInterface.class ), meta );
  db.setConnection( connection );

  db = spy( db );
  doNothing().when( db ).normalConnect( anyString() );

  ValueMetaInterface binary = new ValueMetaString( BINARY_FIELD );
  binary.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );

  ValueMetaInterface id = new ValueMetaInteger( ID_FIELD );

  RowMetaInterface metaByQuerying = new RowMeta();
  metaByQuerying.addValueMeta( binary );
  metaByQuerying.addValueMeta( id );

  doReturn( metaByQuerying ).when( db ).getTableFields( anyString() );
  doReturn( metaByQuerying ).when( db ).getTableFieldsMeta( anyString(), anyString() );

  return db;
}
Example 11
Source File: SetValueConstantTest.java From pentaho-kettle with Apache License 2.0
@Test
public void testUpdateField() throws Exception {
  SetValueConstant step = new SetValueConstant( smh.stepMeta, smh.stepDataInterface, 0, smh.transMeta, smh.trans );

  ValueMetaInterface valueMeta = new ValueMetaString( "Field1" );
  valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
  RowMeta rowMeta = new RowMeta();
  rowMeta.addValueMeta( valueMeta );

  SetValueConstantMeta.Field field = new SetValueConstantMeta.Field();
  field.setFieldName( "Field Name" );
  field.setEmptyString( true );
  field.setReplaceMask( "Replace Mask" );
  field.setReplaceValue( "Replace Value" );

  doReturn( Collections.singletonList( field ) ).when( smh.initStepMetaInterface ).getFields();
  doReturn( field ).when( smh.initStepMetaInterface ).getField( 0 );
  doReturn( rowMeta ).when( smh.initStepDataInterface ).getConvertRowMeta();
  doReturn( rowMeta ).when( smh.initStepDataInterface ).getOutputRowMeta();
  doReturn( 1 ).when( smh.initStepDataInterface ).getFieldnr();
  doReturn( new int[] { 0 } ).when( smh.initStepDataInterface ).getFieldnrs();
  doReturn( new String[] { "foo" } ).when( smh.initStepDataInterface ).getRealReplaceByValues();

  step.init( smh.initStepMetaInterface, smh.initStepDataInterface );

  Method m = SetValueConstant.class.getDeclaredMethod( "updateField", Object[].class );
  m.setAccessible( true );

  Object[] row = new Object[] { null };
  m.invoke( step, new Object[] { row } );

  Assert.assertEquals( "foo", valueMeta.getString( row[0] ) );
}
Example 12
Source File: FuzzyMatchTest.java From pentaho-kettle with Apache License 2.0
@Test
public void testLookupValuesWhenMainFieldIsNull() throws Exception {
  FuzzyMatchData data = spy( new FuzzyMatchData() );
  FuzzyMatchMeta meta = spy( new FuzzyMatchMeta() );
  data.readLookupValues = false;
  fuzzyMatch =
    new FuzzyMatchHandler( mockHelper.stepMeta, mockHelper.stepDataInterface, 0, mockHelper.transMeta,
      mockHelper.trans );
  fuzzyMatch.init( meta, data );
  fuzzyMatch.first = false;
  data.indexOfMainField = 1;
  Object[] inputRow = { "test input", null };
  RowSet lookupRowSet = mockHelper.getMockInputRowSet( new Object[] { "test lookup" } );
  fuzzyMatch.addRowSetToInputRowSets( mockHelper.getMockInputRowSet( inputRow ) );
  fuzzyMatch.addRowSetToInputRowSets( lookupRowSet );
  fuzzyMatch.rowset = lookupRowSet;

  RowMetaInterface rowMetaInterface = new RowMeta();
  ValueMetaInterface valueMeta = new ValueMetaString( "field1" );
  valueMeta.setStorageMetadata( new ValueMetaString( "field1" ) );
  // The original test passes TYPE_STRING to setStorageType(); as a storage type,
  // that value ( 2 ) corresponds to STORAGE_TYPE_INDEXED.
  valueMeta.setStorageType( ValueMetaInterface.TYPE_STRING );
  rowMetaInterface.addValueMeta( valueMeta );
  when( lookupRowSet.getRowMeta() ).thenReturn( rowMetaInterface );
  fuzzyMatch.setInputRowMeta( rowMetaInterface.clone() );
  data.outputRowMeta = rowMetaInterface.clone();

  fuzzyMatch.processRow( meta, data );

  Assert.assertEquals( inputRow[0], fuzzyMatch.resultRow[0] );
  Assert.assertNull( fuzzyMatch.resultRow[1] );
  Assert.assertTrue(
    Arrays.stream( fuzzyMatch.resultRow, 3, fuzzyMatch.resultRow.length ).allMatch( val -> val == null ) );
}
Example 13
Source File: S3CsvInput.java From pentaho-kettle with Apache License 2.0
@Override
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  meta = (S3CsvInputMeta) smi;
  data = (S3CsvInputData) sdi;

  if ( first ) {
    first = false;

    data.outputRowMeta = new RowMeta();
    meta.getFields( data.outputRowMeta, getStepname(), null, null, this );

    if ( data.filenames == null ) {
      // We're expecting the list of filenames from the previous step(s)...
      //
      getFilenamesFromPreviousSteps();
    }

    // We only run in parallel if we have at least one file to process
    // AND if we have more than one step copy running...
    //
    data.parallel = meta.isRunningInParallel() && data.totalNumberOfSteps > 1;

    // The conversion logic for when the lazy conversion is turned off is simple:
    // Pretend it's a lazy conversion object anyway and get the native type during conversion.
    //
    data.convertRowMeta = data.outputRowMeta.clone();
    for ( ValueMetaInterface valueMeta : data.convertRowMeta.getValueMetaList() ) {
      valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
    }

    // Now handle the parallel reading aspect: determine total of all the file sizes
    // Then skip to the appropriate file and location in the file to start reading...
    // Also skip to right after the first newline
    //
    if ( data.parallel ) {
      prepareToRunInParallel();
    }

    // Open the next file...
    //
    if ( !openNextFile() ) {
      setOutputDone();
      return false; // nothing to see here, move along...
    }
  }

  // If we are running in parallel, make sure we don't read too much in this step copy...
  //
  if ( data.parallel ) {
    if ( data.totalBytesRead > data.blockToRead ) {
      setOutputDone(); // stop reading
      return false;
    }
  }

  Object[] outputRowData = readOneRow( true ); // get row, set busy!
  if ( outputRowData == null ) { // no more input to be expected...
    if ( openNextFile() ) {
      return true; // try again on the next loop...
    } else {
      setOutputDone(); // last file, end here
      return false;
    }
  } else {
    putRow( data.outputRowMeta, outputRowData ); // copy row to possible alternate rowset(s).
    if ( checkFeedback( getLinesInput() ) ) {
      if ( log.isBasic() ) {
        logBasic( Messages.getString( "S3CsvInput.Log.LineNumber", Long.toString( getLinesInput() ) ) ); //$NON-NLS-1$
      }
    }
  }
  return true;
}
Example 14
Source File: CassandraColumnMetaData.java From learning-hadoop with Apache License 2.0
/**
 * Get the Kettle ValueMeta that corresponds to the type of the supplied
 * cassandra column.
 *
 * @param colName the name of the column to get a ValueMeta for
 * @return the ValueMeta that is appropriate for the type of the supplied
 *         column.
 */
public ValueMetaInterface getValueMetaForColumn(String colName) {
  String type = null;

  // check the key first
  if (colName.equals(getKeyName())) {
    type = m_keyValidator;
  } else {
    type = m_columnMeta.get(colName);
    if (type == null) {
      type = m_defaultValidationClass;
    }
  }

  int kettleType = 0;
  if (type.indexOf("UTF8Type") > 0 || type.indexOf("AsciiType") > 0
      || type.indexOf("UUIDType") > 0 || type.indexOf("CompositeType") > 0) {
    kettleType = ValueMetaInterface.TYPE_STRING;
  } else if (type.indexOf("LongType") > 0 || type.indexOf("IntegerType") > 0
      || type.indexOf("Int32Type") > 0) {
    kettleType = ValueMetaInterface.TYPE_INTEGER;
  } else if (type.indexOf("DoubleType") > 0 || type.indexOf("FloatType") > 0) {
    kettleType = ValueMetaInterface.TYPE_NUMBER;
  } else if (type.indexOf("DateType") > 0) {
    kettleType = ValueMetaInterface.TYPE_DATE;
  } else if (type.indexOf("DecimalType") > 0) {
    kettleType = ValueMetaInterface.TYPE_BIGNUMBER;
  } else if (type.indexOf("BytesType") > 0) {
    kettleType = ValueMetaInterface.TYPE_BINARY;
  } else if (type.indexOf("BooleanType") > 0) {
    kettleType = ValueMetaInterface.TYPE_BOOLEAN;
  }

  ValueMetaInterface newVM = new ValueMeta(colName, kettleType);

  if (m_indexedVals.containsKey(colName)) {
    // make it indexed!
    newVM.setStorageType(ValueMetaInterface.STORAGE_TYPE_INDEXED);
    HashSet<Object> indexedV = m_indexedVals.get(colName);
    Object[] iv = indexedV.toArray();
    newVM.setIndex(iv);
  }

  return newVM;
}
Example 15
Source File: S3CsvInputMeta.java From pentaho-kettle with Apache License 2.0
@Override
public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  rowMeta.clear(); // Start with a clean slate, eats the input

  for ( int i = 0; i < inputFields.length; i++ ) {
    TextFileInputField field = inputFields[i];

    ValueMetaInterface valueMeta = new ValueMeta( field.getName(), field.getType() );
    valueMeta.setConversionMask( field.getFormat() );
    valueMeta.setLength( field.getLength() );
    valueMeta.setPrecision( field.getPrecision() );
    valueMeta.setConversionMask( field.getFormat() );
    valueMeta.setDecimalSymbol( field.getDecimalSymbol() );
    valueMeta.setGroupingSymbol( field.getGroupSymbol() );
    valueMeta.setCurrencySymbol( field.getCurrencySymbol() );
    valueMeta.setTrimType( field.getTrimType() );
    if ( lazyConversionActive ) {
      valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
    }

    // In case we want to convert Strings...
    // Using a copy of the valueMeta object means that the inner and outer representation format is the same.
    // Preview will show the data the same way as we read it.
    // This layout is then taken further down the road by the metadata through the transformation.
    //
    ValueMetaInterface storageMetadata = valueMeta.clone();
    storageMetadata.setType( ValueMetaInterface.TYPE_STRING );
    storageMetadata.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
    storageMetadata.setLength( -1, -1 ); // we don't really know the lengths of the strings read in advance.
    valueMeta.setStorageMetadata( storageMetadata );

    valueMeta.setOrigin( origin );

    rowMeta.addValueMeta( valueMeta );
  }

  if ( !Utils.isEmpty( filenameField ) && includingFilename ) {
    ValueMetaInterface filenameMeta = new ValueMetaString( filenameField );
    filenameMeta.setOrigin( origin );
    if ( lazyConversionActive ) {
      filenameMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
      filenameMeta.setStorageMetadata( new ValueMetaString( filenameField ) );
    }
    rowMeta.addValueMeta( filenameMeta );
  }

  if ( !Utils.isEmpty( rowNumField ) ) {
    ValueMetaInterface rowNumMeta = new ValueMetaInteger( rowNumField );
    rowNumMeta.setLength( 10 );
    rowNumMeta.setOrigin( origin );
    rowMeta.addValueMeta( rowNumMeta );
  }
}
Example 16
Source File: FuzzyMatchTest.java From pentaho-kettle with Apache License 2.0
@Test
public void testReadLookupValues() throws Exception {
  FuzzyMatchData data = spy( new FuzzyMatchData() );
  data.indexOfCachedFields = new int[2];
  data.minimalDistance = 0;
  data.maximalDistance = 5;
  FuzzyMatchMeta meta = spy( new FuzzyMatchMeta() );
  meta.setOutputMatchField( "I don't want NPE here!" );
  data.readLookupValues = true;
  fuzzyMatch =
    new FuzzyMatchHandler( mockHelper.stepMeta, mockHelper.stepDataInterface, 0, mockHelper.transMeta,
      mockHelper.trans );
  fuzzyMatch.init( meta, data );
  RowSet lookupRowSet = mockHelper.getMockInputRowSet( binaryLookupRows );
  fuzzyMatch.addRowSetToInputRowSets( mockHelper.getMockInputRowSet( binaryRows ) );
  fuzzyMatch.addRowSetToInputRowSets( lookupRowSet );
  fuzzyMatch.rowset = lookupRowSet;

  RowMetaInterface rowMetaInterface = new RowMeta();
  ValueMetaInterface valueMeta = new ValueMetaString( "field1" );
  valueMeta.setStorageMetadata( new ValueMetaString( "field1" ) );
  valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
  rowMetaInterface.addValueMeta( valueMeta );
  when( lookupRowSet.getRowMeta() ).thenReturn( rowMetaInterface );
  when( meta.getLookupField() ).thenReturn( "field1" );
  when( meta.getMainStreamField() ).thenReturn( "field1" );
  fuzzyMatch.setInputRowMeta( rowMetaInterface.clone() );

  when( meta.getAlgorithmType() ).thenReturn( 1 );
  StepIOMetaInterface stepIOMetaInterface = mock( StepIOMetaInterface.class );
  when( meta.getStepIOMeta() ).thenReturn( stepIOMetaInterface );
  StreamInterface streamInterface = mock( StreamInterface.class );
  List<StreamInterface> streamInterfaceList = new ArrayList<StreamInterface>();
  streamInterfaceList.add( streamInterface );
  when( streamInterface.getStepMeta() ).thenReturn( mockHelper.stepMeta );

  when( stepIOMetaInterface.getInfoStreams() ).thenReturn( streamInterfaceList );

  fuzzyMatch.processRow( meta, data );

  Assert.assertEquals( rowMetaInterface.getString( row3B, 0 ),
    data.outputRowMeta.getString( fuzzyMatch.resultRow, 1 ) );
}
Example 17
Source File: CsvInputMeta.java From pentaho-kettle with Apache License 2.0
@Override
public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  try {
    rowMeta.clear(); // Start with a clean slate, eats the input

    for ( int i = 0; i < inputFields.length; i++ ) {
      TextFileInputField field = inputFields[i];

      ValueMetaInterface valueMeta = ValueMetaFactory.createValueMeta( field.getName(), field.getType() );
      valueMeta.setConversionMask( field.getFormat() );
      valueMeta.setLength( field.getLength() );
      valueMeta.setPrecision( field.getPrecision() );
      valueMeta.setConversionMask( field.getFormat() );
      valueMeta.setDecimalSymbol( field.getDecimalSymbol() );
      valueMeta.setGroupingSymbol( field.getGroupSymbol() );
      valueMeta.setCurrencySymbol( field.getCurrencySymbol() );
      valueMeta.setTrimType( field.getTrimType() );
      if ( lazyConversionActive ) {
        valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
      }
      valueMeta.setStringEncoding( space.environmentSubstitute( encoding ) );

      // In case we want to convert Strings...
      // Using a copy of the valueMeta object means that the inner and outer representation format is the same.
      // Preview will show the data the same way as we read it.
      // This layout is then taken further down the road by the metadata through the transformation.
      //
      ValueMetaInterface storageMetadata =
        ValueMetaFactory.cloneValueMeta( valueMeta, ValueMetaInterface.TYPE_STRING );
      storageMetadata.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
      storageMetadata.setLength( -1, -1 ); // we don't really know the lengths of the strings read in advance.
      valueMeta.setStorageMetadata( storageMetadata );

      valueMeta.setOrigin( origin );

      rowMeta.addValueMeta( valueMeta );
    }

    if ( !Utils.isEmpty( filenameField ) && includingFilename ) {
      ValueMetaInterface filenameMeta = new ValueMetaString( filenameField );
      filenameMeta.setOrigin( origin );
      if ( lazyConversionActive ) {
        filenameMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
        filenameMeta.setStorageMetadata( new ValueMetaString( filenameField ) );
      }
      rowMeta.addValueMeta( filenameMeta );
    }

    if ( !Utils.isEmpty( rowNumField ) ) {
      ValueMetaInterface rowNumMeta = new ValueMetaInteger( rowNumField );
      rowNumMeta.setLength( 10 );
      rowNumMeta.setOrigin( origin );
      rowMeta.addValueMeta( rowNumMeta );
    }
  } catch ( Exception e ) {
    throw new KettleStepException( e );
  }
}
Example 18
Source File: TableInputMeta.java From pentaho-kettle with Apache License 2.0
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  if ( databaseMeta == null ) {
    return; // TODO: throw an exception here
  }

  if ( cachedRowMetaActive ) {
    row.addRowMeta( cachedRowMeta );
    return;
  }

  boolean param = false;

  Database db = getDatabase();
  super.databases = new Database[] { db }; // keep track of it for canceling purposes...

  // First try without connecting to the database... (can be S L O W)
  String sNewSQL = sql;
  if ( isVariableReplacementActive() ) {
    sNewSQL = db.environmentSubstitute( sql );
    if ( space != null ) {
      sNewSQL = space.environmentSubstitute( sNewSQL );
    }
  }

  RowMetaInterface add = null;
  try {
    add = db.getQueryFields( sNewSQL, param );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleStepException( "Unable to get queryfields for SQL: " + Const.CR + sNewSQL, dbe );
  }

  if ( add != null ) {
    attachOrigin( add, origin );
    row.addRowMeta( add );
  } else {
    try {
      db.connect();

      RowMetaInterface paramRowMeta = null;
      Object[] paramData = null;

      StreamInterface infoStream = getStepIOMeta().getInfoStreams().get( 0 );
      if ( !Utils.isEmpty( infoStream.getStepname() ) ) {
        param = true;
        if ( info.length > 0 && info[ 0 ] != null ) {
          paramRowMeta = info[ 0 ];
          paramData = RowDataUtil.allocateRowData( paramRowMeta.size() );
        }
      }

      add = db.getQueryFields( sNewSQL, param, paramRowMeta, paramData );

      if ( add == null ) {
        return;
      }
      attachOrigin( add, origin );
      row.addRowMeta( add );
    } catch ( KettleException ke ) {
      throw new KettleStepException( "Unable to get queryfields for SQL: " + Const.CR + sNewSQL, ke );
    } finally {
      db.disconnect();
    }
  }

  if ( isLazyConversionActive() ) {
    for ( int i = 0; i < row.size(); i++ ) {
      ValueMetaInterface v = row.getValueMeta( i );
      try {
        if ( v.getType() == ValueMetaInterface.TYPE_STRING ) {
          ValueMetaInterface storageMeta = ValueMetaFactory.cloneValueMeta( v );
          storageMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
          v.setStorageMetadata( storageMeta );
          v.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
        }
      } catch ( KettlePluginException e ) {
        throw new KettleStepException( "Unable to clone meta for lazy conversion: " + Const.CR + v, e );
      }
    }
  }
}
Example 19
Source File: ParGzipCsvInputMeta.java From pentaho-kettle with Apache License 2.0
@Override
public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  try {
    rowMeta.clear(); // Start with a clean slate, eats the input

    for ( int i = 0; i < inputFields.length; i++ ) {
      TextFileInputField field = inputFields[i];

      ValueMetaInterface valueMeta = ValueMetaFactory.createValueMeta( field.getName(), field.getType() );
      valueMeta.setConversionMask( field.getFormat() );
      valueMeta.setLength( field.getLength() );
      valueMeta.setPrecision( field.getPrecision() );
      valueMeta.setConversionMask( field.getFormat() );
      valueMeta.setDecimalSymbol( field.getDecimalSymbol() );
      valueMeta.setGroupingSymbol( field.getGroupSymbol() );
      valueMeta.setCurrencySymbol( field.getCurrencySymbol() );
      valueMeta.setTrimType( field.getTrimType() );
      if ( lazyConversionActive ) {
        valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
      }
      valueMeta.setStringEncoding( space.environmentSubstitute( encoding ) );

      // In case we want to convert Strings...
      // Using a copy of the valueMeta object means that the inner and outer representation format is the same.
      // Preview will show the data the same way as we read it.
      // This layout is then taken further down the road by the metadata through the transformation.
      //
      ValueMetaInterface storageMetadata =
        ValueMetaFactory.cloneValueMeta( valueMeta, ValueMetaInterface.TYPE_STRING );
      storageMetadata.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
      storageMetadata.setLength( -1, -1 ); // we don't really know the lengths of the strings read in advance.
      valueMeta.setStorageMetadata( storageMetadata );

      valueMeta.setOrigin( origin );

      rowMeta.addValueMeta( valueMeta );
    }

    if ( !Utils.isEmpty( filenameField ) && includingFilename ) {
      ValueMetaInterface filenameMeta = new ValueMetaString( filenameField );
      filenameMeta.setOrigin( origin );
      if ( lazyConversionActive ) {
        filenameMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
        filenameMeta.setStorageMetadata( new ValueMetaString( filenameField ) );
      }
      rowMeta.addValueMeta( filenameMeta );
    }

    if ( !Utils.isEmpty( rowNumField ) ) {
      ValueMetaInterface rowNumMeta = new ValueMetaInteger( rowNumField );
      rowNumMeta.setLength( 10 );
      rowNumMeta.setOrigin( origin );
      rowMeta.addValueMeta( rowNumMeta );
    }
  } catch ( Exception e ) {
    throw new KettleStepException( e );
  }
}
Example 20
Source File: SelectValuesMeta.java From pentaho-kettle with Apache License 2.0
public void getMetadataFields( RowMetaInterface inputRowMeta, String name, VariableSpace space )
  throws KettlePluginException {
  if ( meta != null && meta.length > 0 ) {
    // METADATA mode: change the meta-data of the values mentioned...
    for ( int i = 0; i < meta.length; i++ ) {
      SelectMetadataChange metaChange = meta[i];

      int idx = inputRowMeta.indexOfValue( metaChange.getName() );
      boolean metaTypeChangeUsesNewTypeDefaults = false; // Normal behavior as of 5.x or so
      if ( space != null ) {
        metaTypeChangeUsesNewTypeDefaults = ValueMetaBase.convertStringToBoolean(
          space.getVariable( Const.KETTLE_COMPATIBILITY_SELECT_VALUES_TYPE_CHANGE_USES_TYPE_DEFAULTS, "N" ) );
      }
      if ( idx >= 0 ) { // We found the value

        // This is the value we need to change:
        ValueMetaInterface v = inputRowMeta.getValueMeta( idx );

        // Do we need to rename ?
        if ( !v.getName().equals( metaChange.getRename() ) && !Utils.isEmpty( metaChange.getRename() ) ) {
          v.setName( metaChange.getRename() );
          v.setOrigin( name );
          // need to reinsert to check name conflicts
          inputRowMeta.setValueMeta( idx, v );
        }
        // Change the type?
        if ( metaChange.getType() != ValueMetaInterface.TYPE_NONE && v.getType() != metaChange.getType() ) {
          // Fix for PDI-16388 - clone copies over the conversion mask instead of using the default for the new type
          if ( !metaTypeChangeUsesNewTypeDefaults ) {
            v = ValueMetaFactory.cloneValueMeta( v, metaChange.getType() );
          } else {
            v = ValueMetaFactory.createValueMeta( v.getName(), metaChange.getType() );
          }

          // This is now a copy, replace it in the row!
          //
          inputRowMeta.setValueMeta( idx, v );

          // This also moves the data to normal storage type
          //
          v.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
        }
        if ( metaChange.getLength() != UNDEFINED ) {
          v.setLength( metaChange.getLength() );
          v.setOrigin( name );
        }
        if ( metaChange.getPrecision() != UNDEFINED ) {
          v.setPrecision( metaChange.getPrecision() );
          v.setOrigin( name );
        }
        if ( metaChange.getStorageType() >= 0 ) {
          v.setStorageType( metaChange.getStorageType() );
          v.setOrigin( name );
        }
        if ( !Utils.isEmpty( metaChange.getConversionMask() ) ) {
          v.setConversionMask( metaChange.getConversionMask() );
          v.setOrigin( name );
        }

        v.setDateFormatLenient( metaChange.isDateFormatLenient() );
        v.setDateFormatLocale( EnvUtil.createLocale( metaChange.getDateFormatLocale() ) );
        v.setDateFormatTimeZone( EnvUtil.createTimeZone( metaChange.getDateFormatTimeZone() ) );
        v.setLenientStringToNumber( metaChange.isLenientStringToNumber() );

        if ( !Utils.isEmpty( metaChange.getEncoding() ) ) {
          v.setStringEncoding( metaChange.getEncoding() );
          v.setOrigin( name );
        }
        if ( !Utils.isEmpty( metaChange.getDecimalSymbol() ) ) {
          v.setDecimalSymbol( metaChange.getDecimalSymbol() );
          v.setOrigin( name );
        }
        if ( !Utils.isEmpty( metaChange.getGroupingSymbol() ) ) {
          v.setGroupingSymbol( metaChange.getGroupingSymbol() );
          v.setOrigin( name );
        }
        if ( !Utils.isEmpty( metaChange.getCurrencySymbol() ) ) {
          v.setCurrencySymbol( metaChange.getCurrencySymbol() );
          v.setOrigin( name );
        }
      }
    }
  }
}