Java Code Examples for org.pentaho.di.core.row.ValueMetaInterface#TYPE_STRING
The following examples show how to use org.pentaho.di.core.row.ValueMetaInterface#TYPE_STRING.
Each example comes from an open-source project; the source file, project, and license are noted above the code.
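Before the project examples, here is a minimal, self-contained sketch of how TYPE_STRING is commonly used when declaring row metadata and checking a field's type. It is not taken from any of the projects below; the class name TypeStringExample and the field name "customerName" are purely illustrative.

import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;

public class TypeStringExample {
  public static void main( String[] args ) {
    // A ValueMetaString describes a string field; its getType() returns ValueMetaInterface.TYPE_STRING.
    ValueMetaInterface v = new ValueMetaString( "customerName" );
    v.setLength( 50 );

    // Add the field metadata to a row layout.
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta( v );

    // The constant is typically used in type checks and switch statements, as in the examples below.
    boolean isString = rowMeta.getValueMeta( 0 ).getType() == ValueMetaInterface.TYPE_STRING;
    System.out.println( isString ); // prints "true"
  }
}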
Example 1
Source File: KettleRowCoder.java From kettle-beam with Apache License 2.0
private int getObjectType( Object object ) throws CoderException {
  if ( object instanceof String ) {
    return ValueMetaInterface.TYPE_STRING;
  }
  if ( object instanceof Long ) {
    return ValueMetaInterface.TYPE_INTEGER;
  }
  if ( object instanceof Date ) {
    return ValueMetaInterface.TYPE_DATE;
  }
  if ( object instanceof Timestamp ) {
    return ValueMetaInterface.TYPE_TIMESTAMP;
  }
  if ( object instanceof Boolean ) {
    return ValueMetaInterface.TYPE_BOOLEAN;
  }
  if ( object instanceof Double ) {
    return ValueMetaInterface.TYPE_NUMBER;
  }
  if ( object instanceof BigDecimal ) {
    return ValueMetaInterface.TYPE_BIGNUMBER;
  }
  throw new CoderException( "Data type for object class " + object.getClass().getName() + " isn't supported yet" );
}
Example 2
Source File: ValueMetaBaseTest.java From pentaho-kettle with Apache License 2.0
@Test
public void testCompareDateWithStorageMask() throws KettleValueException {
  ValueMetaBase storageMeta = new ValueMetaBase( "string", ValueMetaInterface.TYPE_STRING );
  storageMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
  storageMeta.setConversionMask( "MM/dd/yyyy HH:mm" );

  ValueMetaBase dateMeta = new ValueMetaBase( "date", ValueMetaInterface.TYPE_DATE );
  dateMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
  dateMeta.setStorageMetadata( storageMeta );
  dateMeta.setConversionMask( "yyyy-MM-dd" );

  ValueMetaBase targetDateMeta = new ValueMetaBase( "date", ValueMetaInterface.TYPE_DATE );
  targetDateMeta.setConversionMask( "yyyy-MM-dd" );
  targetDateMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );

  String date = "2/24/2017 0:00";

  Date equalDate = new GregorianCalendar( 2017, Calendar.FEBRUARY, 24 ).getTime();
  assertEquals( 0, dateMeta.compare( date.getBytes(), targetDateMeta, equalDate ) );

  Date pastDate = new GregorianCalendar( 2017, Calendar.JANUARY, 24 ).getTime();
  assertEquals( 1, dateMeta.compare( date.getBytes(), targetDateMeta, pastDate ) );

  Date futureDate = new GregorianCalendar( 2017, Calendar.MARCH, 24 ).getTime();
  assertEquals( -1, dateMeta.compare( date.getBytes(), targetDateMeta, futureDate ) );
}
Example 3
Source File: PentahoOrcReadWriteTest.java From pentaho-hadoop-shims with Apache License 2.0
private ValueMetaInterface getValueMetaInterface( String fieldName, int fieldType ) {
  switch ( fieldType ) {
    case ValueMetaInterface.TYPE_INET:
      return new ValueMetaInternetAddress( fieldName );
    case ValueMetaInterface.TYPE_STRING:
      return new ValueMetaString( fieldName );
    case ValueMetaInterface.TYPE_INTEGER:
      return new ValueMetaInteger( fieldName );
    case ValueMetaInterface.TYPE_NUMBER:
      return new ValueMetaNumber( fieldName );
    case ValueMetaInterface.TYPE_BIGNUMBER:
      return new ValueMetaBigNumber( fieldName );
    case ValueMetaInterface.TYPE_TIMESTAMP:
      return new ValueMetaTimestamp( fieldName );
    case ValueMetaInterface.TYPE_DATE:
      return new ValueMetaDate( fieldName );
    case ValueMetaInterface.TYPE_BOOLEAN:
      return new ValueMetaBoolean( fieldName );
    case ValueMetaInterface.TYPE_BINARY:
      return new ValueMetaBinary( fieldName );
  }
  return null;
}
Example 4
Source File: DimensionLookupMeta.java From pentaho-kettle with Apache License 2.0
public static final int getUpdateType( boolean upd, String ty ) {
  if ( upd ) {
    for ( int i = 0; i < typeCodes.length; i++ ) {
      if ( typeCodes[i].equalsIgnoreCase( ty ) ) {
        return i;
      }
    }
    // for compatibility:
    for ( int i = 0; i < typeDesc.length; i++ ) {
      if ( typeDesc[i].equalsIgnoreCase( ty ) ) {
        return i;
      }
    }
    if ( "Y".equalsIgnoreCase( ty ) ) {
      return TYPE_UPDATE_DIM_PUNCHTHROUGH;
    }
    return TYPE_UPDATE_DIM_INSERT; // INSERT is the default: don't lose information.
  } else {
    int retval = ValueMetaFactory.getIdForValueMeta( ty );
    if ( retval == ValueMetaInterface.TYPE_NONE ) {
      retval = ValueMetaInterface.TYPE_STRING;
    }
    return retval;
  }
}
Example 5
Source File: CassandraColumnMetaData.java From learning-hadoop with Apache License 2.0
/**
 * Return the Cassandra column type (internal cassandra class name relative to
 * org.apache.cassandra.db.marshal) for the given Kettle column.
 *
 * @param vm the ValueMetaInterface for the Kettle column
 * @return the corresponding internal cassandra type.
 */
public static String getCassandraTypeForValueMeta(ValueMetaInterface vm) {
  switch (vm.getType()) {
  case ValueMetaInterface.TYPE_STRING:
    return "UTF8Type";
  case ValueMetaInterface.TYPE_BIGNUMBER:
    return "DecimalType";
  case ValueMetaInterface.TYPE_BOOLEAN:
    return "BooleanType";
  case ValueMetaInterface.TYPE_INTEGER:
    return "LongType";
  case ValueMetaInterface.TYPE_NUMBER:
    return "DoubleType";
  case ValueMetaInterface.TYPE_DATE:
    return "DateType";
  case ValueMetaInterface.TYPE_BINARY:
  case ValueMetaInterface.TYPE_SERIALIZABLE:
    return "BytesType";
  }

  return "UTF8Type";
}
Example 6
Source File: LDAPInputField.java From pentaho-kettle with Apache License 2.0
public LDAPInputField( String fieldname ) {
  this.name = fieldname;
  this.attribute = "";
  this.length = -1;
  this.fetchAttributeAs = FETCH_ATTRIBUTE_AS_STRING;
  this.type = ValueMetaInterface.TYPE_STRING;
  this.format = "";
  this.trimtype = TYPE_TRIM_NONE;
  this.groupSymbol = "";
  this.decimalSymbol = "";
  this.currencySymbol = "";
  this.precision = -1;
  this.repeat = false;
  this.realAttribute = "";
  this.sortedKey = false;
}
Example 7
Source File: JobGenerator.java From pentaho-kettle with Apache License 2.0
private ValueMetaInterface getValueForLogicalColumn(DatabaseMeta databaseMeta, LogicalColumn column) {
  String columnName = ConceptUtil.getName(column, locale);
  String phColumnName = ConceptUtil.getString(column, DefaultIDs.LOGICAL_COLUMN_PHYSICAL_COLUMN_NAME);
  DataType columnType = column.getDataType();
  String lengthString = ConceptUtil.getString(column, DefaultIDs.LOGICAL_COLUMN_LENGTH);
  int length = Const.toInt(lengthString, -1);
  String precisionString = ConceptUtil.getString(column, DefaultIDs.LOGICAL_COLUMN_PRECISION);
  int precision = Const.toInt(precisionString, -1);

  int type = ValueMetaInterface.TYPE_STRING;
  switch (columnType) {
    case UNKNOWN:
    case URL:
    case STRING:
      precision = -1;
      break;
    case IMAGE:
    case BINARY:
      type = ValueMetaInterface.TYPE_BINARY;
      precision = -1;
      break;
    case BOOLEAN:
      type = ValueMetaInterface.TYPE_BOOLEAN;
      length = -1;
      precision = -1;
      break;
    case DATE:
      type = ValueMetaInterface.TYPE_DATE;
      length = -1;
      precision = -1;
      break;
    case NUMERIC:
      if (precision <= 0 && length < 15) {
        type = ValueMetaInterface.TYPE_INTEGER;
      } else {
        if (length >= 15) {
          type = ValueMetaInterface.TYPE_BIGNUMBER;
        } else {
          type = ValueMetaInterface.TYPE_NUMBER;
        }
      }
      break;
    default:
      break;
  }

  ValueMetaInterface value = new ValueMeta(databaseMeta.quoteField(Const.NVL(phColumnName, columnName)), type);
  value.setLength(length, precision);
  return value;
}
Example 8
Source File: ValueMetaConverter.java From pentaho-kettle with Apache License 2.0
protected Object convertFromDateMetaInterface( int targetValueMetaType, Object value )
  throws ValueMetaConversionException {
  if ( value == null ) {
    return null;
  }

  // value is expected to be of type Date
  if ( !( value instanceof Date ) ) {
    handleConversionError(
      "Error. Expecting value of type Date. actual value type = '" + value.getClass() + "'. value = '" + value + "'." );
  }

  try {
    Date dateValue = (Date) value;
    switch ( targetValueMetaType ) {
      case ValueMetaInterface.TYPE_INTEGER:
        return dateValue.getTime();
      case ValueMetaInterface.TYPE_STRING:
        return datePattern.format( dateValue );
      case ValueMetaInterface.TYPE_TIMESTAMP:
        return new Timestamp( dateValue.getTime() );
      case ValueMetaInterface.TYPE_DATE:
        return new Date( dateValue.getTime() );
      default:
        throwBadConversionCombination( ValueMetaInterface.TYPE_DATE, targetValueMetaType, value );
    }
  } catch ( Exception e ) {
    throwErroredConversion( ValueMetaInterface.TYPE_DATE, targetValueMetaType, value, e );
  }

  return null;
}
Example 9
Source File: RssInputField.java From pentaho-kettle with Apache License 2.0
public RssInputField( String fieldname ) {
  this.name = fieldname;
  this.column = COLUMN_TITLE;
  this.length = -1;
  this.type = ValueMetaInterface.TYPE_STRING;
  this.format = "";
  this.trimtype = TYPE_TRIM_NONE;
  this.groupSymbol = "";
  this.decimalSymbol = "";
  this.currencySymbol = "";
  this.precision = -1;
  this.repeat = false;
}
Example 10
Source File: CiviOutputMeta.java From civicrm-data-integration with GNU General Public License v3.0
public void getFields(RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) {
  // super.getFields(r, origin, info, nextStep, space);
  if (civiCrmResultField != null && !civiCrmResultField.equals("")) {
    ValueMetaInterface v = new ValueMeta(civiCrmResultField, ValueMetaInterface.TYPE_STRING);
    v.setOrigin(origin);
    r.addValueMeta(v);
  }
}
Example 11
Source File: MySQLDatabaseMeta.java From pentaho-kettle with Apache License 2.0
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                  boolean addFieldName, boolean addCr ) {
  String retval = "";

  String fieldname = v.getName();
  if ( v.getLength() == DatabaseMeta.CLOB_LENGTH ) {
    v.setLength( getMaxTextFieldLength() );
  }
  int length = v.getLength();
  int precision = v.getPrecision();

  if ( addFieldName ) {
    retval += fieldname + " ";
  }

  int type = v.getType();
  switch ( type ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
    case ValueMetaInterface.TYPE_DATE:
      retval += "DATETIME";
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      if ( supportsBooleanDataType() ) {
        retval += "BOOLEAN";
      } else {
        retval += "CHAR(1)";
      }
      break;

    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      if ( fieldname.equalsIgnoreCase( tk ) || // Technical key
        fieldname.equalsIgnoreCase( pk ) // Primary key
      ) {
        if ( useAutoinc ) {
          retval += "BIGINT AUTO_INCREMENT NOT NULL PRIMARY KEY";
        } else {
          retval += "BIGINT NOT NULL PRIMARY KEY";
        }
      } else {
        // Integer values...
        if ( precision == 0 ) {
          if ( length > 9 ) {
            if ( length < 19 ) {
              // can hold signed values between -9223372036854775808 and 9223372036854775807
              // 18 significant digits
              retval += "BIGINT";
            } else {
              retval += "DECIMAL(" + length + ")";
            }
          } else {
            retval += "INT";
          }
        } else {
          // Floating point values...
          if ( length > 15 ) {
            retval += "DECIMAL(" + length;
            if ( precision > 0 ) {
              retval += ", " + precision;
            }
            retval += ")";
          } else {
            // A double-precision floating-point number is accurate to approximately 15 decimal places.
            // http://mysql.mirrors-r-us.net/doc/refman/5.1/en/numeric-type-overview.html
            retval += "DOUBLE";
          }
        }
      }
      break;
    case ValueMetaInterface.TYPE_STRING:
      if ( length > 0 ) {
        if ( length == 1 ) {
          retval += "CHAR(1)";
        } else if ( length < 256 ) {
          retval += "VARCHAR(" + length + ")";
        } else if ( length < 65536 ) {
          retval += "TEXT";
        } else if ( length < 16777216 ) {
          retval += "MEDIUMTEXT";
        } else {
          retval += "LONGTEXT";
        }
      } else {
        retval += "TINYTEXT";
      }
      break;
    case ValueMetaInterface.TYPE_BINARY:
      retval += "LONGBLOB";
      break;
    default:
      retval += " UNKNOWN";
      break;
  }

  if ( addCr ) {
    retval += Const.CR;
  }

  return retval;
}
Example 12
Source File: NetezzaDatabaseMeta.java From pentaho-kettle with Apache License 2.0
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                  boolean addFieldName, boolean addCr ) {
  String retval = "";

  String fieldname = v.getName();
  int length = v.getLength();
  int precision = v.getPrecision();

  if ( addFieldName ) {
    retval += fieldname + " ";
  }

  int type = v.getType();
  switch ( type ) {
    case ValueMetaInterface.TYPE_DATE:
      retval += "date";
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      retval += "boolean";
      break;
    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      if ( length > 0 ) {
        if ( precision == 0 ) {
          if ( length <= 2 ) {
            retval += "byteint";
          } else if ( length <= 4 ) {
            retval += "smallint";
          } else if ( length <= 9 ) {
            retval += "integer";
          } else {
            retval += "bigint";
          }
        } else {
          if ( length < 9 ) {
            retval += "real";
          } else if ( length < 18 ) {
            retval += "double";
          } else {
            retval += "numeric(" + length;
            if ( precision > 0 ) {
              retval += ", " + precision;
            }
            retval += ")";
          }
        }
      }
      break;
    case ValueMetaInterface.TYPE_STRING:
      if ( length > MAX_CHAR_LEN ) {
        retval += "varchar(" + MAX_CHAR_LEN + ")";
      } else {
        retval += "varchar(" + length + ")";
      }
      break;
    default:
      retval += " UNKNOWN";
      break;
  }

  if ( addCr ) {
    retval += Const.CR;
  }

  return retval;
}
Example 13
Source File: OpenERPHelper.java From pentaho-kettle with Apache License 2.0
public ArrayList<FieldMapping> getDefaultFieldMappings( String model ) throws Exception {
  ArrayList<FieldMapping> mappings = new ArrayList<FieldMapping>();
  ObjectAdapter adapter = new ObjectAdapter( openERPConnection, model );
  FieldCollection fields = adapter.getFields();

  FieldMapping fieldMap = new FieldMapping();
  fieldMap.source_model = model;
  fieldMap.source_field = "id";
  fieldMap.source_index = -1;
  fieldMap.target_model = model;
  fieldMap.target_field = "id";
  fieldMap.target_field_label = "Database ID";
  fieldMap.target_field_type = ValueMetaInterface.TYPE_INTEGER;
  mappings.add( fieldMap );

  for ( Field field : fields ) {
    fieldMap = new FieldMapping();

    String fieldName = field.getName();

    fieldMap.source_model = model;
    fieldMap.source_field = fieldName;
    fieldMap.source_index = -1;
    fieldMap.target_model = model;
    fieldMap.target_field = fieldName;
    fieldMap.target_field_label = field.getDescription();

    Field.FieldType fieldType = field.getType();
    switch ( fieldType ) {
      case CHAR:
      case TEXT:
      case BINARY: // Binaries are base64 encoded strings
        fieldMap.target_field_type = ValueMetaInterface.TYPE_STRING;
        mappings.add( fieldMap );
        break;
      case BOOLEAN:
        fieldMap.target_field_type = ValueMetaInterface.TYPE_BOOLEAN;
        mappings.add( fieldMap );
        break;
      case FLOAT:
        fieldMap.target_field_type = ValueMetaInterface.TYPE_NUMBER;
        mappings.add( fieldMap );
        break;
      case DATETIME:
      case DATE:
        fieldMap.target_field_type = ValueMetaInterface.TYPE_DATE;
        mappings.add( fieldMap );
        break;
      case MANY2ONE:
        FieldMapping newFieldMap = fieldMap.Clone();

        // Normal id field
        newFieldMap.source_index = 0;
        newFieldMap.target_model = field.getRelation();
        newFieldMap.target_field = fieldName + "_id";
        newFieldMap.target_field_label = field.getDescription() + "/Id";
        newFieldMap.target_field_type = ValueMetaInterface.TYPE_INTEGER;
        mappings.add( newFieldMap );

        // Add name field
        newFieldMap = fieldMap.Clone();
        newFieldMap.source_index = 1;
        newFieldMap.target_model = field.getRelation();
        newFieldMap.target_field = fieldName + "_name";
        newFieldMap.target_field_label = field.getDescription() + "/Name";
        newFieldMap.target_field_type = ValueMetaInterface.TYPE_STRING;
        mappings.add( newFieldMap );
        break;
      case ONE2MANY:
      case MANY2MANY:
      default:
        fieldMap.target_field_type = ValueMetaInterface.TYPE_STRING;
        mappings.add( fieldMap );
        break;
    }
  }

  return mappings;
}
Example 14
Source File: AvroNestedReader.java From pentaho-hadoop-shims with Apache License 2.0
/**
 * Processes a map at this point in the path.
 *
 * @param map           the map to process
 * @param s             the current schema at this point in the path
 * @param ignoreMissing true if null is to be returned for user fields that don't appear in the schema
 * @return the field value or null for out-of-bounds array indexes, non-existent map keys or unsupported avro types.
 * @throws KettleException if a problem occurs
 */
public Object convertToKettleValue( AvroInputField avroInputField, Map<Utf8, Object> map, Schema s,
                                    Schema defaultSchema, boolean ignoreMissing ) throws KettleException {
  if ( map == null ) {
    return null;
  }

  if ( avroInputField.getTempParts().size() == 0 ) {
    throw new KettleException( BaseMessages.getString( PKG, "AvroInput.Error.MalformedPathMap" ) );
  }

  String part = avroInputField.getTempParts().remove( 0 );
  if ( !( part.charAt( 0 ) == '[' ) ) {
    throw new KettleException( BaseMessages.getString( PKG, "AvroInput.Error.MalformedPathMap2", part ) );
  }

  String key = part.substring( 1, part.indexOf( ']' ) );

  if ( part.indexOf( ']' ) < part.length() - 1 ) {
    // more dimensions to the array/map
    part = part.substring( part.indexOf( ']' ) + 1, part.length() );
    avroInputField.getTempParts().add( 0, part );
  }

  Object value = map.get( new Utf8( key ) );
  if ( value == null ) {
    return null;
  }

  Schema valueType = s.getValueType();

  if ( valueType.getType() == Schema.Type.UNION ) {
    if ( value instanceof GenericContainer ) {
      // we can ask these things for their schema (covers
      // records, arrays, enums and fixed)
      valueType = ( (GenericContainer) value ).getSchema();
    } else {
      // either have a map or primitive here
      if ( value instanceof Map ) {
        // now have to look for the schema of the map
        Schema mapSchema = null;
        for ( Schema ts : valueType.getTypes() ) {
          if ( ts.getType() == Schema.Type.MAP ) {
            mapSchema = ts;
            break;
          }
        }
        if ( mapSchema == null ) {
          throw new KettleException( BaseMessages.getString( PKG, "AvroInput.Error.UnableToFindSchemaForUnionMap" ) );
        }
        valueType = mapSchema;
      } else {
        if ( avroInputField.getTempValueMeta().getType() != ValueMetaInterface.TYPE_STRING ) {
          // we have a two element union, where one element is the type
          // "null". So in this case we actually have just one type and can
          // output specific values of it (instead of using String as a
          // catch all for varying primitive types in the union)
          valueType = checkUnion( valueType );
        } else {
          // use the string representation of the value
          valueType = Schema.create( Schema.Type.STRING );
        }
      }
    }
  }

  // what have we got?
  if ( valueType.getType() == Schema.Type.RECORD ) {
    return convertToKettleValue( avroInputField, (GenericData.Record) value, valueType, defaultSchema, ignoreMissing );
  } else if ( valueType.getType() == Schema.Type.ARRAY ) {
    return convertToKettleValue( avroInputField, (GenericData.Array) value, valueType, defaultSchema, ignoreMissing );
  } else if ( valueType.getType() == Schema.Type.MAP ) {
    return convertToKettleValue( avroInputField, (Map<Utf8, Object>) value, valueType, defaultSchema, ignoreMissing );
  } else {
    // assume a primitive
    return getPrimitive( avroInputField, value, valueType );
  }
}
Example 15
Source File: AvroNestedReader.java From pentaho-hadoop-shims with Apache License 2.0
/**
 * Perform Kettle type conversions for the Avro leaf field value.
 *
 * @param fieldValue the leaf value from the Avro structure
 * @return an Object of the appropriate Kettle type
 * @throws KettleException if a problem occurs
 */
protected Object getKettleValue( AvroInputField avroInputField, Object fieldValue ) throws KettleException {

  switch ( avroInputField.getTempValueMeta().getType() ) {
    case ValueMetaInterface.TYPE_BIGNUMBER:
      return avroInputField.getTempValueMeta().getBigNumber( fieldValue );
    case ValueMetaInterface.TYPE_BINARY:
      return avroInputField.getTempValueMeta().getBinary( fieldValue );
    case ValueMetaInterface.TYPE_BOOLEAN:
      return avroInputField.getTempValueMeta().getBoolean( fieldValue );
    case ValueMetaInterface.TYPE_DATE:
      if ( avroInputField.getAvroType().getBaseType() == AvroSpec.DataType.INTEGER.getBaseType() ) {
        LocalDate localDate = LocalDate.ofEpochDay( 0 ).plusDays( (Long) fieldValue );
        return Date.from( localDate.atStartOfDay( ZoneId.systemDefault() ).toInstant() );
      } else if ( avroInputField.getAvroType().getBaseType() == AvroSpec.DataType.STRING.getBaseType() ) {
        Object pentahoData = null;
        String dateFormatStr = avroInputField.getStringFormat();
        if ( ( dateFormatStr == null ) || ( dateFormatStr.trim().length() == 0 ) ) {
          dateFormatStr = ValueMetaBase.DEFAULT_DATE_FORMAT_MASK;
        }
        SimpleDateFormat datePattern = new SimpleDateFormat( dateFormatStr );
        try {
          return datePattern.parse( fieldValue.toString() );
        } catch ( Exception e ) {
          return null;
        }
      }
      return avroInputField.getTempValueMeta().getDate( fieldValue );
    case ValueMetaInterface.TYPE_TIMESTAMP:
      return new Timestamp( (Long) fieldValue );
    case ValueMetaInterface.TYPE_INTEGER:
      return avroInputField.getTempValueMeta().getInteger( fieldValue );
    case ValueMetaInterface.TYPE_NUMBER:
      return avroInputField.getTempValueMeta().getNumber( fieldValue );
    case ValueMetaInterface.TYPE_STRING:
      return avroInputField.getTempValueMeta().getString( fieldValue );
    case ValueMetaInterface.TYPE_INET:
      try {
        return InetAddress.getByName( fieldValue.toString() );
      } catch ( UnknownHostException ex ) {
        return null;
      }
    default:
      return null;
  }
}
Example 16
Source File: ExcelInputDialog.java From pentaho-kettle with Apache License 2.0
/**
 * Processing excel workbook, filling fields
 *
 * @param fields   RowMetaInterface for filling fields
 * @param info     ExcelInputMeta
 * @param workbook excel workbook for processing
 * @throws KettlePluginException
 */
private void processingWorkbook( RowMetaInterface fields, ExcelInputMeta info, KWorkbook workbook )
  throws KettlePluginException {
  int nrSheets = workbook.getNumberOfSheets();
  for ( int j = 0; j < nrSheets; j++ ) {
    KSheet sheet = workbook.getSheet( j );

    // See if it's a selected sheet:
    int sheetIndex;
    if ( info.readAllSheets() ) {
      sheetIndex = 0;
    } else {
      sheetIndex = Const.indexOfString( sheet.getName(), info.getSheetName() );
    }
    if ( sheetIndex >= 0 ) {
      // We suppose it's the complete range we're looking for...
      //
      int rownr = 0;
      int startcol = 0;

      if ( info.readAllSheets() ) {
        if ( info.getStartColumn().length == 1 ) {
          startcol = info.getStartColumn()[ 0 ];
        }
        if ( info.getStartRow().length == 1 ) {
          rownr = info.getStartRow()[ 0 ];
        }
      } else {
        rownr = info.getStartRow()[ sheetIndex ];
        startcol = info.getStartColumn()[ sheetIndex ];
      }

      boolean stop = false;
      for ( int colnr = startcol; !stop; colnr++ ) {
        try {
          String fieldname = null;
          int fieldtype = ValueMetaInterface.TYPE_NONE;

          KCell cell = sheet.getCell( colnr, rownr );
          if ( cell == null ) {
            stop = true;
          } else {
            if ( cell.getType() != KCellType.EMPTY ) {
              // We found a field.
              fieldname = cell.getContents();
            }

            // System.out.println("Fieldname = "+fieldname);

            KCell below = sheet.getCell( colnr, rownr + 1 );
            if ( below != null ) {
              if ( below.getType() == KCellType.BOOLEAN ) {
                fieldtype = ValueMetaInterface.TYPE_BOOLEAN;
              } else if ( below.getType() == KCellType.DATE ) {
                fieldtype = ValueMetaInterface.TYPE_DATE;
              } else if ( below.getType() == KCellType.LABEL ) {
                fieldtype = ValueMetaInterface.TYPE_STRING;
              } else if ( below.getType() == KCellType.NUMBER ) {
                fieldtype = ValueMetaInterface.TYPE_NUMBER;
              } else {
                fieldtype = ValueMetaInterface.TYPE_STRING;
              }
            } else {
              fieldtype = ValueMetaInterface.TYPE_STRING;
            }

            if ( Utils.isEmpty( fieldname ) ) {
              stop = true;
            } else {
              if ( fieldtype != ValueMetaInterface.TYPE_NONE ) {
                ValueMetaInterface field = ValueMetaFactory.createValueMeta( fieldname, fieldtype );
                fields.addValueMeta( field );
              }
            }
          }
        } catch ( ArrayIndexOutOfBoundsException aioobe ) {
          // System.out.println("index out of bounds at column "+colnr+" : "+aioobe.toString());
          stop = true;
        }
      }
    }
  }
}
Example 17
Source File: ParameterSimpleTransIT.java From pentaho-kettle with Apache License 2.0
/**
 * Test case for parameters using a simple transformation. Here 1 parameter is not provided as value, so the default
 * will be used.
 *
 * @throws Exception exception on any problem.
 */
public void testParameterSimpleTrans2() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "parameter_simple_trans2" );

  PluginRegistry registry = PluginRegistry.getInstance();

  //
  // create a get variables step...
  //
  String getVariablesStepname = "get variables step";
  GetVariableMeta gvm = new GetVariableMeta();

  // Set the information of the get variables step.
  String getVariablesPid = registry.getPluginId( StepPluginType.class, gvm );
  StepMeta getVariablesStep = new StepMeta( getVariablesPid, getVariablesStepname, gvm );
  transMeta.addStep( getVariablesStep );

  //
  // Generate 1 row
  //
  String[] fieldName = { "Param1", "PARAM2" };
  String[] varName = { "${Param1}", "%%PARAM2%%" };
  int[] fieldType = { ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_STRING };
  int[] length = { -1, -1 };
  int[] precision = { -1, -1 };
  String[] format = { "", "" };
  String[] currency = { "", "" };
  String[] decimal = { "", "" };
  String[] grouping = { "", "" };
  int[] trimType = { ValueMetaInterface.TRIM_TYPE_NONE, ValueMetaInterface.TRIM_TYPE_NONE };

  FieldDefinition[] fields = new FieldDefinition[fieldName.length];
  for ( int i = 0; i < fields.length; i++ ) {
    FieldDefinition field = new FieldDefinition();
    field.setFieldName( fieldName[i] );
    field.setVariableString( varName[i] );
    field.setFieldType( fieldType[i] );
    field.setFieldLength( length[i] );
    field.setFieldPrecision( precision[i] );
    field.setFieldFormat( format[i] );
    field.setCurrency( currency[i] );
    field.setDecimal( decimal[i] );
    field.setGroup( grouping[i] );
    field.setTrimType( trimType[i] );
    fields[i] = field;
  }
  gvm.setFieldDefinitions( fields );

  //
  // Create a dummy step 1
  //
  String dummyStepname1 = "dummy step 1";
  DummyTransMeta dm1 = new DummyTransMeta();

  String dummyPid1 = registry.getPluginId( StepPluginType.class, dm1 );
  StepMeta dummyStep1 = new StepMeta( dummyPid1, dummyStepname1, dm1 );
  transMeta.addStep( dummyStep1 );

  TransHopMeta hi1 = new TransHopMeta( getVariablesStep, dummyStep1 );
  transMeta.addTransHop( hi1 );

  // Now execute the transformation...
  Trans trans = new Trans( transMeta );
  trans.addParameterDefinition( "Param1", "default1", "Parameter 1" );
  trans.addParameterDefinition( "PARAM2", "default2", "Parameter 2" );
  trans.setParameterValue( "Param1", "ParamValue1" );
  // PARAM2 is not set
  trans.prepareExecution( null );

  StepInterface si = trans.getStepInterface( dummyStepname1, 0 );
  RowStepCollector endRc = new RowStepCollector();
  si.addRowListener( endRc );

  trans.startThreads();
  trans.waitUntilFinished();

  // Now check whether the output is still as we expect.
  List<RowMetaAndData> goldenImageRows = createResultData2();
  List<RowMetaAndData> resultRows1 = endRc.getRowsWritten();
  checkRows( resultRows1, goldenImageRows );
}
Example 18
Source File: FileStreamDialog.java From pentaho-kettle with Apache License 2.0
@Override
protected int[] getFieldTypes() {
  return new int[]{ ValueMetaInterface.TYPE_STRING };
}
Example 19
Source File: KettleToBQTableRowFn.java From kettle-beam with Apache License 2.0
@Override
public TableRow apply( KettleRow inputRow ) {

  try {
    if ( rowMeta == null ) {
      readCounter = Metrics.counter( "read", counterName );
      outputCounter = Metrics.counter( "output", counterName );
      errorCounter = Metrics.counter( "error", counterName );

      // Initialize Kettle Beam
      //
      BeamKettle.init( stepPluginClasses, xpPluginClasses );
      rowMeta = JsonRowMeta.fromJson( rowMetaJson );

      simpleDateFormat = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss.SSS" );
      Metrics.counter( "init", counterName ).inc();
    }

    readCounter.inc();

    TableRow tableRow = new TableRow();
    for ( int i = 0; i < rowMeta.size(); i++ ) {
      ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
      Object valueData = inputRow.getRow()[i];
      if ( !valueMeta.isNull( valueData ) ) {
        switch ( valueMeta.getType() ) {
          case ValueMetaInterface.TYPE_STRING:
            tableRow.put( valueMeta.getName(), valueMeta.getString( valueData ) );
            break;
          case ValueMetaInterface.TYPE_INTEGER:
            tableRow.put( valueMeta.getName(), valueMeta.getInteger( valueData ) );
            break;
          case ValueMetaInterface.TYPE_DATE:
            Date date = valueMeta.getDate( valueData );
            String formattedDate = simpleDateFormat.format( date );
            tableRow.put( valueMeta.getName(), formattedDate );
            break;
          case ValueMetaInterface.TYPE_BOOLEAN:
            tableRow.put( valueMeta.getName(), valueMeta.getBoolean( valueData ) );
            break;
          case ValueMetaInterface.TYPE_NUMBER:
            tableRow.put( valueMeta.getName(), valueMeta.getNumber( valueData ) );
            break;
          default:
            throw new RuntimeException( "Data type conversion from Kettle to BigQuery TableRow not supported yet: "
              + valueMeta.toString() );
        }
      }
    }

    // Pass the row to the process context
    //
    outputCounter.inc();

    return tableRow;

  } catch ( Exception e ) {
    errorCounter.inc();
    LOG.info( "Conversion error KettleRow to BigQuery TableRow : " + e.getMessage() );
    throw new RuntimeException( "Error converting KettleRow to BigQuery TableRow", e );
  }
}
Example 20
Source File: XMLOutput.java From pentaho-kettle with Apache License 2.0
private boolean isNullValueAllowed( int valueMetaType ) {

  //Check if retro compatibility is set or not, to guaranty compatibility with older versions.
  //In 6.1 null values were written with string "null". Since then the attribute is not written.
  String val = getVariable( Const.KETTLE_COMPATIBILITY_XML_OUTPUT_NULL_VALUES, "N" );

  return ValueMetaBase.convertStringToBoolean( Const.NVL( val, "N" ) ) && valueMetaType == ValueMetaInterface.TYPE_STRING;
}