Java Code Examples for org.pentaho.di.core.row.ValueMetaInterface#isNull()
The following examples show how to use org.pentaho.di.core.row.ValueMetaInterface#isNull().
They are taken from open source projects; the original project and source file are noted above each example.
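All of the examples share the same basic pattern: look up the ValueMetaInterface that describes a field, then pass the raw Object from the row array to isNull() before converting or otherwise using it. The following minimal sketch shows that pattern in isolation; the single String field "name" and the hard-coded row are purely illustrative, not taken from any of the projects below.

import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;

public class IsNullSketch {
  public static void main( String[] args ) throws Exception {
    // Hypothetical one-field row metadata, just for illustration.
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta( new ValueMetaString( "name" ) );

    Object[] row = new Object[] { null }; // the raw row data that accompanies the metadata

    ValueMetaInterface valueMeta = rowMeta.getValueMeta( 0 );
    if ( valueMeta.isNull( row[0] ) ) {
      // Typical reactions in the examples below: skip the field, substitute a
      // configured null-string, or flag the field to be explicitly set to null.
      System.out.println( valueMeta.getName() + " is null" );
    } else {
      System.out.println( valueMeta.getName() + " = " + valueMeta.getString( row[0] ) );
    }
  }
}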
Example 1
Source File: EditRowsDialog.java From pentaho-kettle with Apache License 2.0
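This dialog helper converts the text displayed in a table row back into typed values; isNull() on the string value metadata turns empty cell text into a real null before conversion.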
@VisibleForTesting
Object[] getRowForData( TableItem item, int rowNr ) throws KettleException {
  try {
    Object[] row = RowDataUtil.allocateRowData( rowMeta.size() );
    for ( int i = 0; i < rowMeta.size(); i++ ) {
      ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
      ValueMetaInterface stringValueMeta = stringRowMeta.getValueMeta( i );
      int colnr = i + 1;
      if ( isDisplayingNullValue( item, colnr ) ) {
        row[i] = null; // <null> value
      } else {
        String string = item.getText( colnr );
        if ( stringValueMeta.isNull( string ) ) {
          string = null;
        }
        row[i] = valueMeta.convertDataFromString( string, stringValueMeta, null, null,
          ValueMetaInterface.TRIM_TYPE_NONE );
      }
    }
    return row;
  } catch ( KettleException e ) {
    throw new KettleException( BaseMessages.getString( PKG, "EditRowsDialog.Error.ErrorGettingRowForData",
      Integer.toString( rowNr ) ), e );
  }
}
Example 2
Source File: CassandraOutputData.java From learning-hadoop with Apache License 2.0
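A pre-insert check that uses isNull() twice: first to reject rows whose key field is null, then to verify that at least one non-key field actually carries a value.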
/**
 * Checks for null row key and rows with no non-null values
 *
 * @param inputMeta the input row meta
 * @param keyIndex the index of the key field in the incoming row data
 * @param row the row to check
 * @param log logging
 * @return true if the row is OK
 * @throws KettleException if a problem occurs
 */
protected static boolean preAddChecks(RowMetaInterface inputMeta, int keyIndex, Object[] row,
    LogChannelInterface log) throws KettleException {

  // check the key first
  ValueMetaInterface keyMeta = inputMeta.getValueMeta(keyIndex);
  if (keyMeta.isNull(row[keyIndex])) {
    log.logError(BaseMessages.getString(CassandraOutputMeta.PKG,
        "CassandraOutput.Error.SkippingRowNullKey", row));
    return false;
  }

  // quick scan to see if we have at least one non-null value apart from the key
  boolean ok = false;
  for (int i = 0; i < inputMeta.size(); i++) {
    if (i != keyIndex) {
      ValueMetaInterface v = inputMeta.getValueMeta(i);
      if (!v.isNull(row[i])) {
        ok = true;
        break;
      }
    }
  }

  if (!ok) {
    log.logError(BaseMessages.getString(CassandraOutputMeta.PKG,
        "CassandraOutput.Error.SkippingRowNoNonNullValues",
        keyMeta.getString(row[keyIndex])));
  }

  return ok;
}
Example 3
Source File: GroupBy.java From pentaho-kettle with Apache License 2.0
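Computes cumulative sums; isNull() decides whether the running total must be seeded from the source value, and whether a null source value should simply carry the previous total forward.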
private void addCumulativeSums( Object[] row ) throws KettleValueException {
  // We need to adjust this row with the cumulative sums...
  //
  for ( int i = 0; i < data.cumulativeSumSourceIndexes.size(); i++ ) {
    int sourceIndex = data.cumulativeSumSourceIndexes.get( i );
    Object previousTarget = data.previousSums[ i ];
    Object sourceValue = row[ sourceIndex ];

    int targetIndex = data.cumulativeSumTargetIndexes.get( i );

    ValueMetaInterface sourceMeta = data.inputRowMeta.getValueMeta( sourceIndex );
    ValueMetaInterface targetMeta = data.outputRowMeta.getValueMeta( targetIndex );

    // If the previous values were null, or this is the first time around, just take the source value...
    //
    if ( targetMeta.isNull( previousTarget ) ) {
      row[ targetIndex ] = sourceMeta.convertToNormalStorageType( sourceValue );
    } else {
      // If the source value is null, just take the previous target value
      //
      if ( sourceMeta.isNull( sourceValue ) ) {
        row[ targetIndex ] = previousTarget;
      } else {
        row[ targetIndex ] = ValueDataUtil.plus( targetMeta, data.previousSums[ i ], sourceMeta, row[ sourceIndex ] );
      }
    }
    data.previousSums[ i ] = row[ targetIndex ];
  }
}
Example 4
Source File: KettleToBQTableRowFn.java From kettle-beam with Apache License 2.0
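Maps a Kettle row onto a BigQuery TableRow; fields for which isNull() returns true are left out of the output record entirely.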
@Override
public TableRow apply( KettleRow inputRow ) {
  try {
    if ( rowMeta == null ) {
      readCounter = Metrics.counter( "read", counterName );
      outputCounter = Metrics.counter( "output", counterName );
      errorCounter = Metrics.counter( "error", counterName );

      // Initialize Kettle Beam
      //
      BeamKettle.init( stepPluginClasses, xpPluginClasses );
      rowMeta = JsonRowMeta.fromJson( rowMetaJson );

      simpleDateFormat = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss.SSS" );
      Metrics.counter( "init", counterName ).inc();
    }

    readCounter.inc();

    TableRow tableRow = new TableRow();
    for ( int i = 0; i < rowMeta.size(); i++ ) {
      ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
      Object valueData = inputRow.getRow()[ i ];
      if ( !valueMeta.isNull( valueData ) ) {
        switch ( valueMeta.getType() ) {
          case ValueMetaInterface.TYPE_STRING:
            tableRow.put( valueMeta.getName(), valueMeta.getString( valueData ) );
            break;
          case ValueMetaInterface.TYPE_INTEGER:
            tableRow.put( valueMeta.getName(), valueMeta.getInteger( valueData ) );
            break;
          case ValueMetaInterface.TYPE_DATE:
            Date date = valueMeta.getDate( valueData );
            String formattedDate = simpleDateFormat.format( date );
            tableRow.put( valueMeta.getName(), formattedDate );
            break;
          case ValueMetaInterface.TYPE_BOOLEAN:
            tableRow.put( valueMeta.getName(), valueMeta.getBoolean( valueData ) );
            break;
          case ValueMetaInterface.TYPE_NUMBER:
            tableRow.put( valueMeta.getName(), valueMeta.getNumber( valueData ) );
            break;
          default:
            throw new RuntimeException( "Data type conversion from Kettle to BigQuery TableRow not supported yet: "
              + valueMeta.toString() );
        }
      }
    }

    // Pass the row to the process context
    //
    outputCounter.inc();
    return tableRow;

  } catch ( Exception e ) {
    errorCounter.inc();
    LOG.info( "Conversion error KettleRow to BigQuery TableRow : " + e.getMessage() );
    throw new RuntimeException( "Error converting KettleRow to BigQuery TableRow", e );
  }
}
Example 5
Source File: CassandraOutputData.java From learning-hadoop with Apache License 2.0
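Builds a Thrift mutation batch for Cassandra; key validation is delegated to preAddChecks() (Example 2), and isNull() keeps null columns out of the batch.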
/**
 * Adds a kettle row to a thrift-based batch (builds the map of keys to mutations).
 *
 * @param thriftBatch the map of keys to mutations
 * @param colFamilyName the name of the column family (table) to insert into
 * @param inputMeta Kettle input row meta data
 * @param keyIndex the index of the incoming field to use as the key for inserting
 * @param row the Kettle row
 * @param cassandraMeta meta data on the columns in the cassandra column family (table)
 * @param insertFieldsNotInMetaData true if any Kettle fields that are not in the Cassandra
 *          column family (table) meta data are to be inserted. This is irrelevant if the user
 *          has opted to have the step initially update the Cassandra meta data for incoming
 *          fields that are not known about.
 * @return true if the row was added to the batch
 * @throws KettleException if a problem occurs
 */
public static boolean addRowToThriftBatch(
    Map<ByteBuffer, Map<String, List<Mutation>>> thriftBatch, String colFamilyName,
    RowMetaInterface inputMeta, int keyIndex, Object[] row,
    CassandraColumnMetaData cassandraMeta, boolean insertFieldsNotInMetaData,
    LogChannelInterface log, boolean isAsIndexColumn) throws KettleException {

  if (!preAddChecks(inputMeta, keyIndex, row, log)) {
    return false;
  }
  ValueMetaInterface keyMeta = inputMeta.getValueMeta(keyIndex);

  ByteBuffer keyBuff = cassandraMeta.kettleValueToByteBuffer(keyMeta, row[keyIndex], true);

  Map<String, List<Mutation>> mapCF = thriftBatch.get(keyBuff);
  List<Mutation> mutList = null;

  // check to see if we have already got some mutations for this key in the batch
  if (mapCF != null) {
    mutList = mapCF.get(colFamilyName);
  } else {
    mapCF = new HashMap<String, List<Mutation>>(1);
    mutList = new ArrayList<Mutation>();
  }
  // guard: the batch may already have mutations for this key but not yet for this
  // column family, in which case the lookup above returns null
  if (mutList == null) {
    mutList = new ArrayList<Mutation>();
  }

  for (int i = 0; i < inputMeta.size(); i++) {
    if (i != keyIndex) {
      ValueMetaInterface colMeta = inputMeta.getValueMeta(i);
      String colName = colMeta.getName();
      if (!cassandraMeta.columnExistsInSchema(colName) && !insertFieldsNotInMetaData) {
        continue;
      }

      // don't insert if null!
      if (colMeta.isNull(row[i])) {
        continue;
      }

      Column col = new Column(cassandraMeta.columnNameToByteBuffer(colName));
      if (isAsIndexColumn) {
        col = col.setValue(cassandraMeta.kettleValueToByteBuffer(colMeta, "-", false));
      } else {
        col = col.setValue(cassandraMeta.kettleValueToByteBuffer(colMeta, row[i], false));
      }

      col = col.setTimestamp(System.currentTimeMillis());
      ColumnOrSuperColumn cosc = new ColumnOrSuperColumn();
      cosc.setColumn(col);
      Mutation mut = new Mutation();
      mut.setColumn_or_supercolumn(cosc);
      mutList.add(mut);
    }
  }

  // column family name -> mutations
  mapCF.put(colFamilyName, mutList);

  // row key -> column family -> mutations
  thriftBatch.put(keyBuff, mapCF);

  return true;
}
Example 6
Source File: GroupBy.java From pentaho-kettle with Apache License 2.0
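The cumulative-average counterpart of Example 3; a second isNull() check counts only non-null source values, so the running average divides by the correct denominator.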
private void addCumulativeAverages( Object[] row ) throws KettleValueException {
  // We need to adjust this row with the cumulative averages...
  //
  for ( int i = 0; i < data.cumulativeAvgSourceIndexes.size(); i++ ) {
    int sourceIndex = data.cumulativeAvgSourceIndexes.get( i );
    Object previousTarget = data.previousAvgSum[ i ];
    Object sourceValue = row[ sourceIndex ];

    int targetIndex = data.cumulativeAvgTargetIndexes.get( i );

    ValueMetaInterface sourceMeta = data.inputRowMeta.getValueMeta( sourceIndex );
    ValueMetaInterface targetMeta = data.outputRowMeta.getValueMeta( targetIndex );

    // If the previous values were null, or this is the first time around, just take the source value...
    //
    Object sum = null;

    if ( targetMeta.isNull( previousTarget ) ) {
      sum = sourceMeta.convertToNormalStorageType( sourceValue );
    } else {
      // If the source value is null, just take the previous target value
      //
      if ( sourceMeta.isNull( sourceValue ) ) {
        sum = previousTarget;
      } else {
        if ( sourceMeta.isInteger() ) {
          sum = ValueDataUtil.plus( data.valueMetaInteger, data.previousAvgSum[ i ], sourceMeta, row[ sourceIndex ] );
        } else {
          sum = ValueDataUtil.plus( targetMeta, data.previousAvgSum[ i ], sourceMeta, row[ sourceIndex ] );
        }
      }
    }
    data.previousAvgSum[ i ] = sum;

    if ( !sourceMeta.isNull( sourceValue ) ) {
      data.previousAvgCount[ i ]++;
    }

    if ( sourceMeta.isInteger() ) {
      // The average of integers is returned as a number (double)...
      //
      if ( sum == null ) {
        row[ targetIndex ] = null;
      } else {
        row[ targetIndex ] = new Double( ( (Long) sum ).doubleValue() / data.previousAvgCount[ i ] );
      }
    } else {
      row[ targetIndex ] = ValueDataUtil.divide( targetMeta, sum, data.valueMetaInteger, data.previousAvgCount[ i ] );
    }
  }
}
Example 7
Source File: TextFileOutput.java From pentaho-kettle with Apache License 2.0
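Writes a single field to a text file; when isNull() reports a null value, the configured null-string representation is written instead.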
private void writeField( ValueMetaInterface v, Object valueData, byte[] nullString ) throws KettleStepException {
  try {
    byte[] str;

    // First check whether or not we have a null string set
    // These values should be set when a null value passes
    //
    if ( nullString != null && v.isNull( valueData ) ) {
      str = nullString;
    } else {
      if ( meta.isFastDump() ) {
        if ( valueData instanceof byte[] ) {
          str = (byte[]) valueData;
        } else {
          str = getBinaryString( ( valueData == null ) ? "" : valueData.toString() );
        }
      } else {
        str = formatField( v, valueData );
      }
    }

    if ( str != null && str.length > 0 ) {
      List<Integer> enclosures = null;
      boolean writeEnclosures = false;

      if ( v.isString() ) {
        if ( meta.isEnclosureForced() && !meta.isPadded() ) {
          writeEnclosures = true;
        } else if ( !meta.isEnclosureFixDisabled()
            && containsSeparatorOrEnclosure( str, data.binarySeparator, data.binaryEnclosure ) ) {
          writeEnclosures = true;
        }
      }

      if ( writeEnclosures ) {
        data.writer.write( data.binaryEnclosure );
        enclosures = getEnclosurePositions( str );
      }

      if ( enclosures == null ) {
        data.writer.write( str );
      } else {
        // Skip the enclosures, double them instead...
        int from = 0;
        for ( int i = 0; i < enclosures.size(); i++ ) {
          int position = enclosures.get( i );
          data.writer.write( str, from, position + data.binaryEnclosure.length - from );
          data.writer.write( data.binaryEnclosure ); // write enclosure a second time
          from = position + data.binaryEnclosure.length;
        }
        if ( from < str.length ) {
          data.writer.write( str, from, str.length - from );
        }
      }

      if ( writeEnclosures ) {
        data.writer.write( data.binaryEnclosure );
      }
    }
  } catch ( Exception e ) {
    throw new KettleStepException( "Error writing field content to file", e );
  }
}
Example 8
Source File: ExcelInput.java From pentaho-kettle with Apache License 2.0
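Reads rows from Excel workbooks; isNull() detects empty output fields so that fields marked as "repeated" can inherit their value from the previous row.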
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  meta = (ExcelInputMeta) smi;
  data = (ExcelInputData) sdi;

  if ( first ) {
    first = false;

    data.outputRowMeta = new RowMeta(); // start from scratch!
    meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );

    if ( meta.isAcceptingFilenames() ) {
      // Read the files from the specified input stream...
      //
      data.files.getFiles().clear();

      int idx = -1;
      RowSet rowSet = findInputRowSet( meta.getAcceptingStepName() );
      Object[] fileRow = getRowFrom( rowSet );
      while ( fileRow != null ) {
        if ( idx < 0 ) {
          idx = rowSet.getRowMeta().indexOfValue( meta.getAcceptingField() );
          if ( idx < 0 ) {
            logError( BaseMessages.getString( PKG, "ExcelInput.Error.FilenameFieldNotFound", ""
              + meta.getAcceptingField() ) );
            setErrors( 1 );
            stopAll();
            return false;
          }
        }
        String fileValue = rowSet.getRowMeta().getString( fileRow, idx );
        try {
          data.files.addFile( KettleVFS.getFileObject( fileValue, getTransMeta() ) );
        } catch ( KettleFileException e ) {
          throw new KettleException( BaseMessages.getString( PKG, "ExcelInput.Exception.CanNotCreateFileObject",
            fileValue ), e );
        }

        // Grab another row
        fileRow = getRowFrom( rowSet );
      }
    }

    handleMissingFiles();
  }

  // See if we're not done processing...
  // We are done processing if the filenr >= number of files.
  if ( data.filenr >= data.files.nrOfFiles() ) {
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "ExcelInput.Log.NoMoreFiles", "" + data.filenr ) );
    }

    setOutputDone(); // signal end to receiver(s)
    return false; // end of data or error.
  }

  if ( meta.getRowLimit() > 0 && getLinesInput() >= meta.getRowLimit() ) {
    // The close of the openFile is in dispose()
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "ExcelInput.Log.RowLimitReached", "" + meta.getRowLimit() ) );
    }

    setOutputDone(); // signal end to receiver(s)
    return false; // end of data or error.
  }

  Object[] r = getRowFromWorkbooks();
  if ( r != null ) {
    incrementLinesInput();

    // OK, see if we need to repeat values.
    if ( data.previousRow != null ) {
      for ( int i = 0; i < meta.getField().length; i++ ) {
        ValueMetaInterface valueMeta = data.outputRowMeta.getValueMeta( i );
        Object valueData = r[ i ];

        if ( valueMeta.isNull( valueData ) && meta.getField()[ i ].isRepeated() ) {
          // Take the value from the previous row.
          r[ i ] = data.previousRow[ i ];
        }
      }
    }

    // Remember this row for the next time around!
    data.previousRow = data.outputRowMeta.cloneRow( r );

    // Send out the good news: we found a row of data!
    putRow( data.outputRowMeta, r );

    return true;
  } else {
    // This row is ignored / eaten
    // We continue though.
    return true;
  }
}
Example 9
Source File: BaseDatabaseMeta.java From pentaho-kettle with Apache License 2.0
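Renders a value as a SQL literal; isNull() maps Java nulls to the literal null, while other values are quoted or formatted according to their type.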
@Override
public String getSQLValue( ValueMetaInterface valueMeta, Object valueData, String dateFormat )
  throws KettleValueException {

  StringBuilder ins = new StringBuilder();

  if ( valueMeta.isNull( valueData ) ) {
    ins.append( "null" );
  } else {
    // Normal cases...
    //
    switch ( valueMeta.getType() ) {
      case ValueMetaInterface.TYPE_BOOLEAN:
      case ValueMetaInterface.TYPE_STRING:
        String string = valueMeta.getString( valueData );
        // Have the database dialect do the quoting.
        // This also adds the single quotes around the string (thanks to PostgreSQL)
        //
        string = quoteSQLString( string );
        ins.append( string );
        break;
      case ValueMetaInterface.TYPE_DATE:
        Date date = valueMeta.getDate( valueData );

        if ( Utils.isEmpty( dateFormat ) ) {
          ins.append( "'" + valueMeta.getString( valueData ) + "'" );
        } else {
          try {
            java.text.SimpleDateFormat formatter = new java.text.SimpleDateFormat( dateFormat );
            ins.append( "'" + formatter.format( date ) + "'" );
          } catch ( Exception e ) {
            throw new KettleValueException( "Error : ", e );
          }
        }
        break;
      default:
        ins.append( valueMeta.getString( valueData ) );
        break;
    }
  }

  return ins.toString();
}
Example 10
Source File: AggregateRows.java From pentaho-kettle with Apache License 2.0
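Aggregates incoming rows; only values that pass the isNull() check are counted and fed into SUM/AVERAGE/MIN/MAX, while a second switch handles the FIRST_NULL and LAST_NULL types that accept nulls.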
private synchronized void AddAggregate( RowMetaInterface rowMeta, Object[] r ) throws KettleValueException {
  for ( int i = 0; i < data.fieldnrs.length; i++ ) {
    ValueMetaInterface valueMeta = rowMeta.getValueMeta( data.fieldnrs[i] );
    Object valueData = r[data.fieldnrs[i]];

    if ( !valueMeta.isNull( valueData ) ) {
      data.counts[i]++; // only count non-null values!

      switch ( meta.getAggregateType()[i] ) {
        case AggregateRowsMeta.TYPE_AGGREGATE_SUM:
        case AggregateRowsMeta.TYPE_AGGREGATE_AVERAGE:
          Double number = valueMeta.getNumber( valueData );
          if ( data.values[i] == null ) {
            data.values[i] = number;
          } else {
            data.values[i] = new Double( ( (Double) data.values[i] ).doubleValue() + number.doubleValue() );
          }
          break;
        case AggregateRowsMeta.TYPE_AGGREGATE_MIN:
          if ( data.values[i] == null ) {
            data.values[i] = valueData;
          } else {
            // keep the smaller of the stored and incoming values
            if ( valueMeta.compare( data.values[i], valueData ) > 0 ) {
              data.values[i] = valueData;
            }
          }
          break;
        case AggregateRowsMeta.TYPE_AGGREGATE_MAX:
          if ( data.values[i] == null ) {
            data.values[i] = valueData;
          } else {
            // keep the larger of the stored and incoming values
            if ( valueMeta.compare( data.values[i], valueData ) < 0 ) {
              data.values[i] = valueData;
            }
          }
          break;
        case AggregateRowsMeta.TYPE_AGGREGATE_NONE:
        case AggregateRowsMeta.TYPE_AGGREGATE_FIRST:
          if ( data.values[i] == null ) {
            data.values[i] = valueData;
          }
          break;
        case AggregateRowsMeta.TYPE_AGGREGATE_LAST:
          data.values[i] = valueData;
          break;
        default:
          break;
      }
    }

    switch ( meta.getAggregateType()[i] ) {
      case AggregateRowsMeta.TYPE_AGGREGATE_FIRST_NULL: // First value, EVEN if it's NULL:
        if ( data.values[i] == null ) {
          data.values[i] = valueData;
        }
        break;
      case AggregateRowsMeta.TYPE_AGGREGATE_LAST_NULL: // Last value, EVEN if it's NULL:
        data.values[i] = valueData;
        break;
      default:
        break;
    }
  }
}
Example 11
Source File: AddXML.java From pentaho-kettle with Apache License 2.0
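Formats a field value for XML output; isNull() selects the field's configured null string (or an empty string) whenever the value, or its metadata, is missing.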
private String formatField( ValueMetaInterface valueMeta, Object valueData, XMLField field )
  throws KettleValueException {
  String retval = "";

  if ( field == null ) {
    return "";
  }

  if ( valueMeta == null || valueMeta.isNull( valueData ) ) {
    String defaultNullValue = field.getNullString();
    return Utils.isEmpty( defaultNullValue ) ? "" : defaultNullValue;
  }

  if ( valueMeta.isNumeric() ) {
    // Formatting
    if ( !Utils.isEmpty( field.getFormat() ) ) {
      data.df.applyPattern( field.getFormat() );
    } else {
      data.df.applyPattern( data.defaultDecimalFormat.toPattern() );
    }

    // Decimal
    if ( !Utils.isEmpty( field.getDecimalSymbol() ) ) {
      data.dfs.setDecimalSeparator( field.getDecimalSymbol().charAt( 0 ) );
    } else {
      data.dfs.setDecimalSeparator( data.defaultDecimalFormatSymbols.getDecimalSeparator() );
    }

    // Grouping
    if ( !Utils.isEmpty( field.getGroupingSymbol() ) ) {
      data.dfs.setGroupingSeparator( field.getGroupingSymbol().charAt( 0 ) );
    } else {
      data.dfs.setGroupingSeparator( data.defaultDecimalFormatSymbols.getGroupingSeparator() );
    }

    // Currency symbol
    if ( !Utils.isEmpty( field.getCurrencySymbol() ) ) {
      data.dfs.setCurrencySymbol( field.getCurrencySymbol() );
    } else {
      data.dfs.setCurrencySymbol( data.defaultDecimalFormatSymbols.getCurrencySymbol() );
    }

    data.df.setDecimalFormatSymbols( data.dfs );

    if ( valueMeta.isBigNumber() ) {
      retval = data.df.format( valueMeta.getBigNumber( valueData ) );
    } else if ( valueMeta.isNumber() ) {
      retval = data.df.format( valueMeta.getNumber( valueData ) );
    } else {
      // Integer
      retval = data.df.format( valueMeta.getInteger( valueData ) );
    }
  } else if ( valueMeta.isDate() ) {
    if ( field != null && !Utils.isEmpty( field.getFormat() ) && valueMeta.getDate( valueData ) != null ) {
      if ( !Utils.isEmpty( field.getFormat() ) ) {
        data.daf.applyPattern( field.getFormat() );
      } else {
        data.daf.applyPattern( data.defaultDateFormat.toLocalizedPattern() );
      }
      data.daf.setDateFormatSymbols( data.dafs );
      retval = data.daf.format( valueMeta.getDate( valueData ) );
    } else {
      if ( valueMeta.isNull( valueData ) ) {
        if ( field != null && !Utils.isEmpty( field.getNullString() ) ) {
          retval = field.getNullString();
        }
      } else {
        retval = valueMeta.getString( valueData );
      }
    }
  } else if ( valueMeta.isString() ) {
    retval = valueMeta.getString( valueData );
  } else if ( valueMeta.isBinary() ) {
    if ( valueMeta.isNull( valueData ) ) {
      if ( !Utils.isEmpty( field.getNullString() ) ) {
        retval = field.getNullString();
      } else {
        retval = Const.NULL_BINARY;
      }
    } else {
      try {
        retval = new String( valueMeta.getBinary( valueData ), "UTF-8" );
      } catch ( UnsupportedEncodingException e ) {
        // chances are small we'll get here. UTF-8 is mandatory.
        retval = Const.NULL_BINARY;
      }
    }
  } else {
    // Boolean
    retval = valueMeta.getString( valueData );
  }

  return retval;
}
Example 12
Source File: SalesforceUpsert.java From pentaho-kettle with Apache License 2.0
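Buffers rows for a Salesforce upsert; fields for which isNull() returns true are not sent as values but are collected into the SObject's fieldsToNull list.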
@VisibleForTesting
void writeToSalesForce( Object[] rowData ) throws KettleException {
  try {
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "SalesforceUpsert.CalledWrite", data.iBufferPos,
        meta.getBatchSizeInt() ) );
    }

    // if there is room in the buffer
    if ( data.iBufferPos < meta.getBatchSizeInt() ) {
      // Reserve for empty fields
      ArrayList<String> fieldsToNull = new ArrayList<String>();
      ArrayList<XmlObject> upsertfields = new ArrayList<>();

      // Add fields to update
      for ( int i = 0; i < data.nrfields; i++ ) {
        ValueMetaInterface valueMeta = data.inputRowMeta.getValueMeta( data.fieldnrs[i] );
        Object object = rowData[data.fieldnrs[i]];

        if ( valueMeta.isNull( object ) ) {
          // The value is null
          // We need to keep track of this field
          fieldsToNull.add( SalesforceUtils.getFieldToNullName( log, meta.getUpdateLookup()[i],
            meta.getUseExternalId()[i] ) );
        } else {
          Object normalObject = normalizeValue( valueMeta, rowData[data.fieldnrs[i]] );
          if ( data.mapData && data.dataTypeMap != null ) {
            normalObject = mapDataTypes( valueMeta.getType(), meta.getUpdateLookup()[i], normalObject );
          }
          upsertfields.add( SalesforceConnection.createMessageElement( meta.getUpdateLookup()[i], normalObject,
            meta.getUseExternalId()[i] ) );
        }
      }

      // build the SObject
      SObject sobjPass = new SObject();
      sobjPass.setType( data.connection.getModule() );
      if ( upsertfields.size() > 0 ) {
        for ( XmlObject element : upsertfields ) {
          setFieldInSObject( sobjPass, element );
        }
      }
      if ( fieldsToNull.size() > 0 ) {
        // Set Null to fields
        sobjPass.setFieldsToNull( fieldsToNull.toArray( new String[fieldsToNull.size()] ) );
      }

      // Load the buffer array
      data.sfBuffer[data.iBufferPos] = sobjPass;
      data.outputBuffer[data.iBufferPos] = rowData;
      data.iBufferPos++;
    }

    if ( data.iBufferPos >= meta.getBatchSizeInt() ) {
      if ( log.isDetailed() ) {
        logDetailed( "Calling flush buffer from writeToSalesForce" );
      }
      flushBuffers();
    }
  } catch ( KettleException ke ) {
    throw ke;
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( PKG, "SalesforceUpsert.FailedInWrite", e.toString() ) );
  }
}
Example 13
Source File: SalesforceInsert.java From pentaho-kettle with Apache License 2.0
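The insert counterpart of Example 12: isNull() routes each field either into the insert payload or into the fieldsToNull list.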
@VisibleForTesting
void writeToSalesForce( Object[] rowData ) throws KettleException {
  try {
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "SalesforceInsert.WriteToSalesforce", data.iBufferPos,
        meta.getBatchSizeInt() ) );
    }

    // if there is room in the buffer
    if ( data.iBufferPos < meta.getBatchSizeInt() ) {
      ArrayList<XmlObject> insertfields = new ArrayList<>();

      // Reserve for empty fields
      ArrayList<String> fieldsToNull = new ArrayList<String>();

      // Add fields to insert
      for ( int i = 0; i < data.nrfields; i++ ) {
        ValueMetaInterface valueMeta = data.inputRowMeta.getValueMeta( data.fieldnrs[i] );
        Object value = rowData[data.fieldnrs[i]];

        if ( valueMeta.isNull( value ) ) {
          // The value is null
          // We need to keep track of this field
          fieldsToNull.add( SalesforceUtils.getFieldToNullName( log, meta.getUpdateLookup()[i],
            meta.getUseExternalId()[i] ) );
        } else {
          Object normalObject = normalizeValue( valueMeta, value );
          insertfields.add( SalesforceConnection.createMessageElement( meta.getUpdateLookup()[i], normalObject,
            meta.getUseExternalId()[i] ) );
        }
      }

      // build the SObject
      SObject sobjPass = new SObject();
      sobjPass.setType( data.connection.getModule() );
      if ( insertfields.size() > 0 ) {
        for ( XmlObject element : insertfields ) {
          sobjPass.setSObjectField( element.getName().getLocalPart(), element.getValue() );
        }
      }
      if ( fieldsToNull.size() > 0 ) {
        // Set Null to fields
        sobjPass.setFieldsToNull( fieldsToNull.toArray( new String[fieldsToNull.size()] ) );
      }

      // Load the buffer array
      data.sfBuffer[data.iBufferPos] = sobjPass;
      data.outputBuffer[data.iBufferPos] = rowData;
      data.iBufferPos++;
    }

    if ( data.iBufferPos >= meta.getBatchSizeInt() ) {
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "SalesforceInsert.CallingFlushBuffer" ) );
      }
      flushBuffers();
    }
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( PKG, "SalesforceInsert.Error", e.getMessage() ) );
  }
}