org.pentaho.di.core.RowMetaAndData Java Examples
The following examples show how to use
org.pentaho.di.core.RowMetaAndData.
The source file and originating project are noted above each example.
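Before the project examples below, here is a minimal sketch of the basic API: build a RowMeta describing the fields, pair it with an Object[] of values, and read values back through the typed accessors. The field names ( "name", "id", "comment" ) and the standalone main method are illustrative assumptions, not taken from any of the projects; the constructor, addValue, getString, and getInteger calls are the ones used throughout the examples.

import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaInteger;
import org.pentaho.di.core.row.value.ValueMetaString;

public class RowMetaAndDataSketch {
  public static void main( String[] args ) throws KettleValueException {
    // Build the row metadata: one string field and one integer field.
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta( new ValueMetaString( "name" ) );     // hypothetical field name
    rowMeta.addValueMeta( new ValueMetaInteger( "id" ) );      // hypothetical field name

    // Pair the metadata with a data array (varargs constructor, as in Example #13).
    RowMetaAndData row = new RowMetaAndData( rowMeta, "Pentaho", 1L );

    // Values can also be appended one at a time (as in Examples #3, #6, #8).
    row.addValue( new ValueMetaString( "comment" ), "hello" );

    // Typed accessors take a field name (or index) plus a default value.
    System.out.println( row.getString( "name", null ) );  // prints: Pentaho
    System.out.println( row.getInteger( "id", -1L ) );    // prints: 1
  }
}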
Example #1
Source File: RowsToResult.java From pentaho-kettle with Apache License 2.0
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  meta = (RowsToResultMeta) smi;
  data = (RowsToResultData) sdi;

  Object[] r = getRow(); // get row, set busy!
  if ( r == null ) { // no more input to be expected...
    getTrans().getResultRows().addAll( data.rows );
    getTrans().setResultRowSet( true );

    setOutputDone();
    return false;
  }

  // Add all rows to rows buffer...
  data.rows.add( new RowMetaAndData( getInputRowMeta(), r ) );
  data.outputRowMeta = getInputRowMeta().clone();
  meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
  putRow( data.outputRowMeta, r ); // copy row to possible alternate rowset(s).

  if ( checkFeedback( getLinesRead() ) ) {
    logBasic( BaseMessages.getString( PKG, "RowsToResult.Log.LineNumber" ) + getLinesRead() );
  }

  return true;
}
Example #2
Source File: UniqueRowsIT.java From pentaho-kettle with Apache License 2.0
public List<RowMetaAndData> createResultDataCaseSensitiveNoPreviousSort() {
  List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();

  RowMetaInterface rm = createRowMetaInterface();

  Object[] r1 = new Object[] { "abc" };
  Object[] r2 = new Object[] { "ABC" };
  Object[] r3 = new Object[] { "abc" };
  Object[] r4 = new Object[] { "ABC" };

  list.add( new RowMetaAndData( rm, r1 ) );
  list.add( new RowMetaAndData( rm, r2 ) );
  list.add( new RowMetaAndData( rm, r3 ) );
  list.add( new RowMetaAndData( rm, r4 ) );

  return list;
}
Example #3
Source File: KettleDatabaseRepositoryConnectionDelegate.java From pentaho-kettle with Apache License 2.0
public RowMetaAndData getJobAttributeRow( ObjectId id_job, int nr, String code ) throws KettleException {
  RowMetaAndData par = new RowMetaAndData();
  par.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_ID_JOB ), id_job );
  par.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_CODE ), code );
  par.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ATTRIBUTE_NR ), new Long( nr ) );

  if ( psJobAttributesLookup == null ) {
    setLookupJobAttribute();
  }
  database.setValues( par, psJobAttributesLookup );

  return callRead( new Callable<RowMetaAndData>() {
    @Override
    public RowMetaAndData call() throws Exception {
      Object[] r = database.getLookup( psJobAttributesLookup );
      if ( r == null ) {
        return null;
      }
      return new RowMetaAndData( database.getReturnRowMeta(), r );
    }
  } );
}
Example #4
Source File: PentahoOrcRecordWriter.java From pentaho-hadoop-shims with Apache License 2.0
public PentahoOrcRecordWriter( List<? extends IOrcOutputField> fields, TypeDescription schema,
                               String filePath, Configuration conf ) {
  this.fields = fields;
  this.schema = schema;

  final AtomicInteger fieldNumber = new AtomicInteger(); // Mutable field count
  fields.forEach( field -> setOutputMeta( fieldNumber, field ) );
  outputRowMetaAndData = new RowMetaAndData( outputRowMeta, new Object[ fieldNumber.get() ] );

  try {
    S3NCredentialUtils.applyS3CredentialsToHadoopConfigurationIfNecessary( filePath, conf );
    Path outputFile = new Path( S3NCredentialUtils.scrubFilePathIfNecessary( filePath ) );
    writer = OrcFile.createWriter( outputFile,
      OrcFile.writerOptions( conf )
        .setSchema( schema ) );
    batch = schema.createRowBatch();
  } catch ( IOException e ) {
    logger.error( e );
  }

  // Write the additional metadata for the fields
  // new OrcMetaDataWriter( writer ).write( fields );
}
Example #5
Source File: AvroNestedRecordReader.java From pentaho-hadoop-shims with Apache License 2.0
private RowMetaAndData getNextRowMetaAndData() {
  if ( hasExpandedRows() == false ) {
    try {
      nextExpandedRow = 0;
      expandedRows = null;
      expandedRows = avroNestedReader.avroObjectToKettle( incomingFields, avroInputStep );
      if ( expandedRows != null ) {
        nextRow = objectToRowMetaAndData( expandedRows[ nextExpandedRow ] );
      } else {
        return null;
      }
    } catch ( KettleException e ) {
      e.printStackTrace();
    }
  }
  nextRow = objectToRowMetaAndData( expandedRows[ nextExpandedRow ] );
  nextExpandedRow++;
  return nextRow;
}
Example #6
Source File: KettleDatabaseRepositoryConnectionDelegate.java From pentaho-kettle with Apache License 2.0
public synchronized ObjectId insertJobEntryAttribute( ObjectId id_job, ObjectId id_jobentry, long nr, String code,
  double value_num, String value_str ) throws KettleException {
  ObjectId id = getNextJobEntryAttributeID();

  RowMetaAndData table = new RowMetaAndData();

  //CHECKSTYLE:LineLength:OFF
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY_ATTRIBUTE ), id );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOB ), id_job );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_ID_JOBENTRY ), id_jobentry );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_NR ), new Long( nr ) );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_CODE ), code );
  table.addValue( new ValueMetaNumber( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_NUM ), new Double( value_num ) );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOBENTRY_ATTRIBUTE_VALUE_STR ), value_str );

  database.prepareInsert( table.getRowMeta(), KettleDatabaseRepository.TABLE_R_JOBENTRY_ATTRIBUTE );
  database.setValuesInsert( table );
  database.insertRow();
  database.closeInsert();

  return id;
}
Example #7
Source File: RegexEvalIT.java From pentaho-kettle with Apache License 2.0
public void testRegexEval1() throws Exception {
  String regexStepName = "regexeval";
  RegexEvalMeta regexEvalMeta = new RegexEvalMeta();

  regexEvalMeta.setScript( "[abc]*" );
  regexEvalMeta.setMatcher( "field1" );
  regexEvalMeta.setResultFieldName( "res" );

  TransMeta transMeta = TransTestFactory.generateTestTransformation( new Variables(), regexEvalMeta, regexStepName );

  // Now execute the transformation and get the result from the dummy step.
  List<RowMetaAndData> result = TransTestFactory.executeTestTransformation( transMeta,
    TransTestFactory.INJECTOR_STEPNAME, regexStepName, TransTestFactory.DUMMY_STEPNAME, createSourceData() );

  checkRows( createResultData1(), result );
}
Example #8
Source File: KettleDatabaseRepositoryConnectionDelegate.java From pentaho-kettle with Apache License 2.0
public synchronized ObjectId getIDWithValue( String tablename, String idfield, String lookupfield, String value,
  String lookupkey, ObjectId key ) throws KettleException {
  RowMetaAndData par = new RowMetaAndData();
  par.addValue( new ValueMetaString( "value" ), value );
  par.addValue( new ValueMetaInteger( "key" ), new LongObjectId( key ) );
  RowMetaAndData result = getOneRow( "SELECT " + idfield + " FROM " + tablename + " WHERE "
    + lookupfield + " = ? AND " + lookupkey + " = ?", par.getRowMeta(), par.getData() );

  if ( result != null && result.getRowMeta() != null && result.getData() != null && result.isNumeric( 0 ) ) {
    return new LongObjectId( result.getInteger( 0, 0 ) );
  }
  return null;
}
Example #9
Source File: KettleDatabaseRepositoryConnectionDelegate.java From pentaho-kettle with Apache License 2.0
private RowMetaAndData getStepAttributeRow( ObjectId id_step, int nr, String code ) throws KettleException {
  RowMetaAndData par = new RowMetaAndData();
  par.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP ), id_step );
  par.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE ), code );
  par.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_NR ), new Long( nr ) );

  if ( psStepAttributesLookup == null ) {
    setLookupStepAttribute();
  }
  database.setValues( par.getRowMeta(), par.getData(), psStepAttributesLookup );

  return callRead( new Callable<RowMetaAndData>() {
    @Override
    public RowMetaAndData call() throws Exception {
      Object[] lookup = database.getLookup( psStepAttributesLookup );
      return new RowMetaAndData( database.getReturnRowMeta(), lookup );
    }
  } );
}
Example #10
Source File: TableOutputIT.java From pentaho-kettle with Apache License 2.0
/**
 * Create the result rows for the JIRA897 case.
 */
public List<RowMetaAndData> createJIRA897ResultDataRows() {
  List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();

  RowMetaInterface rm = createSourceRowMetaInterface1();

  Object[] r1 = new Object[] { new Long( 100L ), new Long( 1000L ) };
  Object[] r2 = new Object[] { new Long( 101L ), new Long( 1001L ) };
  Object[] r3 = new Object[] { new Long( 102L ), new Long( 1002L ) };
  Object[] r4 = new Object[] { new Long( 103L ), new Long( 1003L ) };
  Object[] r5 = new Object[] { new Long( 104L ), new Long( 1004L ) };
  Object[] r6 = new Object[] { new Long( 105L ), new Long( 1005L ) };
  Object[] r7 = new Object[] { new Long( 106L ), new Long( 1006L ) };

  list.add( new RowMetaAndData( rm, r1 ) );
  list.add( new RowMetaAndData( rm, r2 ) );
  list.add( new RowMetaAndData( rm, r3 ) );
  list.add( new RowMetaAndData( rm, r4 ) );
  list.add( new RowMetaAndData( rm, r5 ) );
  list.add( new RowMetaAndData( rm, r6 ) );
  list.add( new RowMetaAndData( rm, r7 ) );

  return list;
}
Example #11
Source File: UpdateIT.java From pentaho-kettle with Apache License 2.0
public void pumpRows( List<RowMetaAndData> inputList ) throws Exception {
  trans.startThreads();

  // add rows
  for ( RowMetaAndData rm : inputList ) {
    rp.putRow( rm.getRowMeta(), rm.getData() );
  }
  rp.finished();

  trans.waitUntilFinished();
  if ( trans.getErrors() > 0 ) {
    fail( "test transformation failed, check logs!" );
  }
}
Example #12
Source File: DetectLastRowStepIT.java From pentaho-kettle with Apache License 2.0
/**
 * Create data rows.
 *
 * @param nrRows
 *          nr of rows to insert (from 0 to 3 for the moment)
 * @return List of row and meta data
 */
public List<RowMetaAndData> createData( int nrRows ) {
  List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();

  RowMetaInterface rm = createRowMetaInterface();

  Object[] r1 = new Object[] { "KETTLE1", new Long( 123L ), new Double( 10.5D ), Boolean.TRUE,
    BigDecimal.valueOf( 123.45 ) };
  Object[] r2 = new Object[] { "KETTLE2", new Long( 500L ), new Double( 20.0D ), Boolean.FALSE,
    BigDecimal.valueOf( 123.45 ) };
  Object[] r3 = new Object[] { "KETTLE3", new Long( 501L ), new Double( 21.0D ), Boolean.FALSE,
    BigDecimal.valueOf( 123.45 ) };

  list.add( new RowMetaAndData( rm, r1 ) );
  list.add( new RowMetaAndData( rm, r2 ) );
  list.add( new RowMetaAndData( rm, r3 ) );

  return list.subList( 0, nrRows );
}
Example #13
Source File: SubtransExecutorTest.java From pentaho-kettle with Apache License 2.0
@Test
public void stopsAll() throws KettleException {
  TransMeta parentMeta =
    new TransMeta( this.getClass().getResource( "subtrans-executor-parent.ktr" ).getPath(), new Variables() );
  TransMeta subMeta =
    new TransMeta( this.getClass().getResource( "subtrans-executor-sub.ktr" ).getPath(), new Variables() );
  LoggingObjectInterface loggingObject = new LoggingObject( "anything" );
  Trans parentTrans = new Trans( parentMeta, loggingObject );
  SubtransExecutor subtransExecutor =
    new SubtransExecutor( "subtransname", parentTrans, subMeta, true, new TransExecutorParameters(), "", 1001 );
  subtransExecutor.running = Mockito.spy( subtransExecutor.running );
  RowMetaInterface rowMeta = parentMeta.getStepFields( "Data Grid" );
  List<RowMetaAndData> rows = Arrays.asList(
    new RowMetaAndData( rowMeta, "Pentaho", 1L ),
    new RowMetaAndData( rowMeta, "Pentaho", 2L ),
    new RowMetaAndData( rowMeta, "Pentaho", 3L ),
    new RowMetaAndData( rowMeta, "Pentaho", 4L ) );

  subtransExecutor.execute( rows );
  verify( subtransExecutor.running ).add( any() );

  subtransExecutor.stop();
  assertTrue( subtransExecutor.running.isEmpty() );
}
Example #14
Source File: DatabaseLookupIT.java From pentaho-kettle with Apache License 2.0
/**
 * Create the result rows for a test.
 */
public List<RowMetaAndData> createResultDataRows() {
  List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();

  RowMetaInterface rm = createResultRowMetaInterface();

  Object[] r1 = new Object[] { new Long( 5L ), "5", new Long( 101L ), "5" };
  Object[] r2 = new Object[] { new Long( 9L ), "9", new Long( 102L ), "9" };
  Object[] r3 = new Object[] { new Long( 20L ), "20", new Long( -1L ), "UNDEF" };

  list.add( new RowMetaAndData( rm, r1 ) );
  list.add( new RowMetaAndData( rm, r2 ) );
  list.add( new RowMetaAndData( rm, r3 ) );

  return list;
}
Example #15
Source File: CsvInput2IT.java From pentaho-kettle with Apache License 2.0
/**
 * Create result data for test case 1.
 *
 * @return list of metadata/data couples describing how the result should look.
 */
@Override
public List<RowMetaAndData> createResultData1() {
  List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();

  RowMetaInterface rm = createResultRowMetaInterface();

  Object[] r1 = new Object[] { new Long( 1L ), "b0", "c0", "fileName" };
  Object[] r2 = new Object[] { new Long( 2L ), "b1", "c1", "fileName" };
  Object[] r3 = new Object[] { new Long( 3L ), "b2", "c2", "fileName" };

  list.add( new RowMetaAndData( rm, r1 ) );
  list.add( new RowMetaAndData( rm, r2 ) );
  list.add( new RowMetaAndData( rm, r3 ) );

  return list;
}
Example #16
Source File: KettleDatabaseRepositoryTransDelegate.java From pentaho-kettle with Apache License 2.0
public String[] getTransformationsWithIDList( List<Object[]> list, RowMetaInterface rowMeta ) throws KettleException {
  String[] transList = new String[list.size()];
  for ( int i = 0; i < list.size(); i++ ) {
    long id_transformation =
      rowMeta.getInteger( list.get( i ), quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION ), -1L );
    if ( id_transformation > 0 ) {
      RowMetaAndData transRow = getTransformation( new LongObjectId( id_transformation ) );
      if ( transRow != null ) {
        String transName = transRow.getString( KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME, "<name not found>" );
        long id_directory = transRow.getInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY, -1L );
        RepositoryDirectoryInterface dir =
          repository.loadRepositoryDirectoryTree().findDirectory( new LongObjectId( id_directory ) );
        transList[i] = dir.getPathObjectCombination( transName );
      }
    }
  }

  return transList;
}
Example #17
Source File: TableCompareIT.java From pentaho-kettle with Apache License 2.0
/**
 * Test the table compare step when the compare table is empty (rows exist only in the reference table).
 *
 * @throws IOException
 * @throws KettleException
 */
@Test
public void testValueNotExistedInCompare() throws IOException, KettleException {
  executeSqlPrecondition( "reference_only.sql" );

  TableCompareMeta meta = getTableCompareMeta();
  List<RowMetaAndData> inputData = new ArrayList<RowMetaAndData>();
  inputData.add( new RowMetaAndData( getRowMeta(), getData3() ) );

  TransMeta trMeta = TransTestFactory.generateTestTransformationError( null, meta, "junit" );
  Map<String, RowStepCollector> result =
    TransTestFactory.executeTestTransformationError( trMeta, "junit", inputData );

  List<RowMetaAndData> read = result.get( TransTestFactory.DUMMY_STEPNAME ).getRowsRead();
  List<RowMetaAndData> errors = result.get( TransTestFactory.ERROR_STEPNAME ).getRowsRead();

  RowMetaAndData row = read.get( 0 );
  Assert.assertEquals( "Errors reported", 4, row.getInteger( 8 ).intValue() );
  Assert.assertEquals( "Reference table row count", 4, row.getInteger( 9 ).intValue() );
  Assert.assertEquals( "Compare table row count", 0, row.getInteger( 10 ).intValue() );
  Assert.assertEquals( "Number of left joins errors", 4, row.getInteger( 11 ).intValue() );
  Assert.assertEquals( "Number of inner joins errors", 0, row.getInteger( 12 ).intValue() );
  Assert.assertEquals( "Number of right joins errors", 0, row.getInteger( 13 ).intValue() );

  Assert.assertEquals( "4 error rows passed to error output", 4, errors.size() );
}
Example #18
Source File: PentahoAvroReadWriteTest.java From pentaho-hadoop-shims with Apache License 2.0
private void testRecordWriter( List<AvroOutputField> avroOutputFields, RowMeta rowMeta, Object[] rowData,
                               IPentahoAvroOutputFormat.COMPRESSION compressionType, String filePath,
                               boolean overwrite ) throws Exception {
  PentahoAvroOutputFormat avroOutputFormat = new PentahoAvroOutputFormat();
  avroOutputFormat.setNameSpace( "nameSpace" );
  avroOutputFormat.setRecordName( "recordName" );
  avroOutputFormat.setFields( avroOutputFields );
  avroOutputFormat.setCompression( compressionType );
  avroOutputFormat.setOutputFile( filePath, overwrite );

  IPentahoOutputFormat.IPentahoRecordWriter avroRecordWriter = avroOutputFormat.createRecordWriter();
  Assert.assertNotNull( avroRecordWriter, "avroRecordWriter should NOT be null!" );
  Assert.assertTrue( avroRecordWriter instanceof PentahoAvroRecordWriter,
    "avroRecordWriter should be instance of PentahoAvroRecordWriter" );

  avroRecordWriter.write( new RowMetaAndData( rowMeta, rowData ) );
  try {
    avroRecordWriter.close();
  } catch ( Exception e ) {
    e.printStackTrace();
  }
}
Example #19
Source File: TransSingleThreadIT.java From pentaho-kettle with Apache License 2.0
public List<RowMetaAndData> createData() {
  List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();

  RowMetaInterface rm = createRowMetaInterface();

  Object[] r1 = new Object[] { "KETTLE1", new Long( 123L ), new Double( 10.5D ), new Date(), Boolean.TRUE,
    BigDecimal.valueOf( 123.45 ), BigDecimal.valueOf( 123.60 ) };
  Object[] r2 = new Object[] { "KETTLE2", new Long( 500L ), new Double( 20.0D ), new Date(), Boolean.FALSE,
    BigDecimal.valueOf( 123.45 ), BigDecimal.valueOf( 123.60 ) };
  Object[] r3 = new Object[] { "KETTLE3", new Long( 501L ), new Double( 21.0D ), new Date(), Boolean.FALSE,
    BigDecimal.valueOf( 123.45 ), BigDecimal.valueOf( 123.70 ) };

  list.add( new RowMetaAndData( rm, r1 ) );
  list.add( new RowMetaAndData( rm, r2 ) );
  list.add( new RowMetaAndData( rm, r3 ) );

  return list;
}
Example #20
Source File: KettleDatabaseRepositoryMetaStore.java From pentaho-kettle with Apache License 2.0
@Override
public List<String> getNamespaces() throws MetaStoreException {
  try {
    List<String> namespaces = new ArrayList<String>();
    Collection<RowMetaAndData> namespaceRows = delegate.getNamespaces();
    for ( RowMetaAndData namespaceRow : namespaceRows ) {
      String namespace = namespaceRow.getString( KettleDatabaseRepository.FIELD_NAMESPACE_NAME, null );
      if ( !Utils.isEmpty( namespace ) ) {
        namespaces.add( namespace );
      }
    }
    return namespaces;
  } catch ( Exception e ) {
    throw new MetaStoreException( e );
  }
}
Example #21
Source File: XMLInputFieldsImportProgressDialog.java From pentaho-kettle with Apache License 2.0
public XMLInputFieldsImportProgressDialog( Shell shell, GetXMLDataMeta meta, String xmlSource, boolean useUrl,
  String loopXPath ) {
  this.shell = shell;
  this.meta = meta;
  this.fields = null;
  this.filename = null;
  this.encoding = null;
  this.nr = 0;
  this.loopXPath = loopXPath;
  this.list = new HashSet<String>();
  this.fieldsList = new ArrayList<RowMetaAndData>();

  if ( useUrl ) {
    this.xml = null;
    this.url = xmlSource;
  } else {
    this.xml = xmlSource;
    this.url = null;
  }
}
Example #22
Source File: PentahoAvroReadWriteTest.java From pentaho-hadoop-shims with Apache License 2.0
@Test
public void testParseDateOnInput() throws Exception {
  Object[] rowData = new Object[] { "2000-01-02" };
  String[][] outputSchemaDescription = new String[][] {
    { "avroDate8", "pentahoDate8", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_STRING ), "0", "0" }
  };
  String[][] inputSchemaDescription = new String[][] {
    { "avroDate8", "pentahoDate8", String.valueOf( AvroSpec.DataType.STRING.ordinal() ),
      String.valueOf( ValueMetaInterface.TYPE_DATE ), "0", "0", "yyyy-MM-dd" }
  };

  RowMeta rowMeta = buildRowMeta( outputSchemaDescription );
  RowMetaAndData rowMetaAndData = new RowMetaAndData( rowMeta, rowData );

  SimpleDateFormat format = new SimpleDateFormat( "yyyy-MM-dd" );
  Date[] expectedResults = new Date[] { format.parse( "2000-01-02" ) };

  doReadWrite( inputSchemaDescription, outputSchemaDescription, rowData,
    IPentahoAvroOutputFormat.COMPRESSION.UNCOMPRESSED, "avroOutputNone.avro", null, expectedResults, true );
}
Example #23
Source File: PentahoOrcReadWriteTest.java From pentaho-hadoop-shims with Apache License 2.0
/**
 * Read the rows back from the Orc file.
 *
 * @throws Exception
 */
private void testRecordReader() throws Exception {
  PluginRegistry.addPluginType( ValueMetaPluginType.getInstance() );
  PluginRegistry.init( true );

  PentahoOrcInputFormat pentahoOrcInputFormat = new PentahoOrcInputFormat( mock( NamedCluster.class ) );
  pentahoOrcInputFormat.setSchema( orcInputFields );
  pentahoOrcInputFormat.setInputFile( filePath );
  IPentahoInputFormat.IPentahoRecordReader pentahoRecordReader = pentahoOrcInputFormat.createRecordReader( null );

  final AtomicInteger rowNumber = new AtomicInteger();
  for ( RowMetaAndData row : pentahoRecordReader ) {
    final AtomicInteger fieldNumber = new AtomicInteger();
    orcInputFields.forEach( field -> testValue( field, row, rowNumber, fieldNumber ) );
    rowNumber.incrementAndGet();
  }
}
Example #24
Source File: StreamLookupData.java From pentaho-kettle with Apache License 2.0
public StreamLookupData() {
  super();
  look = new HashMap<RowMetaAndData, Object[]>();
  hashIndex = null;
  longIndex = new LongHashIndex();
  list = new ArrayList<KeyValue>();
  metadataVerifiedIntegerPair = false;
  hasLookupRows = false;

  comparator = new Comparator<KeyValue>() {
    public int compare( KeyValue k1, KeyValue k2 ) {
      try {
        return cacheKeyMeta.compare( k1.getKey(), k2.getKey() );
      } catch ( KettleValueException e ) {
        throw new RuntimeException( "Stream Lookup comparator error", e );
      }
    }
  };
}
Example #25
Source File: CommonFormatShimTest.java From pentaho-hadoop-shims with Apache License 2.0
@Test
public void testAvroNestedReadLocalFileSystem() throws Exception {
  List<String> expectedRows = Arrays.asList( "John;4074549921", "Leslie;4079302194" );
  PentahoAvroInputFormat avroInputFormat = new PentahoAvroInputFormat( mock( NamedCluster.class ) );
  avroInputFormat.setInputSchemaFile( getFilePath( "/sample-schema.avro" ) );
  avroInputFormat.setInputFile( getFilePath( "/sample-data.avro" ) );
  avroInputFormat.setUseFieldAsInputStream( false );
  avroInputFormat.setIsDataBinaryEncoded( true );

  List<AvroInputField> inputFields = new ArrayList<>();
  addStringField( inputFields, "FirstName" );
  addStringField( inputFields, "Phone" );
  avroInputFormat.setInputFields( inputFields );

  RowMetaAndData row = new RowMetaAndData();
  RowMeta rowMeta = new RowMeta();
  rowMeta.addValueMeta( new ValueMetaString( "FirstName" ) );
  rowMeta.addValueMeta( new ValueMetaString( "Phone" ) );
  row.setRowMeta( rowMeta );
  avroInputFormat.setOutputRowMeta( rowMeta );

  IPentahoRecordReader recordReader = avroInputFormat.createRecordReader( null );
  assertEquals( expectedRows, generateDataSample( recordReader, inputFields ) );
}
Example #26
Source File: KettleDatabaseRepositoryJobDelegate.java From pentaho-kettle with Apache License 2.0
public String[] getJobsWithIDList( List<Object[]> list, RowMetaInterface rowMeta ) throws KettleException {
  String[] jobList = new String[list.size()];
  for ( int i = 0; i < list.size(); i++ ) {
    long id_job = rowMeta.getInteger( list.get( i ), quote( KettleDatabaseRepository.FIELD_JOB_ID_JOB ), -1L );
    if ( id_job > 0 ) {
      RowMetaAndData jobRow = getJob( new LongObjectId( id_job ) );
      if ( jobRow != null ) {
        String jobName = jobRow.getString( KettleDatabaseRepository.FIELD_JOB_NAME, "<name not found>" );
        long id_directory = jobRow.getInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY, -1L );

        // always reload the directory tree!
        RepositoryDirectoryInterface dir =
          repository.loadRepositoryDirectoryTree().findDirectory( new LongObjectId( id_directory ) );
        jobList[i] = dir.getPathObjectCombination( jobName );
      }
    }
  }

  return jobList;
}
Example #27
Source File: SortRowsIT.java From pentaho-kettle with Apache License 2.0
/**
 * Test case for the sorting step: descending order on "numeric" data.
 */
@Test
public void testSortRows2() throws Exception {
  SortRowsMeta srm = new SortRowsMeta();
  srm.setSortSize( Integer.toString( MAX_COUNT / 10 ) );
  String[] sortFields = { "KEY1", "KEY2" };
  boolean[] ascendingFields = { false, false };
  boolean[] caseSensitive = { true, true };
  boolean[] presortedFields = { false, false };
  srm.setFieldName( sortFields );
  srm.setAscending( ascendingFields );
  srm.setCaseSensitive( caseSensitive );
  srm.setPreSortedField( presortedFields );
  srm.setPrefix( "SortRowsTest" );
  srm.setDirectory( "." );

  TransMeta transMeta = TransTestFactory.generateTestTransformation( null, srm, sortRowsStepname );

  // add rows
  List<RowMetaAndData> inputList = createStringData();
  List<RowMetaAndData> ret = TransTestFactory.executeTestTransformation( transMeta,
    TransTestFactory.INJECTOR_STEPNAME, sortRowsStepname, TransTestFactory.DUMMY_STEPNAME, inputList );

  checkStringRows( ret, false );
}
Example #28
Source File: KettleDatabaseRepositoryClusterSchemaDelegate.java From pentaho-kettle with Apache License 2.0
public synchronized void updateCluster( ClusterSchema clusterSchema ) throws KettleException {
  RowMetaAndData table = new RowMetaAndData();
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER ),
    clusterSchema.getObjectId() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_NAME ), clusterSchema.getName() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_BASE_PORT ),
    clusterSchema.getBasePort() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_BUFFER_SIZE ),
    clusterSchema.getSocketsBufferSize() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_FLUSH_INTERVAL ),
    clusterSchema.getSocketsFlushInterval() );
  table.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_SOCKETS_COMPRESSED ),
    Boolean.valueOf( clusterSchema.isSocketsCompressed() ) );
  table.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_CLUSTER_DYNAMIC ),
    Boolean.valueOf( clusterSchema.isDynamic() ) );

  repository.connectionDelegate.updateTableRow( KettleDatabaseRepository.TABLE_R_CLUSTER,
    KettleDatabaseRepository.FIELD_CLUSTER_ID_CLUSTER, table, clusterSchema.getObjectId() );
}
Example #29
Source File: CommonFormatShimTest.java From pentaho-hadoop-shims with Apache License 2.0
@Test
public void testAvroBinaryDatumReadFromField() throws Exception {
  List<String> expectedRows = singletonList( "1;aString" );
  byte[] datumBytes = Files.readAllBytes( new File( getFilePath( "/avro/flatschema.datum" ) ).toPath() );

  PentahoAvroInputFormat avroInputFormat = new PentahoAvroInputFormat( mock( NamedCluster.class ) );
  avroInputFormat.setDatum( true );
  avroInputFormat.setUseFieldAsInputStream( true );
  avroInputFormat.setInputStreamFieldName( "binaryData" );
  avroInputFormat.setUseFieldAsSchema( false );
  avroInputFormat.setInputSchemaFile( getFilePath( "/avro/flatschema.avsc" ) );
  avroInputFormat.setIsDataBinaryEncoded( true );
  avroInputFormat.setIncomingFields( new Object[] { datumBytes } );

  List<AvroInputField> inputFields = new ArrayList<>();
  addStringField( inputFields, "parentInt" );
  addStringField( inputFields, "parentString" );
  avroInputFormat.setInputFields( inputFields );

  RowMetaAndData row = new RowMetaAndData();
  RowMeta rowMeta = new RowMeta();
  rowMeta.addValueMeta( new ValueMetaString( "parentInt" ) );
  rowMeta.addValueMeta( new ValueMetaString( "parentString" ) );
  row.setRowMeta( rowMeta );
  avroInputFormat.setOutputRowMeta( rowMeta );

  RowMeta inRowMeta = new RowMeta();
  inRowMeta.addValueMeta( new ValueMetaBinary( "binaryData" ) );
  avroInputFormat.setIncomingRowMeta( inRowMeta );

  IPentahoRecordReader recordReader = avroInputFormat.createRecordReader( null );
  assertEquals( expectedRows, generateDataSample( recordReader, inputFields ) );
}
Example #30
Source File: DatabaseLookupIT.java From pentaho-kettle with Apache License 2.0
/**
 * Check the 2 lists, comparing the rows in order. If they are not the same, fail the test.
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2 ) {
  int idx = 1;
  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  Iterator<RowMetaAndData> it1 = rows1.iterator();
  Iterator<RowMetaAndData> it2 = rows2.iterator();

  while ( it1.hasNext() && it2.hasNext() ) {
    RowMetaAndData rm1 = it1.next();
    RowMetaAndData rm2 = it2.next();

    Object[] r1 = rm1.getData();
    Object[] r2 = rm2.getData();

    if ( rm1.size() != rm2.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    int[] fields = new int[r1.length];
    for ( int ydx = 0; ydx < r1.length; ydx++ ) {
      fields[ydx] = ydx;
    }

    try {
      if ( rm1.getRowMeta().compare( r1, r2, fields ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      fail( "row nr " + idx + " is not equal" );
    }

    idx++;
  }
}