org.pentaho.di.core.KettleEnvironment Java Examples
The following examples show how to use org.pentaho.di.core.KettleEnvironment.
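Across these examples the common pattern is to call KettleEnvironment.init() once, before any other Kettle API is touched, and (as in Example #7) to guard repeated calls with KettleEnvironment.isInitialized(). A minimal, self-contained sketch of that pattern follows; the class name and the "example.ktr" path are placeholders, not taken from any of the examples.

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;

public class KettleEnvironmentSketch {
  public static void main( String[] args ) throws Exception {
    // Initialize the Kettle environment once per JVM; the guard avoids re-initialization.
    if ( !KettleEnvironment.isInitialized() ) {
      KettleEnvironment.init();
    }

    // After initialization the rest of the API is usable, e.g. running a transformation
    // the way the integration tests below do ("example.ktr" is a placeholder path).
    TransMeta transMeta = new TransMeta( "example.ktr" );
    Trans trans = new Trans( transMeta );
    trans.prepareExecution( null );
    trans.startThreads();
    trans.waitUntilFinished();
  }
}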
Example #1
Source File: StreamLookupMetaTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Before
public void setUpLoadSave() throws Exception {
  KettleEnvironment.init();
  PluginRegistry.init( false );
  List<String> attributes =
      Arrays.asList( "inputSorted", "memoryPreservationActive", "usingSortedList", "usingIntegerPair", "keystream",
          "keylookup", "value", "valueName", "valueDefault", "valueDefaultType" );

  FieldLoadSaveValidator<String[]> stringArrayLoadSaveValidator =
      new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 5 );

  Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
  attrValidatorMap.put( "keystream", stringArrayLoadSaveValidator );
  attrValidatorMap.put( "keylookup", stringArrayLoadSaveValidator );
  attrValidatorMap.put( "value", stringArrayLoadSaveValidator );
  attrValidatorMap.put( "valueName", stringArrayLoadSaveValidator );
  attrValidatorMap.put( "valueDefault", stringArrayLoadSaveValidator );
  attrValidatorMap.put( "valueDefaultType",
      new PrimitiveIntArrayLoadSaveValidator( new IntLoadSaveValidator( 7 ), 5 ) );

  Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();

  loadSaveTester =
      new LoadSaveTester( testMetaClass, attributes, new ArrayList<String>(), new ArrayList<String>(),
          new HashMap<String, String>(), new HashMap<String, String>(), attrValidatorMap, typeValidatorMap, this );
}
Example #2
Source File: SFTPPutMetaTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Before
public void setUpLoadSave() throws Exception {
  KettleEnvironment.init();
  PluginRegistry.init( false );
  List<String> attributes =
      Arrays.asList( "serverName", "serverPort", "userName", "password", "sourceFileFieldName",
          "remoteDirectoryFieldName", "addFilenameResut", "inputStream", "useKeyFile", "keyFilename",
          "keyPassPhrase", "compression", "createRemoteFolder", "proxyType", "proxyHost", "proxyPort",
          "proxyUsername", "proxyPassword", "destinationFolderFieldName", "createDestinationFolder", "afterFTPS",
          "remoteFilenameFieldName" );

  Map<String, String> getterMap = new HashMap<String, String>();
  Map<String, String> setterMap = new HashMap<String, String>();

  Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
  // See JobEntrySFTPPUT for the boundary ... chose not to create a test dependency between the Step and the JobEntry.
  attrValidatorMap.put( "afterFTPS", new IntLoadSaveValidator( 3 ) );

  Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();

  loadSaveTester =
      new LoadSaveTester( testMetaClass, attributes, getterMap, setterMap, attrValidatorMap, typeValidatorMap );
}
Example #3
Source File: JoinRowsMetaTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Before
public void setUpLoadSave() throws Exception {
  KettleEnvironment.init();
  PluginRegistry.init( false );
  List<String> attributes = Arrays.asList( "directory", "prefix", "cacheSize", "mainStepname", "condition" );

  Map<String, String> getterMap = new HashMap<String, String>();
  Map<String, String> setterMap = new HashMap<String, String>();

  Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
  attrValidatorMap.put( "condition", new ConditionLoadSaveValidator() );

  Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();

  loadSaveTester =
      new LoadSaveTester( testMetaClass, attributes, getterMap, setterMap, attrValidatorMap, typeValidatorMap );
}
Example #4
Source File: KettleFileRepositoryTestBase.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Before
public void setUp() throws Exception {
  KettleEnvironment.init();

  virtualFolder = "ram://file-repo/" + UUID.randomUUID();
  KettleVFS.getFileObject( virtualFolder ).createFolder();

  KettleFileRepositoryMeta repositoryMeta =
      new KettleFileRepositoryMeta( "KettleFileRepository", "FileRep", "File repository", virtualFolder );
  repository = new KettleFileRepository();
  repository.init( repositoryMeta );

  // Test connecting... (no security needed)
  //
  repository.connect( null, null );
  assertTrue( repository.isConnected() );

  // Test loading the directory tree
  //
  tree = repository.loadRepositoryDirectoryTree();
  assertNotNull( tree );
}
Example #5
Source File: DynamicSQLRowMetaTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Before
public void setUpLoadSave() throws Exception {
  KettleEnvironment.init();
  PluginRegistry.init( false );
  List<String> attributes =
      Arrays.asList( "sql", "SQLFieldName", "rowLimit", "outerJoin", "variableReplace", "databaseMeta" );

  Map<String, String> getterMap = new HashMap<String, String>();
  Map<String, String> setterMap = new HashMap<String, String>();

  Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
  attrValidatorMap.put( "databaseMeta", new DatabaseMetaLoadSaveValidator() );

  Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();

  loadSaveTester =
      new LoadSaveTester( testMetaClass, attributes, getterMap, setterMap, attrValidatorMap, typeValidatorMap );
}
Example #6
Source File: HTTPMetaLoadSaveTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Before
public void testLoadSaveRoundTrip() throws Exception {
  KettleEnvironment.init();
  PluginRegistry.init( false );
  List<String> attributes =
      Arrays.asList( "url", "urlInField", "urlField", "encoding", "httpLogin", "httpPassword", "proxyHost",
          "proxyPort", "socketTimeout", "connectionTimeout", "closeIdleConnectionsTime", "argumentField",
          "argumentParameter", "headerField", "headerParameter", "fieldName", "resultCodeFieldName",
          "responseTimeFieldName", "responseHeaderFieldName" );

  Map<String, FieldLoadSaveValidator<?>> fieldLoadSaveValidatorAttributeMap =
      new HashMap<String, FieldLoadSaveValidator<?>>();

  // Arrays need to be consistent length
  FieldLoadSaveValidator<String[]> stringArrayLoadSaveValidator =
      new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 25 );
  fieldLoadSaveValidatorAttributeMap.put( "argumentField", stringArrayLoadSaveValidator );
  fieldLoadSaveValidatorAttributeMap.put( "argumentParameter", stringArrayLoadSaveValidator );
  fieldLoadSaveValidatorAttributeMap.put( "headerField", stringArrayLoadSaveValidator );
  fieldLoadSaveValidatorAttributeMap.put( "headerParameter", stringArrayLoadSaveValidator );

  loadSaveTester =
      new LoadSaveTester( HTTPMeta.class, attributes, new HashMap<String, String>(), new HashMap<String, String>(),
          fieldLoadSaveValidatorAttributeMap, new HashMap<String, FieldLoadSaveValidator<?>>() );
}
Example #7
Source File: MRUtil.java From pentaho-hadoop-shims with Apache License 2.0 | 6 votes |
/**
 * Initialize the Kettle environment with settings from the provided configuration
 *
 * @param conf Configuration to configure Kettle environment with
 */
private static void initKettleEnvironment( Configuration conf ) throws KettleException {
  if ( !KettleEnvironment.isInitialized() ) {
    String kettleHome = getKettleHomeProperty( conf );
    String pluginDir = getPluginDirProperty( conf );
    String metaStoreDir = getMetastoreDirProperty( conf );
    System.setProperty( "KETTLE_HOME", kettleHome );
    System.setProperty( Const.PLUGIN_BASE_FOLDERS_PROP, pluginDir );
    System.setProperty( Const.PENTAHO_METASTORE_FOLDER, metaStoreDir );

    KettleEnvironment.init();

    log.logBasic( BaseMessages.getString( MRUtil.class, "KettleHome.Info", kettleHome ) );
    log.logBasic( BaseMessages.getString( MRUtil.class, "PluginDirectory.Info", pluginDir ) );
    log.logBasic( BaseMessages.getString( MRUtil.class, "MetasStoreDirectory.Info", metaStoreDir ) );
  }
}
Example #8
Source File: CsvInputFileEncodingTransformIT.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Before
protected void setUp() throws Exception {
  super.setUp();
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  transMeta = new TransMeta();
  transMeta.setName( "csvinput1" );

  registry = PluginRegistry.getInstance();

  fileName = writeInputFile();

  injectorStep = createInjectorStep( transMeta, registry );
  csvInputStep = createCsvInputStep( transMeta, registry, "\"", false );
}
Example #9
Source File: TextFileInputIT.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Test
public void testGetDataFromAFolderRecursivelyFromPreviousStep() throws KettleException {
  KettleEnvironment.init();

  String path = getClass().getResource( "text-file-input-get-data-from-folder-from-previous-step.ktr" ).getPath();
  Variables variables = new Variables();
  variables.setVariable( "testfolder", getClass().getResource( "" ).getPath() );

  TransMeta transMeta = new TransMeta( path, variables );
  Trans trans = new Trans( transMeta );
  trans.prepareExecution( null );
  trans.startThreads();
  trans.waitUntilFinished();

  assertEquals( 14, trans.getSteps().get( 1 ).step.getLinesWritten() );
  assertEquals( 21, trans.getSteps().get( 1 ).step.getLinesInput() );
  // The path contains one entry of a folder
  assertEquals( 1, trans.getSteps().get( 0 ).step.getLinesWritten() );
}
Example #10
Source File: MappingInputFieldsTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@BeforeClass
public static void setUpBeforeClass() throws Exception {
  KettleEnvironment.init();
  // PluginRegistry.addPluginType(ValueMetaPluginType.getInstance());
  PluginRegistry.getInstance().registerPluginType( ValueMetaPluginType.class );

  Map<Class<?>, String> classes = new HashMap<Class<?>, String>();
  classes.put( ValueMetaInterface.class, "org.pentaho.di.core.row.value.ValueMetaString" );
  p1 = new Plugin( new String[] { "2" }, ValueMetaPluginType.class, ValueMetaInterface.class, "", "", "", "",
      false, true, classes, null, null, null );

  classes = new HashMap<Class<?>, String>();
  classes.put( ValueMetaInterface.class, "org.pentaho.di.core.row.value.ValueMetaInteger" );
  p2 = new Plugin( new String[] { "5" }, ValueMetaPluginType.class, ValueMetaInterface.class, "", "", "", "",
      false, true, classes, null, null, null );

  PluginRegistry.getInstance().registerPlugin( ValueMetaPluginType.class, p1 );
  PluginRegistry.getInstance().registerPlugin( ValueMetaPluginType.class, p2 );
}
Example #11
Source File: TextFileInputIT.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Test
public void testPDI18818() throws KettleException {
  KettleEnvironment.init();

  String path = getClass().getResource( "text-file-input-pdi-18818.ktr" ).getPath();
  Variables variables = new Variables();
  variables.setVariable( "testfolder", getClass().getResource( "" ).getPath() );

  TransMeta transMeta = new TransMeta( path, variables );
  Trans trans = new Trans( transMeta );
  trans.prepareExecution( null );
  trans.startThreads();
  trans.waitUntilFinished();

  // Did we read both values?
  assertEquals( 1, trans.getSteps().get( 0 ).step.getLinesWritten() );
  // Did we read both files?
  assertEquals( 6, trans.getSteps().get( 1 ).step.getLinesWritten() );
  // Did we find any nulls?
  assertEquals( 0, trans.getSteps().get( 4 ).step.getLinesRead() );
}
Example #12
Source File: ZipFileMetaLoadSaveTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Before
public void setUpLoadSave() throws Exception {
  KettleEnvironment.init();
  PluginRegistry.init( false );
  List<String> attributes =
      Arrays.asList( "dynamicSourceFileNameField", "dynamicTargetFileNameField", "baseFolderField",
          "moveToFolderField", "addResultFilenames", "overwriteZipEntry", "createParentFolder", "keepSouceFolder",
          "operationType" );

  Map<String, String> getterMap = new HashMap<String, String>();
  getterMap.put( "addResultFilenames", "isaddTargetFileNametoResult" );
  Map<String, String> setterMap = new HashMap<String, String>();
  setterMap.put( "addResultFilenames", "setaddTargetFileNametoResult" );

  Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
  attrValidatorMap.put( "operationType", new IntLoadSaveValidator( ZipFileMeta.operationTypeCode.length ) );

  Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();

  loadSaveTester =
      new LoadSaveTester( testMetaClass, attributes, getterMap, setterMap, attrValidatorMap, typeValidatorMap );
}
Example #13
Source File: ExecSQLRowIT.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@BeforeClass
public static void createDatabase() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "transname" );

  // Add the database connections
  for ( int i = 0; i < databasesXML.length; i++ ) {
    DatabaseMeta databaseMeta = new DatabaseMeta( databasesXML[i] );
    transMeta.addDatabase( databaseMeta );
  }

  DatabaseMeta dbInfo = transMeta.findDatabase( "db" );

  // Execute our setup SQLs in the database.
  database = new Database( loggingObject, dbInfo );
  database.connect();
  createTables( database );
  createData( database );
}
Example #14
Source File: CsvInput2TrailingSpacesThreeCharEnclosuresIT.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Test case for Get XML Data step, very simple example.
 *
 * @throws Exception
 *           Upon any exception
 */
@Test
public void testCSVInput1() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "csvinput1" );

  PluginRegistry registry = PluginRegistry.getInstance();

  String fileName = writeInputFile();

  StepMeta injectorStep = createInjectorStep( transMeta, registry );
  StepMeta csvInputStep = createCsvInputStep( transMeta, registry, "+++", true );

  createAndTestTrans(
      registry, transMeta, injectorStep, csvInputStep, fileName, createTextFileInputFields().length );
}
Example #15
Source File: CsvInput2IT.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Test case for Get XML Data step, very simple example.
 *
 * @throws Exception
 *           Upon any exception
 */
@Test
public void testCSVInput1() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "csvinput1" );

  PluginRegistry registry = PluginRegistry.getInstance();

  String fileName = writeInputFile();

  StepMeta injectorStep = createInjectorStep( transMeta, registry );
  StepMeta csvInputStep = createCsvInputStep( transMeta, registry, "\"", true );

  createAndTestTrans(
      registry, transMeta, injectorStep, csvInputStep, fileName, createTextFileInputFields().length );
}
Example #16
Source File: TextFileInputIT.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Test
public void testGetDataFromFolderWithInvalidFieldName() throws KettleException {
  KettleEnvironment.init();

  String path =
      getClass().getResource( "text-file-input-get-data-from-folder-from-previous-step-negative.ktr" ).getPath();
  Variables variables = new Variables();
  variables.setVariable( "testfolder", getClass().getResource( "" ).getPath() );

  TransMeta transMeta = new TransMeta( path, variables );
  Trans trans = new Trans( transMeta );
  trans.prepareExecution( null );
  trans.startThreads();
  trans.waitUntilFinished();

  assertEquals( 0, trans.getSteps().get( 1 ).step.getLinesWritten() );
  assertEquals( 0, trans.getSteps().get( 1 ).step.getLinesInput() );
  // The path contains one entry of a folder
  assertEquals( 1, trans.getSteps().get( 0 ).step.getLinesWritten() );
}
Example #17
Source File: CsvInput1IT.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Test case for Get XML Data step, very simple example.
 *
 * @throws Exception
 *           Upon any exception
 */
@Test
public void testCSVInput1() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "csvinput1" );

  PluginRegistry registry = PluginRegistry.getInstance();

  String fileName = writeInputFile();

  StepMeta injectorStep = createInjectorStep( transMeta, registry );
  StepMeta csvInputStep = createCsvInputStep( transMeta, registry, "\"", true );

  createAndTestTrans(
      registry, transMeta, injectorStep, csvInputStep, fileName, createTextFileInputFields().length );
}
Example #18
Source File: GetXMLDataTest.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@Test
public void testGetXMLData_MissingNodesYieldEmptyValues() throws Exception {
  KettleEnvironment.init();
  System.setProperty( Const.KETTLE_XML_MISSING_TAG_YIELDS_NULL_VALUE, "N" );
  testGetXMLData( Const.EMPTY_STRING );
}
Example #19
Source File: StepWithMappingMetaTest.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@Before
public void setupBefore() throws Exception {
  // Without initialization of the Kettle Environment, the load of the transformation fails
  // when run in Windows (saying it cannot find the Database plugin ID for Oracle). Digging into
  // it I discovered that it's during the read of the shared objects xml which doesn't reference Oracle
  // at all. Initializing the environment fixed everything.
  KettleEnvironment.init();
}
Example #20
Source File: PDI_2875_Test.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@BeforeClass
public static void setUp() throws KettleException {
  KettleEnvironment.init();
  smh =
      new StepMockHelper<TextFileInputMeta, TextFileInputData>( "CsvInputTest", TextFileInputMeta.class,
          TextFileInputData.class );
  when( smh.logChannelInterfaceFactory.create( any(), any( LoggingObjectInterface.class ) ) )
      .thenReturn( smh.logChannelInterface );
  when( smh.trans.isRunning() ).thenReturn( true );
}
Example #21
Source File: ExcelOutputTest.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@BeforeClass
public static void setUp() throws KettleException {
  KettleEnvironment.init();
  helper = new StepMockHelper<>( "ExcelOutputTest", ExcelOutputMeta.class, ExcelOutputData.class );
  when( helper.logChannelInterfaceFactory.create( any(), any( LoggingObjectInterface.class ) ) ).thenReturn(
      helper.logChannelInterface );
  when( helper.trans.isRunning() ).thenReturn( true );
}
Example #22
Source File: TableCompareIT.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@BeforeClass
public static void setUpBeforeClass() throws Exception {
  KettleEnvironment.init();
  log = new SimpleLoggingObject( "junit", LoggingObjectType.GENERAL, null );
  databaseMeta =
      new DatabaseMeta( "TableCompare", "Hypersonic", "JDBC", null, "mem:HSQLDB-JUNIT-LOGJOB", null, null, null );
}
Example #23
Source File: MetaInjectTemplateInFSAndRepoSpecMethodIT.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@BeforeClass
public static void initKettle() throws Exception {
  KettleEnvironment.init( false );

  Map<Class<?>, String> classMap = new HashMap<>();
  classMap.put( StepMetaInterface.class, "org.pentaho.di.trans.steps.metainject.MetaInjectMeta" );
  List<String> libraries = new ArrayList<>();
  PluginInterface plugin =
      new Plugin( new String[] { "MetaInject" }, StepPluginType.class, StepMetaInterface.class, "Flow",
          "MetaInjectMeta", null, null, false, false, classMap, libraries, null, null );
  PluginRegistry.getInstance().registerPlugin( StepPluginType.class, plugin );
}
Example #24
Source File: JavaScriptStringIT.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public void testStringsNum() throws Exception {
  KettleEnvironment.init();

  Locale.setDefault( Locale.ENGLISH );

  //
  // Create a javascript step
  //
  ScriptValuesMetaMod svm = new ScriptValuesMetaMod();

  ScriptValuesScript[] js =
      new ScriptValuesScript[] { new ScriptValuesScript( ScriptValuesScript.TRANSFORM_SCRIPT, "script",
          "var numb1 = str2num(trim(string.getString()), \"#.#\", \"en\");\n"
              + "var bool1 = isNum(string.getString());\n"
              + "var str1 = num2str(numb1);\n" ) };
  svm.setJSScripts( js );
  svm.setFieldname( new String[] { "numb1", "bool1", "str1" } );
  svm.setRename( new String[] { "", "", "" } );
  svm.setType( new int[] {
      ValueMetaInterface.TYPE_NUMBER, ValueMetaInterface.TYPE_BOOLEAN, ValueMetaInterface.TYPE_STRING } );
  svm.setLength( new int[] { -1, -1, -1 } );
  svm.setPrecision( new int[] { -1, -1, -1 } );
  svm.setReplace( new boolean[] { false, false, false, } );
  svm.setCompatible( true );

  // Generate a test transformation with an injector and a dummy:
  //
  String testStepname = "javascript";
  TransMeta transMeta = TransTestFactory.generateTestTransformation( new Variables(), svm, testStepname );

  // Now execute the transformation and get the result from the dummy step.
  //
  List<RowMetaAndData> result =
      TransTestFactory.executeTestTransformation( transMeta, TransTestFactory.INJECTOR_STEPNAME, testStepname,
          TransTestFactory.DUMMY_STEPNAME, createData3() );

  // Verify that this is what we expected...
  //
  checkRows( result, createResultData3() );
}
Example #25
Source File: JobImportIT.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@Before
public void setUp() throws KettleException {
  KettleEnvironment.init();
  Props.init( Props.TYPE_PROPERTIES_SPOON );

  deleteFolder( new File( REPOSITORY_ROOT_DIR ) );

  KettleFileRepositoryMeta repositoryMeta =
      new KettleFileRepositoryMeta( FILE_REPOSITORY_ID, FILE_REPOSITORY_NAME, FILE_REPOSITORY_DESC,
          REPOSITORY_ROOT_DIR );
  repository = new KettleFileRepository();
  repository.init( repositoryMeta );
  repository.connect( null, null );

  importer = new RepositoryImporter( repository );
}
Example #26
Source File: ActiveMQProviderTest.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@BeforeClass
public static void setupClass() throws Exception {
  StepPluginType.getInstance().handlePluginAnnotation(
      JmsProducerMeta.class,
      JmsProducerMeta.class.getAnnotation( org.pentaho.di.core.annotations.Step.class ),
      Collections.emptyList(), false, null );
  StepPluginType.getInstance().handlePluginAnnotation(
      JmsConsumerMeta.class,
      JmsConsumerMeta.class.getAnnotation( org.pentaho.di.core.annotations.Step.class ),
      Collections.emptyList(), false, null );
  KettleEnvironment.init();
}
Example #27
Source File: GroupByMetaTest.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@Before
public void setUpLoadSave() throws Exception {
  KettleEnvironment.init();
  PluginRegistry.init( false );
  List<String> attributes =
      Arrays.asList( "passAllRows", "directory", "prefix", "aggregateIgnored", "aggregateIgnoredField",
          "addingLineNrInGroup", "lineNrInGroupField", "alwaysGivingBackOneRow", "groupField", "aggregateField",
          "subjectField", "aggregateType", "valueField" );

  Map<String, String> getterMap = new HashMap<String, String>() {
    {
      put( "passAllRows", "passAllRows" );
    }
  };
  Map<String, String> setterMap = new HashMap<String, String>();

  FieldLoadSaveValidator<String[]> stringArrayLoadSaveValidator =
      new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 5 );

  Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
  attrValidatorMap.put( "groupField", stringArrayLoadSaveValidator );
  attrValidatorMap.put( "aggregateField", stringArrayLoadSaveValidator );
  attrValidatorMap.put( "subjectField", stringArrayLoadSaveValidator );
  attrValidatorMap.put( "valueField", stringArrayLoadSaveValidator );
  attrValidatorMap.put( "aggregateType",
      new PrimitiveIntArrayLoadSaveValidator( new IntLoadSaveValidator( GroupByMeta.typeGroupCode.length ), 5 ) );

  Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();

  loadSaveTester =
      new LoadSaveTester( testMetaClass, attributes, new ArrayList<String>(), new ArrayList<String>(),
          getterMap, setterMap, attrValidatorMap, typeValidatorMap, this );
}
Example #28
Source File: SymmetricCryptoTransMetaTest.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@Test
public void testRoundTrip() throws KettleException {
  KettleEnvironment.init();

  List<String> attributes =
      Arrays.asList( "operation_type", "algorithm", "schema", "secretKeyField", "messageField", "resultfieldname",
          "secretKey", "secretKeyInField", "readKeyAsBinary", "outputResultAsBinary" );

  Map<String, String> getterMap = new HashMap<String, String>();
  getterMap.put( "operation_type", "getOperationType" );
  getterMap.put( "algorithm", "getAlgorithm" );
  getterMap.put( "schema", "getSchema" );
  getterMap.put( "secretKeyField", "getSecretKeyField" );
  getterMap.put( "messageField", "getMessageField" );
  getterMap.put( "resultfieldname", "getResultfieldname" );
  getterMap.put( "secretKey", "getSecretKey" );
  getterMap.put( "secretKeyInField", "isSecretKeyInField" );
  getterMap.put( "readKeyAsBinary", "isReadKeyAsBinary" );
  getterMap.put( "outputResultAsBinary", "isOutputResultAsBinary" );

  Map<String, String> setterMap = new HashMap<String, String>();
  setterMap.put( "operation_type", "setOperationType" );
  setterMap.put( "algorithm", "setAlgorithm" );
  setterMap.put( "schema", "setSchema" );
  setterMap.put( "secretKeyField", "setsecretKeyField" );
  setterMap.put( "messageField", "setMessageField" );
  setterMap.put( "resultfieldname", "setResultfieldname" );
  setterMap.put( "secretKey", "setSecretKey" );
  setterMap.put( "secretKeyInField", "setSecretKeyInField" );
  setterMap.put( "readKeyAsBinary", "setReadKeyAsBinary" );
  setterMap.put( "outputResultAsBinary", "setOutputResultAsBinary" );

  Map<String, FieldLoadSaveValidator<?>> fieldLoadSaveValidator = new HashMap<String, FieldLoadSaveValidator<?>>();
  fieldLoadSaveValidator.put( "operation_type",
      new IntLoadSaveValidator( SymmetricCryptoTransMeta.operationTypeCode.length ) );

  LoadSaveTester loadSaveTester =
      new LoadSaveTester( SymmetricCryptoTransMeta.class, attributes, getterMap, setterMap,
          fieldLoadSaveValidator, new HashMap<String, FieldLoadSaveValidator<?>>() );

  loadSaveTester.testSerialization();
}
Example #29
Source File: KettleUtils.java From OpenKettleWebUI with Apache License 2.0 | 5 votes |
/**
 * @title initKettleFileRepository
 * @description Initialize a Kettle file repository
 * @param dir
 * @return KettleFileRepository
 * @throws KettleException
 */
public static KettleFileRepository initFileRepository(String dir) throws KettleException {
  KettleFileRepository repository = null;
  // Initialize the Kettle environment
  KettleEnvironment.init();
  // Repository meta object ("数据采集" is the repository description, "data collection")
  KettleFileRepositoryMeta repMeta = new KettleFileRepositoryMeta("", "", "数据采集", "file:///" + dir);
  // File-based repository
  repository = new KettleFileRepository();
  repository.init(repMeta);
  return repository;
}
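As a follow-up, here is a hedged sketch of how the repository returned by this helper might be used. It reuses only calls that already appear on this page (connect and loadRepositoryDirectoryTree, see Examples #4 and #25); the directory argument and the method name are placeholders, not taken from the original project.

// Hedged usage sketch; "/tmp/kettle-repo" is a placeholder directory.
public static void browseRepository() throws KettleException {
  KettleFileRepository repository = KettleUtils.initFileRepository("/tmp/kettle-repo");
  // File repositories need no credentials (see Examples #4 and #25).
  repository.connect(null, null);
  RepositoryDirectoryInterface tree = repository.loadRepositoryDirectoryTree();
  // The directory tree can now be traversed or passed to the repository's load methods.
  repository.disconnect();
}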