Java Code Examples for org.pentaho.di.repository.RepositoryDirectoryInterface#findDirectory()
The following examples show how to use org.pentaho.di.repository.RepositoryDirectoryInterface#findDirectory(). They are taken from the pentaho-kettle project; the originating source file is noted above each example.
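All of the examples share the same basic pattern: obtain the repository directory tree (or a root directory), then resolve a directory by path or by ObjectId with findDirectory(). Below is a minimal sketch of that pattern, assuming an already-connected Repository instance named repository and a placeholder path "/public/my_dir" and job name "my_job" (these names are illustrative only and do not appear in the examples).

// Load the full directory tree from the connected repository
// ("repository" is assumed to be an already-connected Repository instance).
RepositoryDirectoryInterface tree = repository.loadRepositoryDirectoryTree();

// Resolve a directory by path; findDirectory() returns null when the path does not exist.
RepositoryDirectoryInterface dir = tree.findDirectory( "/public/my_dir" );
if ( dir == null ) {
  throw new KettleException( "Directory /public/my_dir was not found in the repository" );
}

// The resolved directory can then be used to load or save repository objects,
// e.g. a job, as several of the examples below do ("my_job" is a placeholder name).
JobMeta jobMeta = repository.loadJob( "my_job", dir, null, null );

findDirectory() is also overloaded to accept an ObjectId; the KettleFileRepository examples below use that form to resolve a directory from its id.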
Example 1
Source File: AbstractBaseCommandExecutor.java From pentaho-kettle with Apache License 2.0
public RepositoryDirectoryInterface loadRepositoryDirectory( Repository repository, String dirName, String noRepoProvidedMsgTkn,
    String allocateAndConnectRepoMsgTkn, String cannotFindDirMsgTkn ) throws KettleException {

  if ( repository == null ) {
    System.out.println( BaseMessages.getString( getPkgClazz(), noRepoProvidedMsgTkn ) );
    return null;
  }

  RepositoryDirectoryInterface directory;

  // Default is the root directory
  logDebug( allocateAndConnectRepoMsgTkn );
  directory = repository.loadRepositoryDirectoryTree();

  if ( !StringUtils.isEmpty( dirName ) ) {
    directory = directory.findDirectory( dirName ); // Find the directory name if one is specified...

    if ( directory == null ) {
      System.out.println( BaseMessages.getString( getPkgClazz(), cannotFindDirMsgTkn, "" + dirName ) );
    }
  }

  return directory;
}
Example 2
Source File: UIEERepositoryDirectoryIT.java From pentaho-kettle with Apache License 2.0
@Test
public void testUiDelete() throws Exception {
  RepositoryDirectoryInterface rootDir = repository.loadRepositoryDirectoryTree();
  final String startDirName = "home";
  final String testDirName = "testdir";
  final String startDirPath = "/" + startDirName;
  final String testDirPath = startDirPath + "/" + testDirName;

  RepositoryDirectoryInterface startDir = rootDir.findDirectory( startDirName );
  final RepositoryDirectoryInterface testDirCreated = repository.createRepositoryDirectory( startDir, testDirName );
  assertNotNull( testDirCreated );
  assertNotNull( testDirCreated.getObjectId() );

  rootDir = repository.loadRepositoryDirectoryTree();
  final RepositoryDirectoryInterface startDirFound = repository.findDirectory( startDirPath );
  final RepositoryDirectoryInterface testDirFound = repository.findDirectory( testDirPath );
  Assert.assertNotNull( testDirFound );

  final UIEERepositoryDirectory startDirUi = new UIEERepositoryDirectory( startDirFound, null, repository );
  final UIEERepositoryDirectory testDirUi = new UIEERepositoryDirectory( testDirFound, startDirUi, repository );
  testDirUi.delete( true );

  RepositoryDirectoryInterface testDirFound2 = repository.findDirectory( testDirPath );
  Assert.assertNull( testDirFound2 );
}
Example 3
Source File: JobExportIT.java From pentaho-kettle with Apache License 2.0
/**
 * Given a Job located in a non-root directory of a repository,
 * referencing a Transformation via the 'Internal.Entry.Current.Directory' variable.
 * <br/>
 * When this Job is exported into a zip file,
 * then the referenced Transformation should be exported as well.
 */
@Test
public void shouldExportJobAndRelatedTransformationFile() throws IOException, MetaStoreException, KettleException {
  RepositoryDirectoryInterface repositoryDir = repository.loadRepositoryDirectoryTree();
  repositoryDir = repositoryDir.findDirectory( REPOSITORY_DIR );

  JobMeta jobMeta = repository.loadJob( JOB_NAME, repositoryDir, null, null );
  Job job = new Job( repository, jobMeta );

  ResourceUtil.serializeResourceExportInterface( EXPORT_FILE, job.getJobMeta(), job, repository, null );

  File zipFile = new File( EXPORT_FILE );
  assertTrue( zipFile.exists() );
  extractZip( zipFile, EXTRACT_DIR );

  // assert that both files, the job and the transformation, have been exported
  assertTrue( new File( EXTRACTED_JOB_FILE ).exists() );
  assertTrue( new File( EXTRACTED_TRANS_FILE ).exists() );
}
Example 4
Source File: UIEERepositoryDirectoryIT.java From pentaho-kettle with Apache License 2.0
@Test
public void testUiDeleteNotEmpty() throws Exception {
  RepositoryDirectoryInterface rootDir = repository.loadRepositoryDirectoryTree();
  final String startDirName = "home";
  final String testDirName = "testdir";
  final String testDir2Name = "testdir2";
  final String startDirPath = "/" + startDirName;
  final String testDirPath = startDirPath + "/" + testDirName;
  final String testDir2Path = testDirPath + "/" + testDir2Name;

  RepositoryDirectoryInterface startDir = rootDir.findDirectory( startDirName );
  final RepositoryDirectoryInterface testDirCreated = repository.createRepositoryDirectory( startDir, testDirName );
  final RepositoryDirectoryInterface testDir2Created = repository.createRepositoryDirectory( testDirCreated, testDir2Name );
  assertNotNull( testDirCreated );
  assertNotNull( testDirCreated.getObjectId() );
  assertNotNull( testDir2Created );

  rootDir = repository.loadRepositoryDirectoryTree();
  startDir = rootDir.findDirectory( startDirName );
  final RepositoryDirectoryInterface startDirFound = repository.findDirectory( startDirPath );
  final RepositoryDirectoryInterface testDirFound = repository.findDirectory( testDirPath );
  Assert.assertNotNull( testDirFound );
  final RepositoryDirectoryInterface testDir2Found = repository.findDirectory( testDir2Path );
  Assert.assertNotNull( testDir2Found );

  final UIEERepositoryDirectory startDirUi = new UIEERepositoryDirectory( startDirFound, null, repository );
  final UIEERepositoryDirectory testDirUi = new UIEERepositoryDirectory( testDirFound, startDirUi, repository );
  testDirUi.delete( true );

  RepositoryDirectoryInterface testDirFound2 = repository.findDirectory( testDirPath );
  Assert.assertNull( testDirFound2 );
}
Example 5
Source File: UIEERepositoryDirectoryIT.java From pentaho-kettle with Apache License 2.0
@Override
protected RepositoryDirectoryInterface loadStartDirectory() throws Exception {
  RepositoryDirectoryInterface rootDir = repository.loadRepositoryDirectoryTree();
  RepositoryDirectoryInterface startDir = rootDir.findDirectory( "public" );
  assertNotNull( startDir );
  return startDir;
}
Example 6
Source File: PurRepositoryIT.java From pentaho-kettle with Apache License 2.0
@Test
public void testLoadJob() throws Exception {
  RepositoryDirectoryInterface rootDir = initRepo();
  JobMeta jobMeta = createJobMeta( EXP_JOB_NAME );
  RepositoryDirectoryInterface jobsDir = rootDir.findDirectory( DIR_JOBS );
  repository.save( jobMeta, VERSION_COMMENT_V1, null );
  deleteStack.push( jobMeta );

  JobMeta fetchedJob = repository.loadJob( EXP_JOB_NAME, jobsDir, null, null );
  JobMeta jobMetaById = repository.loadJob( jobMeta.getObjectId(), null );
  assertEquals( fetchedJob, jobMetaById );
  assertNotNull( fetchedJob.getMetaStore() );
  assertTrue( fetchedJob.getMetaStore() == jobMetaById.getMetaStore() );
}
Example 7
Source File: PurRepositoryIT.java From pentaho-kettle with Apache License 2.0
@Override
protected RepositoryDirectoryInterface loadStartDirectory() throws Exception {
  RepositoryDirectoryInterface rootDir = repository.loadRepositoryDirectoryTree();
  RepositoryDirectoryInterface startDir = rootDir.findDirectory( "public" );
  assertNotNull( startDir );
  return startDir;
}
Example 8
Source File: JobImportIT.java From pentaho-kettle with Apache License 2.0
/**
 * Import a file containing three jobs referencing each other. Verify that the jobs import correctly, in particular
 * that the ${Internal.Entry.Current.Directory} variable is correctly set in Job1 and Job2.
 */
@Test
public void currentDirectoryVariableSetCorrectly() throws KettleException {
  RepositoryDirectoryInterface repositoryDir = repository.loadRepositoryDirectoryTree();
  repositoryDir = repositoryDir.findDirectory( "/" );
  importer.importAll( feedbackInterface, "", new String[] { EXPORT_FILE }, repositoryDir, true, true, "" );

  JobMeta job1 = repository.loadJob( "/public/SupportPostgres/Job1", repositoryDir, null, null );
  assertNotNull( job1 );
  assertEquals( 3, job1.getJobCopies().size() );
  JobEntryCopy jobEntryCopy1 = job1.getJobCopies().stream().filter( j -> j.getName().equals( "Job" ) ).findFirst().get();
  assertNotNull( jobEntryCopy1 );
  JobEntryJob jobEntryJob1 = (JobEntryJob) jobEntryCopy1.getEntry();
  assertEquals( "${Internal.Entry.Current.Directory}/Subjob", jobEntryJob1.getDirectory() );
  assertEquals( "Job2", jobEntryJob1.getJobName() );

  //repositoryDir = repositoryDir.findDirectory( "/public/SupportPostgres/Subjob" );
  JobMeta job2 = repository.loadJob( "/public/SupportPostgres/Subjob/Job2", repositoryDir, null, null );
  assertNotNull( job2 );
  assertEquals( 3, job2.getJobCopies().size() );
  JobEntryCopy jobEntryCopy2 = job2.getJobCopies().stream().filter( j -> j.getName().equals( "Job" ) ).findFirst().get();
  assertNotNull( jobEntryCopy2 );
  JobEntryJob jobEntryJob2 = (JobEntryJob) jobEntryCopy2.getEntry();
  assertEquals( "${Internal.Entry.Current.Directory}", jobEntryJob2.getDirectory() );
  assertEquals( "Job3", jobEntryJob2.getJobName() );
}
Example 9
Source File: KettleFileRepository.java From pentaho-kettle with Apache License 2.0
@Override
public ObjectId renameRepositoryDirectory( ObjectId id, RepositoryDirectoryInterface newParentDir, String newName ) throws KettleException {
  if ( newParentDir != null || newName != null ) {
    try {
      // In case of a root object, the ID is the same as the relative filename...
      RepositoryDirectoryInterface tree = loadRepositoryDirectoryTree();
      RepositoryDirectoryInterface dir = tree.findDirectory( id );
      if ( dir == null ) {
        throw new KettleException( "Could not find folder [" + id + "]" );
      }

      // If newName is null, keep the current name
      newName = ( newName != null ) ? newName : dir.getName();
      FileObject folder = KettleVFS.getFileObject( dir.getPath() );

      String newFolderName = null;
      if ( newParentDir != null ) {
        FileObject newParentFolder = KettleVFS.getFileObject( newParentDir.getPath() );
        newFolderName = newParentFolder.toString() + "/" + newName;
      } else {
        newFolderName = folder.getParent().toString() + "/" + newName;
      }

      FileObject newFolder = KettleVFS.getFileObject( newFolderName );
      folder.moveTo( newFolder );

      return new StringObjectId( dir.getObjectId() );
    } catch ( Exception e ) {
      throw new KettleException( "Unable to rename directory folder to [" + id + "]" );
    }
  }
  return ( id );
}
Example 10
Source File: KettleFileRepository.java From pentaho-kettle with Apache License 2.0
@Override
public List<RepositoryElementMetaInterface> getJobObjects( ObjectId id_directory, boolean includeDeleted ) throws KettleException {
  try {
    List<RepositoryElementMetaInterface> list = new ArrayList<RepositoryElementMetaInterface>();

    RepositoryDirectoryInterface tree = loadRepositoryDirectoryTree();
    RepositoryDirectoryInterface directory = tree.findDirectory( id_directory );

    String folderName = calcDirectoryName( directory );
    FileObject folder = KettleVFS.getFileObject( folderName );

    for ( FileObject child : folder.getChildren() ) {
      if ( child.getType().equals( FileType.FILE ) ) {
        if ( !child.isHidden() || !repositoryMeta.isHidingHiddenFiles() ) {
          String name = child.getName().getBaseName();

          if ( name.endsWith( EXT_JOB ) ) {
            String jobName = name.substring( 0, name.length() - 4 );

            ObjectId id = new StringObjectId( calcObjectId( directory, jobName, EXT_JOB ) );
            Date date = new Date( child.getContent().getLastModifiedTime() );
            list.add( new RepositoryObject( id, jobName, directory, "-", date, RepositoryObjectType.JOB, "", false ) );
          }
        }
      }
    }

    return list;
  } catch ( Exception e ) {
    throw new KettleException( "Unable to get list of jobs in folder with id : " + id_directory, e );
  }
}
Example 11
Source File: KettleFileRepository.java From pentaho-kettle with Apache License 2.0
@Override
public List<RepositoryElementMetaInterface> getTransformationObjects( ObjectId idDirectory, boolean includeDeleted ) throws KettleException {
  try {
    List<RepositoryElementMetaInterface> list = new ArrayList<RepositoryElementMetaInterface>();

    RepositoryDirectoryInterface tree = loadRepositoryDirectoryTree();
    RepositoryDirectoryInterface directory = tree.findDirectory( idDirectory );

    String folderName = calcDirectoryName( directory );
    FileObject folder = KettleVFS.getFileObject( folderName );

    for ( FileObject child : folder.getChildren() ) {
      if ( child.getType().equals( FileType.FILE ) ) {
        if ( !child.isHidden() || !repositoryMeta.isHidingHiddenFiles() ) {
          String name = child.getName().getBaseName();

          if ( name.endsWith( EXT_TRANSFORMATION ) ) {
            String transName = name.substring( 0, name.length() - 4 );

            ObjectId id = new StringObjectId( calcObjectId( directory, transName, EXT_TRANSFORMATION ) );
            Date date = new Date( child.getContent().getLastModifiedTime() );
            list.add( new RepositoryObject( id, transName, directory, "-", date, RepositoryObjectType.TRANSFORMATION, "", false ) );
          }
        }
      }
    }

    return list;
  } catch ( Exception e ) {
    throw new KettleException( "Unable to get list of transformations in folder with id : " + idDirectory, e );
  }
}
Example 12
Source File: KettleFileRepository.java From pentaho-kettle with Apache License 2.0
@Override
public String[] getTransformationNames( ObjectId id_directory, boolean includeDeleted ) throws KettleException {
  try {
    List<String> list = new ArrayList<String>();

    RepositoryDirectoryInterface tree = loadRepositoryDirectoryTree();
    RepositoryDirectoryInterface directory = tree.findDirectory( id_directory );

    String folderName = calcDirectoryName( directory );
    FileObject folder = KettleVFS.getFileObject( folderName );

    for ( FileObject child : folder.getChildren() ) {
      if ( child.getType().equals( FileType.FILE ) ) {
        if ( !child.isHidden() || !repositoryMeta.isHidingHiddenFiles() ) {
          String name = child.getName().getBaseName();

          if ( name.endsWith( EXT_TRANSFORMATION ) ) {
            String transName = name.substring( 0, name.length() - 4 );
            list.add( transName );
          }
        }
      }
    }

    return list.toArray( new String[list.size()] );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to get list of transformations names in folder with id : " + id_directory, e );
  }
}
Example 13
Source File: KettleFileRepository.java From pentaho-kettle with Apache License 2.0
public ObjectId[] getSubDirectoryIDs( ObjectId id_directory ) throws KettleException {
  RepositoryDirectoryInterface tree = loadRepositoryDirectoryTree();
  RepositoryDirectoryInterface directory = tree.findDirectory( id_directory );

  ObjectId[] objectIds = new ObjectId[directory.getNrSubdirectories()];
  for ( int i = 0; i < objectIds.length; i++ ) {
    objectIds[i] = directory.getSubdirectory( i ).getObjectId();
  }
  return objectIds;
}
Example 14
Source File: KettleFileRepository.java From pentaho-kettle with Apache License 2.0
@Override
public String[] getJobNames( ObjectId id_directory, boolean includeDeleted ) throws KettleException {
  try {
    List<String> list = new ArrayList<String>();

    RepositoryDirectoryInterface tree = loadRepositoryDirectoryTree();
    RepositoryDirectoryInterface directory = tree.findDirectory( id_directory );

    String folderName = calcDirectoryName( directory );
    FileObject folder = KettleVFS.getFileObject( folderName );

    for ( FileObject child : folder.getChildren() ) {
      if ( child.getType().equals( FileType.FILE ) ) {
        if ( !child.isHidden() || !repositoryMeta.isHidingHiddenFiles() ) {
          String name = child.getName().getBaseName();

          if ( name.endsWith( EXT_JOB ) ) {
            String jobName = name.substring( 0, name.length() - 4 );
            list.add( jobName );
          }
        }
      }
    }

    return list.toArray( new String[list.size()] );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to get list of job names in folder with id : " + id_directory, e );
  }
}
Example 15
Source File: KettleFileRepository.java From pentaho-kettle with Apache License 2.0
@Override
public String[] getDirectoryNames( ObjectId id_directory ) throws KettleException {
  RepositoryDirectoryInterface tree = loadRepositoryDirectoryTree();
  RepositoryDirectoryInterface directory = tree.findDirectory( id_directory );

  String[] names = new String[directory.getNrSubdirectories()];
  for ( int i = 0; i < names.length; i++ ) {
    names[i] = directory.getSubdirectory( i ).getName();
  }
  return names;
}
Example 16
Source File: GetRepositoryNames.java From pentaho-kettle with Apache License 2.0
@SuppressWarnings( "deprecation" ) private List<RepositoryElementMetaInterface> getRepositoryObjects() throws KettleException { try { // The repository is available in the parent transformation metadata // Repository repository = getTransMeta().getRepository(); // Now populate the list... // List<RepositoryElementMetaInterface> list = new ArrayList<RepositoryElementMetaInterface>(); if ( repository instanceof RepositoryExtended ) { RepositoryExtended extendedRep = (RepositoryExtended) repository; for ( int i = 0; i < meta.getDirectory().length; i++ ) { String directoryPath = environmentSubstitute( meta.getDirectory()[i] ); String filter = null; // by default we look for current level int depth = 0; if ( meta.getObjectTypeSelection().areTransformationsSelected() ) { filter = "*.ktr"; } if ( meta.getObjectTypeSelection().areJobsSelected() ) { // if we have selected the job and transformation than we have applied filter with both condition filter = Utils.isEmpty( filter ) ? "*.kjb" : filter + "|*.kjb"; } // should include unlimited subfolder if ( meta.getIncludeSubFolders()[i] ) { depth = -1; } RepositoryDirectoryInterface directory = extendedRep.loadRepositoryDirectoryTree( directoryPath, filter, depth, BooleanUtils .isTrue( repository.getUserInfo().isAdmin() ), false, false ); list.addAll( getRepositoryObjects( directory, environmentSubstitute( meta.getNameMask()[i] ), environmentSubstitute( meta.getExcludeNameMask()[i] ) ) ); } } else { // Get the repository directory tree. // RepositoryDirectoryInterface tree = repository.loadRepositoryDirectoryTree(); // Loop over the directories and add the discovered objects to the list... // for ( int i = 0; i < meta.getDirectory().length; i++ ) { RepositoryDirectoryInterface dir = tree.findDirectory( environmentSubstitute( meta.getDirectory()[i] ) ); if ( dir != null ) { List<RepositoryElementMetaInterface> objects = getRepositoryObjects( repository, dir, meta.getIncludeSubFolders()[i], environmentSubstitute( meta .getNameMask()[i] ), environmentSubstitute( meta.getExcludeNameMask()[i] ) ); list.addAll( objects ); } } } return list; } catch ( Exception e ) { throw new KettleException( "Unable to get the list of repository objects from the repository", e ); } }
Example 17
Source File: PurRepositoryIT.java From pentaho-kettle with Apache License 2.0
@Test
public void testExport() throws Exception {
  final String exportFileName = new File( "test.export" ).getAbsolutePath(); //$NON-NLS-1$

  RepositoryDirectoryInterface rootDir = initRepo();

  String uniqueTransName = EXP_TRANS_NAME.concat( EXP_DBMETA_NAME );
  TransMeta transMeta = createTransMeta( EXP_DBMETA_NAME );

  // Create a database association
  DatabaseMeta dbMeta = createDatabaseMeta( EXP_DBMETA_NAME );
  repository.save( dbMeta, VERSION_COMMENT_V1, null );
  TableInputMeta tableInputMeta = new TableInputMeta();
  tableInputMeta.setDatabaseMeta( dbMeta );
  transMeta.addStep( new StepMeta( EXP_TRANS_STEP_1_NAME, tableInputMeta ) );

  RepositoryDirectoryInterface transDir = rootDir.findDirectory( DIR_TRANSFORMATIONS );
  repository.save( transMeta, VERSION_COMMENT_V1, null );
  deleteStack.push( transMeta ); // So this transformation is cleaned up afterward
  assertNotNull( transMeta.getObjectId() );
  ObjectRevision version = transMeta.getObjectRevision();
  assertNotNull( version );
  assertTrue( hasVersionWithComment( transMeta, VERSION_COMMENT_V1 ) );
  assertTrue( repository.exists( uniqueTransName, transDir, RepositoryObjectType.TRANSFORMATION ) );

  JobMeta jobMeta = createJobMeta( EXP_JOB_NAME );
  RepositoryDirectoryInterface jobsDir = rootDir.findDirectory( DIR_JOBS );
  repository.save( jobMeta, VERSION_COMMENT_V1, null );
  deleteStack.push( jobMeta );
  assertNotNull( jobMeta.getObjectId() );
  version = jobMeta.getObjectRevision();
  assertNotNull( version );
  assertTrue( hasVersionWithComment( jobMeta, VERSION_COMMENT_V1 ) );
  assertTrue( repository.exists( EXP_JOB_NAME, jobsDir, RepositoryObjectType.JOB ) );

  LogListener errorLogListener = new LogListener( LogLevel.ERROR );
  KettleLogStore.getAppender().addLoggingEventListener( errorLogListener );

  try {
    repository.getExporter().exportAllObjects( new MockProgressMonitorListener(), exportFileName, null, "all" ); //$NON-NLS-1$
    FileObject exportFile = KettleVFS.getFileObject( exportFileName );
    assertFalse( "file left open", exportFile.getContent().isOpen() );
    assertNotNull( exportFile );
    MockRepositoryExportParser parser = new MockRepositoryExportParser();
    SAXParserFactory.newInstance().newSAXParser().parse( KettleVFS.getInputStream( exportFile ), parser );
    if ( parser.getFatalError() != null ) {
      throw parser.getFatalError();
    }
    assertNotNull( "No nodes found in export", parser.getNodeNames() ); //$NON-NLS-1$
    assertTrue( "No nodes found in export", !parser.getNodeNames().isEmpty() ); //$NON-NLS-1$
    assertEquals( "Incorrect number of nodes", 5, parser.getNodeNames().size() ); //$NON-NLS-1$
    assertEquals( "Incorrect number of transformations", 1, parser.getNodesWithName( "transformation" ).size() ); //$NON-NLS-1$ //$NON-NLS-2$
    assertEquals( "Incorrect number of jobs", 1, parser.getNodesWithName( "job" ).size() ); //$NON-NLS-1$ //$NON-NLS-2$
    assertTrue( "log error", errorLogListener.getEvents().isEmpty() );
  } finally {
    KettleVFS.getFileObject( exportFileName ).delete();
    KettleLogStore.getAppender().removeLoggingEventListener( errorLogListener );
  }
}
Example 18
Source File: PurRepositoryIT.java From pentaho-kettle with Apache License 2.0
@Test
public void testExportWithRules() throws Exception {
  String fileName = "testExportWithRuled.xml";
  final String exportFileName = new File( fileName ).getAbsolutePath(); //$NON-NLS-1$

  RepositoryDirectoryInterface rootDir = initRepo();

  String transWithoutNoteName = "2" + EXP_DBMETA_NAME;
  TransMeta transWithoutNote = createTransMeta( transWithoutNoteName );
  String transUniqueName = EXP_TRANS_NAME.concat( transWithoutNoteName );

  RepositoryDirectoryInterface transDir = rootDir.findDirectory( DIR_TRANSFORMATIONS );
  repository.save( transWithoutNote, VERSION_COMMENT_V1, null );
  deleteStack.push( transWithoutNote ); // So this transformation is cleaned up afterward
  assertNotNull( transWithoutNote.getObjectId() );
  assertTrue( hasVersionWithComment( transWithoutNote, VERSION_COMMENT_V1 ) );
  assertTrue( repository.exists( transUniqueName, transDir, RepositoryObjectType.TRANSFORMATION ) );

  // Second transformation (contains a note)
  String transWithNoteName = "1" + EXP_DBMETA_NAME;
  TransMeta transWithNote = createTransMeta( transWithNoteName );
  transUniqueName = EXP_TRANS_NAME.concat( EXP_DBMETA_NAME );
  TransMeta transWithRules = createTransMeta( EXP_DBMETA_NAME );
  NotePadMeta note = new NotePadMeta( "Note Message", 1, 1, 100, 5 );
  transWithRules.addNote( note );
  repository.save( transWithRules, VERSION_COMMENT_V1, null );
  deleteStack.push( transWithRules ); // So this transformation is cleaned up afterward
  assertNotNull( transWithRules.getObjectId() );
  assertTrue( hasVersionWithComment( transWithRules, VERSION_COMMENT_V1 ) );
  assertTrue( repository.exists( transUniqueName, transDir, RepositoryObjectType.TRANSFORMATION ) );

  // create rules for export to .xml file
  List<ImportRuleInterface> rules = new AbstractList<ImportRuleInterface>() {
    @Override
    public ImportRuleInterface get( int index ) {
      TransformationHasANoteImportRule rule = new TransformationHasANoteImportRule();
      rule.setEnabled( true );
      return rule;
    }

    @Override
    public int size() {
      return 1;
    }
  };
  ImportRules importRules = new ImportRules();
  importRules.setRules( rules );

  // create exporter
  IRepositoryExporter exporter = repository.getExporter();
  exporter.setImportRulesToValidate( importRules );

  // export itself
  try {
    exporter.exportAllObjects( new MockProgressMonitorListener(), exportFileName, null, "all" ); //$NON-NLS-1$
    FileObject exportFile = KettleVFS.getFileObject( exportFileName );
    assertNotNull( exportFile );
    MockRepositoryExportParser parser = new MockRepositoryExportParser();
    SAXParserFactory.newInstance().newSAXParser().parse( KettleVFS.getInputStream( exportFile ), parser );
    if ( parser.getFatalError() != null ) {
      throw parser.getFatalError();
    }
    // only the transformation with the note is expected to be exported
    assertEquals( "Incorrect number of transformations", 1,
      parser.getNodesWithName( RepositoryObjectType.TRANSFORMATION.getTypeDescription() ).size() ); //$NON-NLS-1$ //$NON-NLS-2$
  } finally {
    KettleVFS.getFileObject( exportFileName ).delete();
  }
}
Example 19
Source File: PurRepository_SharedObjects_Links_IT.java From pentaho-kettle with Apache License 2.0
@SuppressWarnings( "unchecked" ) private void testReadSharedObjects( GenericMeta gMeta ) throws Exception { PurRepository pur = (PurRepository) repository; RepositoryDirectoryInterface rootDir = initRepo(); SlaveServer slave1 = createSlaveServer( "slave1" ); SlaveServer slave2 = createSlaveServer( "slave2" ); pur.save( slave1, VERSION_COMMENT_V1, null ); pur.save( slave2, VERSION_COMMENT_V1, null ); AbstractMeta meta = gMeta.createFilled(); meta.getSlaveServers().add( slave1 ); meta.getSlaveServers().add( slave2 ); rootDir.findDirectory( DIR_TRANSFORMATIONS ); pur.save( meta, VERSION_COMMENT_V1, null ); String xmlText = meta.getXML(); try { // import transformation from file meta = gMeta.createEmpty(); DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); Document doc = dBuilder.parse( IOUtils.toInputStream( xmlText ) ); gMeta.loadFromXml( doc.getParentNode() ); List<SharedObjectInterface> sharedObjects = (List<SharedObjectInterface>) pur.loadAndCacheSharedObjects( false ).get( RepositoryObjectType.SLAVE_SERVER ); for ( int i = 0; i < meta.getSlaveServers().size(); i++ ) { for ( int j = 0; j < sharedObjects.size(); j++ ) { SlaveServer s1 = meta.getSlaveServers().get( i ); SlaveServer s2 = (SlaveServer) sharedObjects.get( j ); if ( s1 == s2 ) { fail( "Trans/job has direct links on slave servers from cache" ); } } } } finally { pur.deleteSlave( slave1.getObjectId() ); pur.deleteSlave( slave2.getObjectId() ); pur.clearSharedObjectCache(); } }