org.pentaho.di.job.JobMeta Java Examples
The following examples show how to use
org.pentaho.di.job.JobMeta.
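Before the project examples, here is a minimal sketch of the class in use. It is not taken from any of the files below; the class name JobMetaSketch, the job name, and the description are illustrative, and it assumes the pentaho-kettle core libraries are on the classpath.

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.job.JobMeta;

public class JobMetaSketch {
  public static void main( String[] args ) throws Exception {
    // Initialize the Kettle environment (plugin registry, variables) before working with metadata.
    KettleEnvironment.init();

    // Build an empty job definition in memory; it is not yet attached to a repository or a file.
    JobMeta jobMeta = new JobMeta();
    jobMeta.setName( "example_job" );                        // illustrative name
    jobMeta.setDescription( "Illustrative JobMeta usage" );  // illustrative description

    System.out.println( jobMeta.getName() );       // "example_job"
    System.out.println( jobMeta.nrJobEntries() );  // 0 - no job entries added yet
    System.out.println( jobMeta.getXML() );        // serialized <job> definition
  }
}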
Example #1
Source File: KettleFileRepositoryTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Test
public void testCurrentDirJob() throws Exception {
  final String dirName = "dirName";
  final String jobName = "job";
  JobMeta setupJobMeta = new JobMeta();
  setupJobMeta.setName( jobName );
  RepositoryDirectoryInterface repoDir = repository.createRepositoryDirectory( new RepositoryDirectory(), dirName );
  setupJobMeta.setRepositoryDirectory( repoDir );
  repository.save( setupJobMeta, "" );

  JobMeta jobMeta = repository.loadJob( jobName, repoDir, null, "" );
  assertEquals( repository, jobMeta.getRepository() );
  assertEquals( repoDir.getPath(), jobMeta.getRepositoryDirectory().getPath() );

  jobMeta.setInternalKettleVariables();
  String currentDir = jobMeta.getVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY );
  assertEquals( repoDir.getPath(), currentDir );
}
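The test above exercises the repository round trip (save, then loadJob). For comparison, here is a sketch of the common file-based pattern: loading a JobMeta straight from a .kjb file and executing it. It is not part of the test; the file path and class name are placeholders, and it assumes KettleEnvironment.init() succeeds in your environment.

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.Result;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;

public class RunKjbFileSketch {
  public static void main( String[] args ) throws Exception {
    KettleEnvironment.init();

    // Load the job definition straight from a .kjb file; the second argument is the
    // repository, which is null here because the job lives on disk.
    JobMeta fileJobMeta = new JobMeta( "/path/to/example.kjb", null );

    Job job = new Job( null, fileJobMeta );
    job.start();
    job.waitUntilFinished();

    Result result = job.getResult();
    System.out.println( "Job finished with " + result.getNrErrors() + " error(s)" );
  }
}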
Example #2
Source File: JobFileListenerTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
private void testProcessLinkedJobsWithNoFilename( final ObjectLocationSpecificationMethod method ) {
  JobEntryJob jobJobExecutor = spy( new JobEntryJob() );
  jobJobExecutor.setFileName( null );
  jobJobExecutor.setDirectory( "/path/to" );
  jobJobExecutor.setJobName( "Job1" );
  jobJobExecutor.setSpecificationMethod( method );
  JobEntryCopy jobEntry = mock( JobEntryCopy.class );
  when( jobEntry.getEntry() ).thenReturn( jobJobExecutor );

  JobMeta parent = mock( JobMeta.class );
  when( parent.nrJobEntries() ).thenReturn( 1 );
  when( parent.getJobEntry( 0 ) ).thenReturn( jobEntry );

  JobMeta result = jobFileListener.processLinkedJobs( parent );

  JobEntryCopy meta = result.getJobEntry( 0 );
  assertNotNull( meta );
  JobEntryJob resultExecMeta = (JobEntryJob) meta.getEntry();
  assertEquals( ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME, resultExecMeta.getSpecificationMethod() );
  assertEquals( resultExecMeta.getDirectory(), "/path/to" );
  assertEquals( resultExecMeta.getJobName(), "Job1" );
}
Example #3
Source File: StarModelerPerspective.java From pentaho-kettle with Apache License 2.0 | 6 votes |
protected void generateSqlJobButton( StarDomain starDomain ) {
  final Spoon spoon = Spoon.getInstance();
  List<DatabaseMeta> sharedDatabases = SharedDatabaseUtil.loadSharedDatabases();

  // TODO: validate presence of repository, repository directory
  //
  JobGenerator jobGenerator =
    new JobGenerator( starDomain, spoon.rep, new RepositoryDirectory(), sharedDatabases, defaultLocale );

  try {
    JobMeta jobMeta = jobGenerator.generateSqlJob();
    spoon.addJobGraph( jobMeta );
    SpoonPerspectiveManager.getInstance().activatePerspective( MainSpoonPerspective.class );
  } catch ( Exception e ) {
    new ErrorDialog( spoon.getShell(),
      BaseMessages.getString( PKG, "StarModelerPerspective.ErrorGeneratingSqlJob.Title" ),
      BaseMessages.getString( PKG, "StarModelerPerspective.ErrorGeneratingSqlJob.Message" ), e );
  }
}
Example #4
Source File: JobExecutorMetaTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Test
public void testExportResources() throws KettleException {
  JobExecutorMeta jobExecutorMeta = spy( new JobExecutorMeta() );
  JobMeta jobMeta = mock( JobMeta.class );
  String testName = "test";

  doReturn( jobMeta ).when( jobExecutorMeta ).loadJobMetaProxy( any( JobExecutorMeta.class ),
    any( Repository.class ), any( VariableSpace.class ) );
  when( jobMeta.exportResources( any( JobMeta.class ), any( Map.class ), any( ResourceNamingInterface.class ),
    any( Repository.class ), any( IMetaStore.class ) ) ).thenReturn( testName );

  jobExecutorMeta.exportResources( null, null, null, null, null );

  verify( jobMeta ).setFilename( "${" + Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY + "}/" + testName );
  verify( jobExecutorMeta ).setSpecificationMethod( ObjectLocationSpecificationMethod.FILENAME );
}
Example #5
Source File: SpoonTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Test
public void testJobToFileWithoutNameSaveToFile() throws Exception {
  JobMeta mockJobMeta = mock( JobMeta.class );
  prepareSetSaveTests( spoon, log, mockSpoonPerspective, mockJobMeta, true, false, "NotMainSpoonPerspective",
    true, true, null, null, true, true );

  doCallRealMethod().when( spoon ).saveToFile( mockJobMeta );
  doReturn( true ).when( spoon ).saveFileAs( mockJobMeta );

  assertTrue( spoon.saveToFile( mockJobMeta ) );
  verify( mockJobMeta ).setRepository( spoon.rep );
  verify( mockJobMeta ).setMetaStore( spoon.metaStore );
  verify( spoon.delegates.tabs ).renameTabs();
  verify( spoon ).enableMenus();
}
Example #6
Source File: JobEntryJob.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Override
public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space, Repository repository,
    IMetaStore metaStore ) {
  if ( setLogfile ) {
    JobEntryValidatorUtils.andValidator().validate( this, "logfile", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
  }

  if ( null != directory ) {
    // if from repo
    JobEntryValidatorUtils.andValidator().validate( this, "directory", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notNullValidator() ) );
    JobEntryValidatorUtils.andValidator().validate( this, "jobName", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
  } else {
    // else from xml file
    JobEntryValidatorUtils.andValidator().validate( this, "filename", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
  }
}
Example #7
Source File: SpoonJobDelegateTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Before
public void before() {
  jobMap = new ArrayList<JobMeta>();
  jobMeta = mock( JobMeta.class );
  delegate = mock( SpoonJobDelegate.class );
  spoon = mock( Spoon.class );
  spoon.delegates = mock( SpoonDelegates.class );
  spoon.delegates.tabs = mock( SpoonTabsDelegate.class );
  spoon.variables = mock( RowMetaAndData.class );
  delegate.spoon = spoon;

  doReturn( jobMap ).when( delegate ).getJobList();
  doReturn( spoon ).when( delegate ).getSpoon();

  jobLogTable = mock( JobLogTable.class );
}
Example #8
Source File: StreamToJobNodeConverterTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
@Test
public void testConvertJobWithMissingPlugins() throws IOException, KettleException {
  RepositoryFile repositoryFile = new RepositoryFile.Builder( "test file" ).build();
  IUnifiedRepository pur = mock( IUnifiedRepository.class );
  when( pur.getFileById( "MissingEntries.ktr" ) ).thenReturn( repositoryFile );

  JobMeta jobMeta = new JobMeta();

  Repository repository = mock( Repository.class );
  when( repository.loadJob( any( StringObjectId.class ), anyString() ) ).thenReturn( jobMeta );

  StreamToJobNodeConverter jobNodeConverter = new StreamToJobNodeConverter( pur );
  jobNodeConverter = spy( jobNodeConverter );
  doReturn( repository ).when( jobNodeConverter ).connectToRepository();

  try {
    jobNodeConverter.convert( getClass().getResource( "MissingEntries.kjb" ).openStream(), "UTF-8",
      "application/vnd.pentaho.transformation" );
  } catch ( ConverterException e ) {
    assertTrue( e.getMessage().contains( "MissingPlugin" ) );
    return;
  }
  fail();
}
Example #9
Source File: PurRepository.java From pentaho-kettle with Apache License 2.0 | 6 votes |
private JobMeta buildJobMeta( final RepositoryFile file, final RepositoryDirectoryInterface parentDir,
    final NodeRepositoryFileData data, final ObjectRevision revision ) throws KettleException {
  JobMeta jobMeta = new JobMeta();
  jobMeta.setName( file.getTitle() );
  jobMeta.setFilename( file.getName() );
  jobMeta.setDescription( file.getDescription() );
  jobMeta.setObjectId( new StringObjectId( file.getId().toString() ) );
  jobMeta.setObjectRevision( revision );
  jobMeta.setRepository( this );
  jobMeta.setRepositoryDirectory( parentDir );
  jobMeta.setMetaStore( getMetaStore() );
  readJobMetaSharedObjects( jobMeta ); // This should read from the local cache
  jobDelegate.dataNodeToElement( data.getNode(), jobMeta );
  jobMeta.clearChanged();
  return jobMeta;
}
Example #10
Source File: SpoonDBDelegate.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Get & show the SQL required to run the loaded job entry...
 */
public void getJobSQL( JobMeta jobMeta ) {
  GetJobSQLProgressDialog pspd = new GetJobSQLProgressDialog( spoon.getShell(), jobMeta, spoon.getRepository() );
  List<SQLStatement> stats = pspd.open();
  if ( stats != null ) {
    // null means error, but we already displayed the error
    if ( stats.size() > 0 ) {
      SQLStatementsDialog ssd = new SQLStatementsDialog( spoon.getShell(), jobMeta, SWT.NONE, stats );
      ssd.open();
    } else {
      MessageBox mb = new MessageBox( spoon.getShell(), SWT.OK | SWT.ICON_INFORMATION );
      mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.JobNoSQLNeedEexecuted.Message" ) );
      mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.JobNoSQLNeedEexecuted.Title" ) );
      mb.open();
    }
  }
}
Example #11
Source File: JobEntryXMLWellFormed.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space, Repository repository,
    IMetaStore metaStore ) {
  boolean res = andValidator().validate( this, "arguments", remarks, putValidators( notNullValidator() ) );

  if ( res == false ) {
    return;
  }

  ValidatorContext ctx = new ValidatorContext();
  putVariableSpace( ctx, getVariables() );
  putValidators( ctx, notNullValidator(), fileExistsValidator() );

  for ( int i = 0; i < source_filefolder.length; i++ ) {
    andValidator().validate( this, "arguments[" + i + "]", remarks, ctx );
  }
}
Example #12
Source File: RepositoryFileProvider.java From pentaho-kettle with Apache License 2.0 | 6 votes |
private void isFileOpenedInFolder( String path ) throws KettleException {
  List<TransMeta> openedTransFiles = getSpoon().delegates.trans.getTransformationList();
  for ( TransMeta t : openedTransFiles ) {
    if ( t.getRepositoryDirectory().getPath() != null
        && ( t.getRepositoryDirectory().getPath() + "/" ).startsWith( path + "/" ) ) {
      throw new KettleTransException();
    }
  }

  List<JobMeta> openedJobFiles = getSpoon().delegates.jobs.getJobList();
  for ( JobMeta j : openedJobFiles ) {
    if ( j.getRepositoryDirectory().getPath() != null
        && ( j.getRepositoryDirectory().getPath() + "/" ).startsWith( path + "/" ) ) {
      throw new KettleJobException();
    }
  }
}
Example #13
Source File: JobEntryWriteToLogDialog.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public JobEntryWriteToLogDialog( Shell parent, JobEntryInterface jobEntryInt, Repository rep, JobMeta jobMeta ) {
  super( parent, jobEntryInt, rep, jobMeta );
  jobEntry = (JobEntryWriteToLog) jobEntryInt;
  if ( this.jobEntry.getName() == null ) {
    this.jobEntry.setName( BaseMessages.getString( PKG, "WriteToLog.Name.Default" ) );
  }
}
Example #14
Source File: PaloCubeDeleteDialog.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public PaloCubeDeleteDialog( Shell parent, JobEntryInterface jobEntryInt, Repository rep, JobMeta jobMeta ) {
  super( parent, jobEntryInt, rep, jobMeta );
  props = PropsUI.getInstance();
  this.jobEntry = (PaloCubeDelete) jobEntryInt;
  if ( this.jobEntry.getName() == null ) {
    this.jobEntry.setName( jobEntryInt.getName() );
  }
  this.jobMeta = jobMeta;
}
Example #15
Source File: JobTracker.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * @param jobMeta
 *          The job metadata to track
 * @param maxChildren
 *          The maximum number of children to keep track of (1000 is the default)
 */
public JobTracker( JobMeta jobMeta, int maxChildren ) {
  if ( jobMeta != null ) {
    this.jobName = jobMeta.getName();
    this.jobFilename = jobMeta.getFilename();
  }

  this.jobTrackers = new LinkedList<JobTracker>();
  this.maxChildren = maxChildren;
  this.lock = new ReentrantReadWriteLock();
}
Example #16
Source File: JobEntrySSH2GETDialog.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public JobEntrySSH2GETDialog( Shell parent, JobEntryInterface jobEntryInt, Repository rep, JobMeta jobMeta ) {
  super( parent, jobEntryInt, rep, jobMeta );
  jobEntry = (JobEntrySSH2GET) jobEntryInt;
  if ( this.jobEntry.getName() == null ) {
    this.jobEntry.setName( BaseMessages.getString( PKG, "JobSSH2GET.Name.Default" ) );
  }
}
Example #17
Source File: JobGraph.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * Finds the last active job in the running job and attaches it to the opened jobMeta.
 *
 * @param jobGraph
 * @param newJobMeta
 * @param jobEntryCopy
 */
private void attachActiveJob( JobGraph jobGraph, JobMeta newJobMeta, JobEntryCopy jobEntryCopy ) {
  if ( job != null && jobGraph != null ) {
    Job subJob = spoon.findActiveJob( job, jobEntryCopy );
    if ( subJob != null ) {
      jobGraph.setJob( subJob );
      jobGraph.jobGridDelegate.setJobTracker( subJob.getJobTracker() );
      if ( !jobGraph.isExecutionResultsPaneVisible() ) {
        jobGraph.showExecutionResults();
      }
      jobGraph.setControlStates();
    }
  }
}
Example #18
Source File: JobEntryGetPOPDialog.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public JobEntryGetPOPDialog( Shell parent, JobEntryInterface jobEntryInt, Repository rep, JobMeta jobMeta ) {
  super( parent, jobEntryInt, rep, jobMeta );
  jobEntry = (JobEntryGetPOP) jobEntryInt;
  if ( this.jobEntry.getName() == null ) {
    this.jobEntry.setName( BaseMessages.getString( PKG, "JobGetPOP.Name.Default" ) );
  }
}
Example #19
Source File: JobEntryFTP.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space, Repository repository,
    IMetaStore metaStore ) {
  JobEntryValidatorUtils.andValidator().validate( this, "serverName", remarks,
    AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
  JobEntryValidatorUtils.andValidator().validate( this, "targetDirectory", remarks,
    AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator(), JobEntryValidatorUtils.fileExistsValidator() ) );
  JobEntryValidatorUtils.andValidator().validate( this, "userName", remarks,
    AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
  JobEntryValidatorUtils.andValidator().validate( this, "password", remarks,
    AndValidator.putValidators( JobEntryValidatorUtils.notNullValidator() ) );
}
Example #20
Source File: JobEntryMysqlBulkFile.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@Override
public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space, Repository repository,
    IMetaStore metaStore ) {
  JobEntryValidatorUtils.andValidator().validate( this, "filename", remarks,
    AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
  JobEntryValidatorUtils.andValidator().validate( this, "tablename", remarks,
    AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
}
Example #21
Source File: KettleDatabaseRepositoryJobDelegate.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public SharedObjects readSharedObjects( JobMeta jobMeta ) throws KettleException {
  jobMeta.setSharedObjects( jobMeta.readSharedObjects() );
  readDatabases( jobMeta, true );
  readSlaves( jobMeta, true );
  return jobMeta.getSharedObjects();
}
Example #22
Source File: CheckConnectionsDialog.java From knowbi-pentaho-pdi-neo4j-output with Apache License 2.0 | 5 votes |
public CheckConnectionsDialog( Shell parent, JobEntryInterface jobEntry, Repository rep, JobMeta jobMeta ) {
  super( parent, jobEntry, rep, jobMeta );
  this.jobEntry = (CheckConnections) jobEntry;

  connectionFactory =
    new MetaStoreFactory<>( NeoConnection.class, Spoon.getInstance().getMetaStore(), Neo4jDefaults.NAMESPACE );

  if ( this.jobEntry.getName() == null ) {
    this.jobEntry.setName( "Check Neo4j Connections" );
  }
}
Example #23
Source File: JobEntryCheckDbConnectionsDialog.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public JobEntryCheckDbConnectionsDialog( Shell parent, JobEntryInterface jobEntryInt, Repository rep,
    JobMeta jobMeta ) {
  super( parent, jobEntryInt, rep, jobMeta );
  jobEntry = (JobEntryCheckDbConnections) jobEntryInt;
  if ( this.jobEntry.getName() == null ) {
    this.jobEntry.setName( BaseMessages.getString( PKG, "JobCheckDbConnections.Name.Default" ) );
  }
}
Example #24
Source File: JobEntryZipFileDialog.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public JobEntryZipFileDialog( Shell parent, JobEntryInterface jobEntryInt, Repository rep, JobMeta jobMeta ) {
  super( parent, jobEntryInt, rep, jobMeta );
  jobEntry = (JobEntryZipFile) jobEntryInt;
  if ( this.jobEntry.getName() == null ) {
    this.jobEntry.setName( BaseMessages.getString( PKG, "JobZipFiles.Name.Default" ) );
  }
}
Example #25
Source File: JobEntryPGPVerify.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public List<ResourceReference> getResourceDependencies( JobMeta jobMeta ) {
  List<ResourceReference> references = super.getResourceDependencies( jobMeta );
  if ( !Utils.isEmpty( gpglocation ) ) {
    String realFileName = jobMeta.environmentSubstitute( gpglocation );
    ResourceReference reference = new ResourceReference( this );
    reference.getEntries().add( new ResourceEntry( realFileName, ResourceType.FILE ) );
    references.add( reference );
  }
  return references;
}
Example #26
Source File: JobEntryDeleteFilesDialog.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public JobEntryDeleteFilesDialog( Shell parent, JobEntryInterface jobEntryInt, Repository rep, JobMeta jobMeta ) {
  super( parent, jobEntryInt, rep, jobMeta );
  jobEntry = (JobEntryDeleteFiles) jobEntryInt;
  if ( this.jobEntry.getName() == null ) {
    this.jobEntry.setName( BaseMessages.getString( PKG, "JobDeleteFiles.Name.Default" ) );
  }
}
Example #27
Source File: SharedObjectSyncUtil.java From pentaho-kettle with Apache License 2.0 | 5 votes |
private synchronized void synchronizeAll( boolean includeActive, Consumer<AbstractMeta> synchronizeAction ) {
  EngineMetaInterface current = spoon.getActiveMeta();
  for ( TransMeta trans : spoonDelegates.trans.getLoadedTransformations() ) {
    if ( includeActive || trans != current ) {
      synchronizeAction.accept( trans );
    }
  }
  for ( JobMeta job : spoonDelegates.jobs.getLoadedJobs() ) {
    if ( includeActive || job != current ) {
      synchronizeAction.accept( job );
    }
  }
}
Example #28
Source File: StreamToJobNodeConverter.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@VisibleForTesting
JobMeta filterPrivateDatabases( JobMeta jobMeta ) {
  Set<String> privateDatabases = jobMeta.getPrivateDatabases();
  if ( privateDatabases != null ) {
    // keep only private transformation databases
    for ( Iterator<DatabaseMeta> it = jobMeta.getDatabases().iterator(); it.hasNext(); ) {
      DatabaseMeta databaseMeta = it.next();
      String databaseName = databaseMeta.getName();
      if ( !privateDatabases.contains( databaseName ) && !jobMeta.isDatabaseConnectionUsed( databaseMeta ) ) {
        it.remove();
      }
    }
  }
  return jobMeta;
}
Example #29
Source File: JobEntryTableExistsDialog.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public JobEntryTableExistsDialog( Shell parent, JobEntryInterface jobEntryInt, Repository rep, JobMeta jobMeta ) {
  super( parent, jobEntryInt, rep, jobMeta );
  jobEntry = (JobEntryTableExists) jobEntryInt;
  if ( this.jobEntry.getName() == null ) {
    this.jobEntry.setName( BaseMessages.getString( PKG, "JobTableExists.Name.Default" ) );
  }
}
Example #30
Source File: SharedObjectSyncUtilTest.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@Test
public void synchronizeConnections_sync_shared_only() throws Exception {
  final String databaseName = "DB";
  DatabaseMeta sharedDB0 = createDatabaseMeta( databaseName, true );
  saveSharedObjects( SHARED_OBJECTS_FILE, sharedDB0 );

  JobMeta job1 = createJobMeta();
  DatabaseMeta sharedDB1 = job1.getDatabase( 0 );
  spoonDelegates.jobs.addJob( job1 );

  DatabaseMeta unsharedDB2 = createDatabaseMeta( databaseName, false );
  JobMeta job2 = createJobMeta();
  spoonDelegates.jobs.addJob( job2 );
  job2.removeDatabase( 0 );
  job2.addDatabase( unsharedDB2 );

  JobMeta job3 = createJobMeta();
  DatabaseMeta sharedDB3 = job3.getDatabase( 0 );
  spoonDelegates.jobs.addJob( job3 );
  job3.addDatabase( sharedDB3 );

  sharedDB3.setHostname( AFTER_SYNC_VALUE );
  sharedUtil.synchronizeConnections( sharedDB3, sharedDB3.getName() );

  assertThat( sharedDB1.getHostname(), equalTo( AFTER_SYNC_VALUE ) );
  assertThat( unsharedDB2.getHostname(), equalTo( BEFORE_SYNC_VALUE ) );
}