Java Code Examples for org.pentaho.di.job.JobMeta#getObjectId()
The following examples show how to use org.pentaho.di.job.JobMeta#getObjectId().
You can go to the original project or source file by following the links above each example.
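JobMeta#getObjectId() returns the ObjectId a repository assigned to the job when it was loaded from or saved to that repository; for a job that only exists as a local file it returns null. The following minimal sketch is not from the project itself; the helper class and method names are hypothetical and only illustrate the null-check-then-use pattern that recurs in the examples below.

import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.ObjectId;

public class ObjectIdUsageSketch {
  // Hypothetical helper: distinguish repository-backed jobs from purely local ones.
  static String describe( JobMeta jobMeta ) {
    ObjectId id = jobMeta.getObjectId(); // null if the job was never saved to a repository
    if ( id == null ) {
      return "Local job: " + jobMeta.getName();
    }
    return "Repository job " + jobMeta.getName() + " (id=" + id.getId() + ")";
  }
}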
Example 1
Source File: SpoonEEJobDelegate.java From pentaho-kettle with Apache License 2.0
@Override
public void addJobGraph( JobMeta jobMeta ) {
  super.addJobGraph( jobMeta );
  TabMapEntry tabEntry = spoon.delegates.tabs.findTabMapEntry( jobMeta );
  if ( tabEntry != null ) {
    TabItem tabItem = tabEntry.getTabItem();
    try {
      // Show the "locked" icon on the tab when the lock service reports a lock for this repository job.
      if ( ( service != null ) && ( jobMeta.getObjectId() != null )
          && ( service.getJobLock( jobMeta.getObjectId() ) != null ) ) {
        tabItem.setImage( GUIResource.getInstance().getImageLocked() );
      }
    } catch ( Exception e ) {
      throw new RuntimeException( e );
    }
  }
}
Example 2
Source File: RepositoryBrowserController.java From pentaho-kettle with Apache License 2.0
private boolean isJobOpened( String id, String path, String name ) {
  List<JobMeta> openedJobFiles = getSpoon().delegates.jobs.getJobList();
  for ( JobMeta j : openedJobFiles ) {
    // Match an open job either by repository object id or by directory path plus job name.
    if ( j.getObjectId() != null && id.equals( j.getObjectId().getId() )
        || ( path.equals( j.getRepositoryDirectory().getPath() ) && name.equals( j.getName() ) ) ) {
      return true;
    }
  }
  return false;
}
Example 3
Source File: RepositoryFileProvider.java From pentaho-kettle with Apache License 2.0
private boolean isJobOpened( String id, String path, String name ) {
  List<JobMeta> openedJobFiles = getSpoon().delegates.jobs.getJobList();
  for ( JobMeta j : openedJobFiles ) {
    // Match an open job either by repository object id or by directory path plus job name.
    if ( j.getObjectId() != null && id.equals( j.getObjectId().getId() )
        || ( path.equals( j.getRepositoryDirectory().getPath() ) && name.equals( j.getName() ) ) ) {
      return true;
    }
  }
  return false;
}
Example 4
Source File: RepositoryTestBase.java From pentaho-kettle with Apache License 2.0
@Ignore
@Test
public void testRenameAndUndelete() throws Exception {
  RepositoryDirectoryInterface rootDir = initRepo();
  JobMeta jobMeta = createJobMeta( EXP_JOB_NAME );
  RepositoryDirectoryInterface jobsDir = rootDir.findDirectory( DIR_JOBS );
  repository.save( jobMeta, VERSION_COMMENT_V1, null );
  deleteStack.push( jobMeta );

  // Delete the job, then undelete it via a RepositoryObject built from its object id.
  repository.deleteJob( jobMeta.getObjectId() );
  assertFalse( repository.exists( EXP_JOB_NAME, jobsDir, RepositoryObjectType.JOB ) );
  RepositoryObject robj =
    new RepositoryObject( jobMeta.getObjectId(), jobMeta.getName(), jobMeta.getRepositoryDirectory(), null, null,
      jobMeta.getRepositoryElementType(), null, false );
  repository.undeleteObject( robj );
  assertTrue( repository.exists( EXP_JOB_NAME, jobsDir, RepositoryObjectType.JOB ) );

  // Rename the job by its object id and verify old/new names.
  repository.renameJob( jobMeta.getObjectId(), jobsDir, EXP_JOB_NAME_NEW );
  assertFalse( repository.exists( EXP_JOB_NAME, jobsDir, RepositoryObjectType.JOB ) );
  assertTrue( repository.exists( EXP_JOB_NAME_NEW, jobsDir, RepositoryObjectType.JOB ) );

  TransMeta transMeta = createTransMeta( EXP_DBMETA_NAME );
  RepositoryDirectoryInterface transDir = rootDir.findDirectory( DIR_TRANSFORMATIONS );
  repository.save( transMeta, VERSION_COMMENT_V1, null );
  deleteStack.push( transMeta );
  repository.renameTransformation( transMeta.getObjectId(), transDir, EXP_TRANS_NAME_NEW );
  assertFalse( repository.exists( EXP_TRANS_NAME.concat( EXP_DBMETA_NAME ), transDir,
    RepositoryObjectType.TRANSFORMATION ) );
  assertTrue( repository.exists( EXP_TRANS_NAME_NEW, transDir, RepositoryObjectType.TRANSFORMATION ) );

  DatabaseMeta dbMeta = createDatabaseMeta( EXP_DBMETA2_NAME );
  repository.save( dbMeta, VERSION_COMMENT_V1, null );
  deleteStack.push( dbMeta );
  dbMeta.setName( EXP_DBMETA_NAME_NEW );
  repository.save( dbMeta, VERSION_COMMENT_V2, null );
  assertFalse( repository.exists( EXP_DBMETA2_NAME, null, RepositoryObjectType.DATABASE ) );
  assertTrue( repository.exists( EXP_DBMETA_NAME_NEW, null, RepositoryObjectType.DATABASE ) );
}
Example 5
Source File: KettleDatabaseRepositoryJobDelegate.java From pentaho-kettle with Apache License 2.0
private synchronized void insertJob( JobMeta jobMeta ) throws KettleException {
  RowMetaAndData table = new RowMetaAndData();

  // The job's ObjectId identifies the R_JOB row being inserted.
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_JOB ), jobMeta.getObjectId() );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY ),
    jobMeta.getRepositoryDirectory().getObjectId() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_NAME ), jobMeta.getName() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_DESCRIPTION ), jobMeta.getDescription() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION ),
    jobMeta.getExtendedDescription() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_JOB_VERSION ), jobMeta.getJobversion() );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_JOB_STATUS ),
    new Long( jobMeta.getJobstatus() < 0 ? -1L : jobMeta.getJobstatus() ) );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG ),
    jobMeta.getJobLogTable().getDatabaseMeta() != null
      ? jobMeta.getJobLogTable().getDatabaseMeta().getObjectId() : -1L );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG ),
    jobMeta.getJobLogTable().getTableName() );
  table.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID ),
    jobMeta.getJobLogTable().isBatchIdUsed() );
  table.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD ),
    jobMeta.getJobLogTable().isLogFieldUsed() );
  repository.connectionDelegate.insertJobAttribute( jobMeta.getObjectId(), 0,
    KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT, 0, jobMeta.getJobLogTable().getLogSizeLimit() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_CREATED_USER ), jobMeta.getCreatedUser() );
  table.addValue( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_CREATED_DATE ), jobMeta.getCreatedDate() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER ),
    jobMeta.getModifiedUser() );
  table.addValue( new ValueMetaDate( KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE ),
    jobMeta.getModifiedDate() );
  table.addValue( new ValueMetaBoolean( KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID ),
    jobMeta.isBatchIdPassed() );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_SHARED_FILE ),
    jobMeta.getSharedObjectsFile() );

  repository.connectionDelegate.getDatabase().prepareInsert( table.getRowMeta(),
    KettleDatabaseRepository.TABLE_R_JOB );
  repository.connectionDelegate.getDatabase().setValuesInsert( table );
  repository.connectionDelegate.getDatabase().insertRow();

  if ( log.isDebug() ) {
    log.logDebug( "Inserted new record into table " + quoteTable( KettleDatabaseRepository.TABLE_R_JOB )
      + " with data : " + table );
  }

  repository.connectionDelegate.getDatabase().closeInsert();

  // Save the logging connection link...
  if ( jobMeta.getJobLogTable().getDatabaseMeta() != null ) {
    repository.insertJobEntryDatabase( jobMeta.getObjectId(), null,
      jobMeta.getJobLogTable().getDatabaseMeta().getObjectId() );
  }

  // Save the logging tables too..
  //
  RepositoryAttributeInterface attributeInterface =
    new KettleDatabaseRepositoryJobAttribute( repository.connectionDelegate, jobMeta.getObjectId() );
  for ( LogTableInterface logTable : jobMeta.getLogTables() ) {
    logTable.saveToRepository( attributeInterface );
  }
}