Java Code Examples for org.pentaho.di.job.JobMeta#getDatabases()
The following examples show how to use org.pentaho.di.job.JobMeta#getDatabases(). They are taken from open-source projects; follow the link above each example to view the original project or source file.
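Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below) that simply lists the connections returned by getDatabases(). The class name and the .kjb path are placeholders, and it assumes the JobMeta( filename, repository ) constructor with a null repository; treat it as an illustration rather than production code.

import java.util.List;

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.job.JobMeta;

public class ListJobDatabases {
  public static void main( String[] args ) throws Exception {
    // Initialize the Kettle environment (plugin registry, logging) before loading metadata.
    KettleEnvironment.init();

    // Load a job from a local .kjb file; the path is a placeholder.
    JobMeta jobMeta = new JobMeta( "/path/to/job.kjb", null );

    // getDatabases() returns the database connections known to this job.
    List<DatabaseMeta> databases = jobMeta.getDatabases();
    for ( DatabaseMeta databaseMeta : databases ) {
      System.out.println( databaseMeta.getName() + " -> " + databaseMeta.getDatabaseName() );
    }
  }
}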
Example 1
Source File: JobDelegate.java From pentaho-kettle with Apache License 2.0
public void saveSharedObjects( final RepositoryElementInterface element, final String versionComment )
  throws KettleException {
  JobMeta jobMeta = (JobMeta) element;
  // Now store the databases in the job.
  // Only store if the database has actually changed or doesn't have an object ID (imported)
  //
  for ( DatabaseMeta databaseMeta : jobMeta.getDatabases() ) {
    if ( databaseMeta.hasChanged() || databaseMeta.getObjectId() == null ) {
      if ( databaseMeta.getObjectId() == null
          || unifiedRepositoryConnectionAclService.hasAccess( databaseMeta.getObjectId(),
            RepositoryFilePermission.WRITE ) ) {
        repo.save( databaseMeta, versionComment, null );
      } else {
        log.logError( BaseMessages.getString( PKG, "PurRepository.ERROR_0004_DATABASE_UPDATE_ACCESS_DENIED",
          databaseMeta.getName() ) );
      }
    }
  }

  // Store the slave server
  //
  for ( SlaveServer slaveServer : jobMeta.getSlaveServers() ) {
    if ( slaveServer.hasChanged() || slaveServer.getObjectId() == null ) {
      repo.save( slaveServer, versionComment, null );
    }
  }
}
Example 2
Source File: JobDelegate.java From pentaho-kettle with Apache License 2.0
protected void loadJobMetaDetails( DataNode rootNode, JobMeta jobMeta ) throws KettleException {
  try {
    jobMeta.setExtendedDescription( getString( rootNode, PROP_EXTENDED_DESCRIPTION ) );
    jobMeta.setJobversion( getString( rootNode, PROP_JOB_VERSION ) );
    jobMeta.setJobstatus( (int) rootNode.getProperty( PROP_JOB_STATUS ).getLong() );
    jobMeta.getJobLogTable().setTableName( getString( rootNode, PROP_TABLE_NAME_LOG ) );
    jobMeta.setCreatedUser( getString( rootNode, PROP_CREATED_USER ) );
    jobMeta.setCreatedDate( getDate( rootNode, PROP_CREATED_DATE ) );
    jobMeta.setModifiedUser( getString( rootNode, PROP_MODIFIED_USER ) );
    jobMeta.setModifiedDate( getDate( rootNode, PROP_MODIFIED_DATE ) );

    if ( rootNode.hasProperty( PROP_DATABASE_LOG ) ) {
      String id = rootNode.getProperty( PROP_DATABASE_LOG ).getRef().getId().toString();
      DatabaseMeta conn = ( DatabaseMeta.findDatabase( jobMeta.getDatabases(), new StringObjectId( id ) ) );
      jobMeta.getJobLogTable().setConnectionName( conn.getName() );
    }

    jobMeta.getJobLogTable().setBatchIdUsed( rootNode.getProperty( PROP_USE_BATCH_ID ).getBoolean() );
    jobMeta.setBatchIdPassed( rootNode.getProperty( PROP_PASS_BATCH_ID ).getBoolean() );
    jobMeta.getJobLogTable().setLogFieldUsed( rootNode.getProperty( PROP_USE_LOGFIELD ).getBoolean() );
    jobMeta.getJobLogTable().setLogSizeLimit( getString( rootNode, PROP_LOG_SIZE_LIMIT ) );

    // Load the logging tables too..
    //
    RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute( rootNode, jobMeta.getDatabases() );
    for ( LogTableInterface logTable : jobMeta.getLogTables() ) {
      logTable.loadFromRepository( attributeInterface );
    }

    // Load the attributes map
    //
    AttributesMapUtil.loadAttributesMap( rootNode, jobMeta );
  } catch ( Exception e ) {
    throw new KettleException( "Error loading job details", e );
  }
}
Example 3
Source File: JobDelegate.java From pentaho-kettle with Apache License 2.0
private void saveJobDetails( DataNode rootNode, JobMeta jobMeta ) throws KettleException {
  rootNode.setProperty( PROP_EXTENDED_DESCRIPTION, jobMeta.getExtendedDescription() );
  rootNode.setProperty( PROP_JOB_VERSION, jobMeta.getJobversion() );
  rootNode.setProperty( PROP_JOB_STATUS, jobMeta.getJobstatus() < 0 ? -1L : jobMeta.getJobstatus() );

  if ( jobMeta.getJobLogTable().getDatabaseMeta() != null ) {
    DataNodeRef ref = new DataNodeRef( jobMeta.getJobLogTable().getDatabaseMeta().getObjectId().getId() );
    rootNode.setProperty( PROP_DATABASE_LOG, ref );
  }

  rootNode.setProperty( PROP_TABLE_NAME_LOG, jobMeta.getJobLogTable().getTableName() );
  rootNode.setProperty( PROP_CREATED_USER, jobMeta.getCreatedUser() );
  rootNode.setProperty( PROP_CREATED_DATE, jobMeta.getCreatedDate() );
  rootNode.setProperty( PROP_MODIFIED_USER, jobMeta.getModifiedUser() );
  rootNode.setProperty( PROP_MODIFIED_DATE, jobMeta.getModifiedDate() );
  rootNode.setProperty( PROP_USE_BATCH_ID, jobMeta.getJobLogTable().isBatchIdUsed() );
  rootNode.setProperty( PROP_PASS_BATCH_ID, jobMeta.isBatchIdPassed() );
  rootNode.setProperty( PROP_USE_LOGFIELD, jobMeta.getJobLogTable().isLogFieldUsed() );
  rootNode.setProperty( PROP_SHARED_FILE, jobMeta.getSharedObjectsFile() );
  rootNode.setProperty( PROP_LOG_SIZE_LIMIT, jobMeta.getJobLogTable().getLogSizeLimit() );

  // Save the logging tables too..
  //
  RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute( rootNode, jobMeta.getDatabases() );
  for ( LogTableInterface logTable : jobMeta.getLogTables() ) {
    logTable.saveToRepository( attributeInterface );
  }

  // Save the attributes map
  //
  AttributesMapUtil.saveAttributesMap( rootNode, jobMeta );
}
Example 4
Source File: StreamToJobNodeConverter.java From pentaho-kettle with Apache License 2.0
public void saveSharedObjects( final Repository repo, final RepositoryElementInterface element )
  throws KettleException {
  JobMeta jobMeta = (JobMeta) element;
  // First store the databases and other depending objects in the job.
  List<String> databaseNames = Arrays.asList( repo.getDatabaseNames( true ) );

  int dbIndex = 0;
  int indexToReplace = 0;
  boolean updateMeta = Boolean.FALSE;

  for ( DatabaseMeta databaseMeta : jobMeta.getDatabases() ) {
    if ( !databaseNames.contains( databaseMeta.getName() ) ) {
      if ( databaseMeta.getObjectId() == null || !StringUtils.isEmpty( databaseMeta.getHostname() ) ) {
        repo.save( databaseMeta, null, null );
      }
    } else if ( databaseMeta.getObjectId() == null ) {
      indexToReplace = dbIndex;
      updateMeta = Boolean.TRUE;
    }
    dbIndex++;
  }

  // if the db already exists in the repo, get that object id and put it
  // in the jobMeta db collection
  if ( updateMeta ) {
    DatabaseMeta dbMetaToReplace = jobMeta.getDatabase( indexToReplace );
    dbMetaToReplace.setObjectId( repo.getDatabaseID( dbMetaToReplace.getName() ) );
    jobMeta.removeDatabase( indexToReplace );
    jobMeta.addDatabase( dbMetaToReplace );
  }

  // Store the slave servers...
  //
  for ( SlaveServer slaveServer : jobMeta.getSlaveServers() ) {
    if ( slaveServer.getObjectId() == null ) {
      repo.save( slaveServer, null, null );
    }
  }
}
Example 5
Source File: SharedObjectSyncUtil.java From pentaho-kettle with Apache License 2.0
@Override
public List<DatabaseMeta> getObjectsForSyncFromJob( JobMeta job ) {
  return job.getDatabases();
}