Java Code Examples for org.pentaho.di.repository.Repository#loadDatabaseMetaFromJobEntryAttribute()
The following examples show how to use org.pentaho.di.repository.Repository#loadDatabaseMetaFromJobEntryAttribute().
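As the examples below show, loadDatabaseMetaFromJobEntryAttribute() takes the job entry id, the name of the connection attribute (here "connection"), the name of the id attribute (here "id_database"), and the list of shared DatabaseMeta objects, and returns the matching connection; an overload with an extra index argument (see Example 6) handles job entries that reference several connections. A minimal sketch of the single-connection pattern follows; the surrounding job entry class and its connection field are illustrative placeholders distilled from the examples, not a specific Kettle class.

// Minimal sketch of the common pattern, assuming a hypothetical job entry
// whose only repository attribute is a reference to a database connection.
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    // Resolve the "connection"/"id_database" attribute pair against the list of shared connections.
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to load job entry from the repository for id_jobentry=" + id_jobentry, dbe );
  }
}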
Example 1
Source File: JobEntryMysqlBulkLoad.java, from pentaho-kettle, Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    filename = rep.getJobEntryAttributeString( id_jobentry, "filename" );
    separator = rep.getJobEntryAttributeString( id_jobentry, "separator" );
    enclosed = rep.getJobEntryAttributeString( id_jobentry, "enclosed" );
    escaped = rep.getJobEntryAttributeString( id_jobentry, "escaped" );
    linestarted = rep.getJobEntryAttributeString( id_jobentry, "linestarted" );
    lineterminated = rep.getJobEntryAttributeString( id_jobentry, "lineterminated" );
    replacedata = rep.getJobEntryAttributeBoolean( id_jobentry, "replacedata" );
    ignorelines = rep.getJobEntryAttributeString( id_jobentry, "ignorelines" );
    listattribut = rep.getJobEntryAttributeString( id_jobentry, "listattribut" );
    localinfile = rep.getJobEntryAttributeBoolean( id_jobentry, "localinfile" );
    prorityvalue = (int) rep.getJobEntryAttributeInteger( id_jobentry, "prorityvalue" );
    addfiletoresult = rep.getJobEntryAttributeBoolean( id_jobentry, "addfiletoresult" );
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      "Unable to load job entry of type 'Mysql bulk load' from the repository for id_jobentry=" + id_jobentry, dbe );
  }
}
Example 2
Source File: JobEntryWaitForSQL.java, from pentaho-kettle, Apache License 2.0
@Override
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    successCondition =
      getSuccessConditionByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "success_condition" ), "" ) );
    rowsCountValue = rep.getJobEntryAttributeString( id_jobentry, "rows_count_value" );
    iscustomSQL = rep.getJobEntryAttributeBoolean( id_jobentry, "is_custom_sql" );
    isUseVars = rep.getJobEntryAttributeBoolean( id_jobentry, "is_usevars" );
    isAddRowsResult = rep.getJobEntryAttributeBoolean( id_jobentry, "add_rows_result" );
    customSQL = rep.getJobEntryAttributeString( id_jobentry, "custom_sql" );
    maximumTimeout = rep.getJobEntryAttributeString( id_jobentry, "maximum_timeout" );
    checkCycleTime = rep.getJobEntryAttributeString( id_jobentry, "check_cycle_time" );
    successOnTimeout = rep.getJobEntryAttributeBoolean( id_jobentry, "success_on_timeout" );
    isClearResultList = rep.getJobEntryAttributeBoolean( id_jobentry, "clear_result_rows" );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "JobEntryWaitForSQL.UnableLoadRep", "" + id_jobentry ), dbe );
  }
}
Example 3
Source File: JobEntryEvalTableContent.java, from pentaho-kettle, Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    successCondition =
      getSuccessConditionByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "success_condition" ), "" ) );
    limit = rep.getJobEntryAttributeString( id_jobentry, "limit" );
    useCustomSQL = rep.getJobEntryAttributeBoolean( id_jobentry, "is_custom_sql" );
    useVars = rep.getJobEntryAttributeBoolean( id_jobentry, "is_usevars" );
    addRowsResult = rep.getJobEntryAttributeBoolean( id_jobentry, "add_rows_result" );
    clearResultList = rep.getJobEntryAttributeBoolean( id_jobentry, "clear_result_rows" );
    customSQL = rep.getJobEntryAttributeString( id_jobentry, "custom_sql" );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "JobEntryEvalTableContent.UnableLoadRep", "" + id_jobentry ), dbe );
  }
}
Example 4
Source File: JobEntryTruncateTables.java, from pentaho-kettle, Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
    this.argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" );
    allocate( argnr );

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      this.arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" );
      this.schemaname[a] = rep.getJobEntryAttributeString( id_jobentry, a, "schemaname" );
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "JobEntryTruncateTables.UnableLoadRep", "" + id_jobentry ), dbe );
  }
}
Example 5
Source File: JobEntryColumnsExist.java, from pentaho-kettle, Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" );
    arguments = new String[argnr];

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" );
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "JobEntryColumnsExist.Meta.UnableLoadRep", "" + id_jobentry ), dbe );
  }
}
Example 6
Source File: JobEntryCheckDbConnections.java, from pentaho-kettle, Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    // How many connections?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "id_database" );
    connections = new DatabaseMeta[argnr];
    waitfors = new String[argnr];
    waittimes = new int[argnr];

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      connections[a] =
        rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", a, "id_database", databases );
      waitfors[a] = rep.getJobEntryAttributeString( id_jobentry, a, "waitfor" );
      waittimes[a] = getWaitByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, a, "waittime" ), "" ) );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG,
      "JobEntryCheckDbConnections.ERROR_0002_Cannot_Load_Job_From_Repository", "" + id_jobentry, dbe.getMessage() ) );
  }
}
Example 7
Source File: PaloCubeCreate.java, from pentaho-kettle, Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    this.databaseMeta =
      rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
    this.setCubeName( rep.getStepAttributeString( id_jobentry, "cubeName" ) );

    int nrFields = rep.countNrStepAttributes( id_jobentry, "dimensionname" );
    for ( int i = 0; i < nrFields; i++ ) {
      String dimensionName = rep.getStepAttributeString( id_jobentry, i, "dimensionname" );
      this.dimensionNames.add( dimensionName );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException(
      "Unable to load job entry for type file exists from the repository for id_jobentry=" + id_jobentry, dbe );
  }
}
Example 8
Source File: JobEntryMysqlBulkFile.java, from pentaho-kettle, Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    filename = rep.getJobEntryAttributeString( id_jobentry, "filename" );
    separator = rep.getJobEntryAttributeString( id_jobentry, "separator" );
    enclosed = rep.getJobEntryAttributeString( id_jobentry, "enclosed" );
    lineterminated = rep.getJobEntryAttributeString( id_jobentry, "lineterminated" );
    limitlines = rep.getJobEntryAttributeString( id_jobentry, "limitlines" );
    listcolumn = rep.getJobEntryAttributeString( id_jobentry, "listcolumn" );
    highpriority = rep.getJobEntryAttributeBoolean( id_jobentry, "highpriority" );
    optionenclosed = rep.getJobEntryAttributeBoolean( id_jobentry, "optionenclosed" );
    outdumpvalue = (int) rep.getJobEntryAttributeInteger( id_jobentry, "outdumpvalue" );
    iffileexists = (int) rep.getJobEntryAttributeInteger( id_jobentry, "iffileexists" );
    addfiletoresult = rep.getJobEntryAttributeBoolean( id_jobentry, "addfiletoresult" );
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      "Unable to load job entry of type 'table exists' from the repository for id_jobentry=" + id_jobentry, dbe );
  }
}
Example 9
Source File: JobEntrySQL.java, from pentaho-kettle, Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId idJobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    sql = rep.getJobEntryAttributeString( idJobentry, SQL_TAG );
    String sSubs = rep.getJobEntryAttributeString( idJobentry, USE_VARIABLE_SUBSTITUTION_TAG );
    if ( sSubs != null && sSubs.equalsIgnoreCase( "T" ) ) {
      useVariableSubstitution = true;
    }
    String ssql = rep.getJobEntryAttributeString( idJobentry, SQLFROMFILE_TAG );
    if ( ssql != null && ssql.equalsIgnoreCase( "T" ) ) {
      sqlFromFile = true;
    }
    String ssendOneStatement = rep.getJobEntryAttributeString( idJobentry, SEND_ONE_STATEMENT_TAG );
    if ( ssendOneStatement != null && ssendOneStatement.equalsIgnoreCase( "T" ) ) {
      sendOneStatement = true;
    }
    sqlFilename = rep.getJobEntryAttributeString( idJobentry, SQLFILENAME_TAG );
    databaseMeta = rep.loadDatabaseMetaFromJobEntryAttribute( idJobentry, CONNECTION_TAG, ID_DATABASE, databases );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      "Unable to load job entry of type 'sql' from the repository with idJobentry=" + idJobentry, dbe );
  }
}
Example 10
Source File: JobEntryTableExists.java, from pentaho-kettle, Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "TableExists.Meta.UnableLoadRep", "" + id_jobentry ), dbe );
  }
}
Example 11
Source File: PaloCubeDelete.java, from pentaho-kettle, Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    this.databaseMeta =
      rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
    this.setCubeName( rep.getStepAttributeString( id_jobentry, "cubeName" ) );
  } catch ( KettleException dbe ) {
    throw new KettleException(
      "Unable to load job entry for type file exists from the repository for id_jobentry=" + id_jobentry, dbe );
  }
}
Example 12
Source File: JobEntryMssqlBulkLoad.java, from pentaho-kettle, Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    filename = rep.getJobEntryAttributeString( id_jobentry, "filename" );
    datafiletype = rep.getJobEntryAttributeString( id_jobentry, "datafiletype" );
    fieldterminator = rep.getJobEntryAttributeString( id_jobentry, "fieldterminator" );
    lineterminated = rep.getJobEntryAttributeString( id_jobentry, "lineterminated" );
    codepage = rep.getJobEntryAttributeString( id_jobentry, "codepage" );
    specificcodepage = rep.getJobEntryAttributeString( id_jobentry, "specificcodepage" );
    formatfilename = rep.getJobEntryAttributeString( id_jobentry, "formatfilename" );
    firetriggers = rep.getJobEntryAttributeBoolean( id_jobentry, "firetriggers" );
    checkconstraints = rep.getJobEntryAttributeBoolean( id_jobentry, "checkconstraints" );
    keepnulls = rep.getJobEntryAttributeBoolean( id_jobentry, "keepnulls" );
    keepidentity = rep.getJobEntryAttributeBoolean( id_jobentry, "keepidentity" );
    tablock = rep.getJobEntryAttributeBoolean( id_jobentry, "tablock" );
    startfile = (int) rep.getJobEntryAttributeInteger( id_jobentry, "startfile" );
    endfile = (int) rep.getJobEntryAttributeInteger( id_jobentry, "endfile" );
    orderby = rep.getJobEntryAttributeString( id_jobentry, "orderby" );
    orderdirection = rep.getJobEntryAttributeString( id_jobentry, "orderdirection" );
    errorfilename = rep.getJobEntryAttributeString( id_jobentry, "errorfilename" );
    maxerrors = (int) rep.getJobEntryAttributeInteger( id_jobentry, "maxerrors" );
    batchsize = (int) rep.getJobEntryAttributeInteger( id_jobentry, "batchsize" );
    rowsperbatch = (int) rep.getJobEntryAttributeInteger( id_jobentry, "rowsperbatch" );
    adddatetime = rep.getJobEntryAttributeBoolean( id_jobentry, "adddatetime" );
    addfiletoresult = rep.getJobEntryAttributeBoolean( id_jobentry, "addfiletoresult" );
    truncate = rep.getJobEntryAttributeBoolean( id_jobentry, "truncate" );
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      "Unable to load job entry of type 'MSsql bulk load' from the repository for id_jobentry=" + id_jobentry, dbe );
  }
}