Java Code Examples for org.pentaho.di.repository.Repository#saveDatabaseMetaJobEntryAttribute()
The following examples show how to use org.pentaho.di.repository.Repository#saveDatabaseMetaJobEntryAttribute().
Each example indicates its source file in the pentaho-kettle project.
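Across all of the examples below, the call has the same shape: saveDatabaseMetaJobEntryAttribute( id_job, id_jobentry, nameCode, idCode, databaseMeta ) stores the job entry's database connection reference (typically under the name code "connection" and id code "id_database"), alongside the plain attributes written with saveJobEntryAttribute. The following minimal sketch condenses that pattern; JobEntryDemo and its fields are hypothetical, only the Repository calls mirror the examples on this page, and it assumes the base class supplies getObjectId() as in the real job entries.

import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.metastore.api.IMetaStore;

// Hypothetical job entry, used only to illustrate the saveRep() pattern shown below.
public class JobEntryDemo extends JobEntryBase {

  private String schemaname;
  private String tablename;
  private DatabaseMeta connection;

  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
    try {
      // Plain attributes are written one key/value pair at a time.
      rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename );

      // The DatabaseMeta reference is stored under a name code ("connection")
      // and an id code ("id_database"), as in every example on this page.
      rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection );
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleException( "Unable to save job entry to the repository for id_job=" + id_job, dbe );
    }
  }
}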
Example 1
Source File: JobEntryMysqlBulkLoad.java, from pentaho-kettle (Apache License 2.0)
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException { try { rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname ); rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename ); rep.saveJobEntryAttribute( id_job, getObjectId(), "filename", filename ); rep.saveJobEntryAttribute( id_job, getObjectId(), "separator", separator ); rep.saveJobEntryAttribute( id_job, getObjectId(), "enclosed", enclosed ); rep.saveJobEntryAttribute( id_job, getObjectId(), "escaped", escaped ); rep.saveJobEntryAttribute( id_job, getObjectId(), "linestarted", linestarted ); rep.saveJobEntryAttribute( id_job, getObjectId(), "lineterminated", lineterminated ); rep.saveJobEntryAttribute( id_job, getObjectId(), "replacedata", replacedata ); rep.saveJobEntryAttribute( id_job, getObjectId(), "ignorelines", ignorelines ); rep.saveJobEntryAttribute( id_job, getObjectId(), "listattribut", listattribut ); rep.saveJobEntryAttribute( id_job, getObjectId(), "localinfile", localinfile ); rep.saveJobEntryAttribute( id_job, getObjectId(), "prorityvalue", prorityvalue ); rep.saveJobEntryAttribute( id_job, getObjectId(), "addfiletoresult", addfiletoresult ); rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection ); } catch ( KettleDatabaseException dbe ) { throw new KettleException( "Unable to load job entry of type 'Mysql Bulk Load' to the repository for id_job=" + id_job, dbe ); } }
Example 2
Source File: JobEntryWaitForSQL.java, from pentaho-kettle (Apache License 2.0)
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "success_condition", getSuccessConditionCode( successCondition ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "rows_count_value", rowsCountValue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "custom_sql", customSQL );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "is_custom_sql", iscustomSQL );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "is_usevars", isUseVars );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "add_rows_result", isAddRowsResult );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "maximum_timeout", maximumTimeout );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "check_cycle_time", checkCycleTime );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "success_on_timeout", successOnTimeout );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "clear_result_rows", isClearResultList );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryWaitForSQL.UnableSaveRep", "" + id_job ), dbe );
  }
}
Example 3
Source File: JobEntryEvalTableContent.java, from pentaho-kettle (Apache License 2.0)
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "success_condition", getSuccessConditionCode( successCondition ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "limit", limit );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "custom_sql", customSQL );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "is_custom_sql", useCustomSQL );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "is_usevars", useVars );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "add_rows_result", addRowsResult );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "clear_result_rows", clearResultList );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryEvalTableContent.UnableSaveRep", "" + id_job ), dbe );
  }
}
Example 4
Source File: JobEntryMysqlBulkFile.java, from pentaho-kettle (Apache License 2.0)
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException { try { rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname ); rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename ); rep.saveJobEntryAttribute( id_job, getObjectId(), "filename", filename ); rep.saveJobEntryAttribute( id_job, getObjectId(), "separator", separator ); rep.saveJobEntryAttribute( id_job, getObjectId(), "enclosed", enclosed ); rep.saveJobEntryAttribute( id_job, getObjectId(), "lineterminated", lineterminated ); rep.saveJobEntryAttribute( id_job, getObjectId(), "limitlines", limitlines ); rep.saveJobEntryAttribute( id_job, getObjectId(), "listcolumn", listcolumn ); rep.saveJobEntryAttribute( id_job, getObjectId(), "highpriority", highpriority ); rep.saveJobEntryAttribute( id_job, getObjectId(), "optionenclosed", optionenclosed ); rep.saveJobEntryAttribute( id_job, getObjectId(), "outdumpvalue", outdumpvalue ); rep.saveJobEntryAttribute( id_job, getObjectId(), "iffileexists", iffileexists ); rep.saveJobEntryAttribute( id_job, getObjectId(), "addfiletoresult", addfiletoresult ); rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection ); } catch ( KettleDatabaseException dbe ) { throw new KettleException( "Unable to load job entry of type 'Mysql Bulk Load' to the repository for id_job=" + id_job, dbe ); } }
Example 5
Source File: JobEntryTruncateTables.java, from pentaho-kettle (Apache License 2.0)
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "arg_from_previous", this.argFromPrevious );

    // save the arguments...
    if ( this.arguments != null ) {
      for ( int i = 0; i < this.arguments.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "name", this.arguments[i] );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "schemaname", this.schemaname[i] );
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryTruncateTables.UnableSaveRep", "" + id_job ), dbe );
  }
}
Example 6
Source File: JobEntryColumnsExist.java, from pentaho-kettle (Apache License 2.0)
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException { try { rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename ); rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname ); rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection ); // save the arguments... if ( arguments != null ) { for ( int i = 0; i < arguments.length; i++ ) { rep.saveJobEntryAttribute( id_job, getObjectId(), i, "name", arguments[i] ); } } } catch ( KettleDatabaseException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryColumnsExist.Meta.UnableSaveRep", "" + id_job ), dbe ); } }
Example 7
Source File: JobEntryCheckDbConnections.java, from pentaho-kettle (Apache License 2.0)
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    // save the arguments...
    if ( connections != null ) {
      for ( int i = 0; i < connections.length; i++ ) {
        rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), i, "connection", "id_database", connections[i] );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "waittime", getWaitTimeCode( waittimes[i] ) );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "waitfor", waitfors[i] );
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryCheckDbConnections.ERROR_0003_Cannot_Save_Job_Entry", "" + id_job, dbe.getMessage() ) );
  }
}
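Example 7 is the only one on this page that uses the indexed overload, which takes an additional position argument so that several DatabaseMeta references can be stored for one job entry. The snippet below restates just that loop from the example, assuming a DatabaseMeta[] field named connections as in JobEntryCheckDbConnections.

    // Indexed variant: the extra int argument distinguishes repeated attributes of the same name.
    for ( int i = 0; i < connections.length; i++ ) {
      rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), i, "connection", "id_database", connections[i] );
    }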
Example 8
Source File: JobEntryMssqlBulkLoad.java, from pentaho-kettle (Apache License 2.0)
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException { try { rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname ); rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename ); rep.saveJobEntryAttribute( id_job, getObjectId(), "filename", filename ); rep.saveJobEntryAttribute( id_job, getObjectId(), "datafiletype", datafiletype ); rep.saveJobEntryAttribute( id_job, getObjectId(), "fieldterminator", fieldterminator ); rep.saveJobEntryAttribute( id_job, getObjectId(), "lineterminated", lineterminated ); rep.saveJobEntryAttribute( id_job, getObjectId(), "codepage", codepage ); rep.saveJobEntryAttribute( id_job, getObjectId(), "specificcodepage", specificcodepage ); rep.saveJobEntryAttribute( id_job, getObjectId(), "formatfilename", formatfilename ); rep.saveJobEntryAttribute( id_job, getObjectId(), "firetriggers", firetriggers ); rep.saveJobEntryAttribute( id_job, getObjectId(), "checkconstraints", checkconstraints ); rep.saveJobEntryAttribute( id_job, getObjectId(), "keepnulls", keepnulls ); rep.saveJobEntryAttribute( id_job, getObjectId(), "keepidentity", keepidentity ); rep.saveJobEntryAttribute( id_job, getObjectId(), "tablock", tablock ); rep.saveJobEntryAttribute( id_job, getObjectId(), "startfile", startfile ); rep.saveJobEntryAttribute( id_job, getObjectId(), "endfile", endfile ); rep.saveJobEntryAttribute( id_job, getObjectId(), "orderby", orderby ); rep.saveJobEntryAttribute( id_job, getObjectId(), "orderdirection", orderdirection ); rep.saveJobEntryAttribute( id_job, getObjectId(), "errorfilename", errorfilename ); rep.saveJobEntryAttribute( id_job, getObjectId(), "maxerrors", maxerrors ); rep.saveJobEntryAttribute( id_job, getObjectId(), "batchsize", batchsize ); rep.saveJobEntryAttribute( id_job, getObjectId(), "rowsperbatch", rowsperbatch ); rep.saveJobEntryAttribute( id_job, getObjectId(), "adddatetime", adddatetime ); rep.saveJobEntryAttribute( id_job, getObjectId(), "addfiletoresult", addfiletoresult ); rep.saveJobEntryAttribute( id_job, getObjectId(), "truncate", truncate ); rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection ); } catch ( KettleDatabaseException dbe ) { throw new KettleException( "Unable to load job entry of type 'MSsql Bulk Load' to the repository for id_job=" + id_job, dbe ); } }
Example 9
Source File: JobEntrySQL.java, from pentaho-kettle (Apache License 2.0)
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId idJob ) throws KettleException {
  try {
    rep.saveDatabaseMetaJobEntryAttribute( idJob, getObjectId(), CONNECTION_TAG, ID_DATABASE, databaseMeta );
    rep.saveJobEntryAttribute( idJob, getObjectId(), SQL_TAG, sql );
    rep.saveJobEntryAttribute( idJob, getObjectId(), USE_VARIABLE_SUBSTITUTION_TAG, useVariableSubstitution ? "T" : "F" );
    rep.saveJobEntryAttribute( idJob, getObjectId(), SQLFROMFILE_TAG, sqlFromFile ? "T" : "F" );
    rep.saveJobEntryAttribute( idJob, getObjectId(), SQLFILENAME_TAG, sqlFilename );
    rep.saveJobEntryAttribute( idJob, getObjectId(), SEND_ONE_STATEMENT_TAG, sendOneStatement ? "T" : "F" );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to save job entry of type 'sql' to the repository for idJob=" + idJob, dbe );
  }
}
Example 10
Source File: JobEntryTableExists.java, from pentaho-kettle (Apache License 2.0)
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException { try { rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename ); rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname ); rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection ); } catch ( KettleDatabaseException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "TableExists.Meta.UnableSaveRep", "" + id_job ), dbe ); } }
Example 11
Source File: PaloCubeDelete.java, from pentaho-kettle (Apache License 2.0)
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", databaseMeta );
    rep.saveStepAttribute( id_job, getObjectId(), "cubeName", this.getCubeName() );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "unable to save jobentry of type 'file exists' to the repository for id_job=" + id_job, dbe );
  }
}
Example 12
Source File: PaloCubeCreate.java, from pentaho-kettle (Apache License 2.0)
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", databaseMeta );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "cubeName", this.getCubeName() );

    for ( int i = 0; i < this.dimensionNames.size(); i++ ) {
      rep.saveJobEntryAttribute( id_job, getObjectId(), i, "dimensionname", this.dimensionNames.get( i ) );
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "unable to save jobentry of type 'file exists' to the repository for id_job=" + id_job, dbe );
  }
}