org.pentaho.di.core.exception.KettleDatabaseException Java Examples
The following examples show how to use
org.pentaho.di.core.exception.KettleDatabaseException.
You can go to the original project or source file by following the link above each example.
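Most of the examples below follow the same idiom: a low-level repository or database call declares KettleDatabaseException, and the caller either handles it locally (logging, returning a default) or rethrows it wrapped in the more general KettleException. The following minimal sketch shows that wrap-and-rethrow pattern; the SaveScriptHelper class, its method, and its error message are illustrative placeholders rather than part of any Pentaho class, while Repository.saveJobEntryAttribute is used exactly as in the examples that follow.

import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;

public class SaveScriptHelper {
  // Hypothetical helper: persists a single attribute and converts the low-level
  // database failure into the higher-level KettleException that callers expect.
  public void saveScript( Repository rep, ObjectId idJob, ObjectId idJobEntry, String script )
    throws KettleException {
    try {
      rep.saveJobEntryAttribute( idJob, idJobEntry, "script", script );
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleException( "Unable to save the script attribute for job id " + idJob, dbe );
    }
  }
}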
Example #1
Source File: KettleDatabaseRepositoryJobDelegate.java From pentaho-kettle with Apache License 2.0
/**
 * Read the slave servers in the repository and add them to this transformation if they are not yet present.
 *
 * @param jobMeta
 *          The job to put the slave servers in
 * @param overWriteShared
 *          if an object with the same name exists, overwrite
 * @throws KettleException
 */
public void readSlaves( JobMeta jobMeta, boolean overWriteShared ) throws KettleException {
  try {
    ObjectId[] dbids = repository.getSlaveIDs( false );
    for ( int i = 0; i < dbids.length; i++ ) {
      SlaveServer slaveServer = repository.loadSlaveServer( dbids[i], null ); // Load last version
      slaveServer.shareVariablesWith( jobMeta );
      SlaveServer check = jobMeta.findSlaveServer( slaveServer.getName() ); // Check if there already is one in the
                                                                            // transformation
      if ( check == null || overWriteShared ) {
        if ( !Utils.isEmpty( slaveServer.getName() ) ) {
          jobMeta.addOrReplaceSlaveServer( slaveServer );
          if ( !overWriteShared ) {
            slaveServer.setChanged( false );
          }
        }
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages
      .getString( PKG, "JobMeta.Log.UnableToReadSlaveServersFromRepository" ), dbe );
  }
}
Example #2
Source File: KettleDatabaseRepositoryTransDelegate.java From pentaho-kettle with Apache License 2.0
/**
 * Read the slave servers in the repository and add them to this transformation if they are not yet present.
 *
 * @param transMeta
 *          The transformation to load into.
 * @param overWriteShared
 *          if an object with the same name exists, overwrite
 * @throws KettleException
 */
public void readSlaves( TransMeta transMeta, boolean overWriteShared ) throws KettleException {
  try {
    ObjectId[] dbids = repository.getSlaveIDs( false );
    for ( int i = 0; i < dbids.length; i++ ) {
      SlaveServer slaveServer = repository.loadSlaveServer( dbids[i], null ); // Load last version
      slaveServer.shareVariablesWith( transMeta );
      SlaveServer check = transMeta.findSlaveServer( slaveServer.getName() ); // Check if there already is one in the
                                                                              // transformation
      if ( check == null || overWriteShared ) {
        if ( !Utils.isEmpty( slaveServer.getName() ) ) {
          transMeta.addOrReplaceSlaveServer( slaveServer );
          if ( !overWriteShared ) {
            slaveServer.setChanged( false );
          }
        }
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG,
      "TransMeta.Log.UnableToReadSlaveServersFromRepository" ), dbe );
  }
}
Example #3
Source File: JobEntryAddResultFilenames.java From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "arg_from_previous", argFromPrevious );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "include_subfolders", includeSubfolders );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "delete_all_before", deleteallbefore );

    // save the arguments...
    if ( arguments != null ) {
      for ( int i = 0; i < arguments.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "name", arguments[i] );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "filemask", filemasks[i] );
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryAddResultFilenames.UnableToSaveToRepo",
      String.valueOf( id_job ) ), dbe );
  }
}
Example #4
Source File: TableAgileMart.java From pentaho-kettle with Apache License 2.0
@Override
public boolean dropTable() {
  TableOutputMeta meta = getMeta();
  TableOutputData data = getData();
  String schema = meta.getSchemaName();
  String table = meta.getTableName();
  if ( schema != null && !schema.equals( "" ) ) {
    table = schema + "." + table;
  }
  String sql = "drop table " + table + ";";
  try {
    Result result = data.db.execStatement( sql );
    int status = result.getExitStatus();
    if ( status == 0 ) {
      util.updateMetadata( meta, -1 );
    }
    return status == 0;
  } catch ( KettleDatabaseException e ) {
    message = "Could not drop table: " + table;
    logError( message, e );
  }
  return false;
}
Example #5
Source File: MySQLDatabaseMeta.java From pentaho-kettle with Apache License 2.0
/**
 * Returns the column name for a MySQL field checking if the driver major version is "greater than" or
 * "lower or equal" to 3.
 *
 * @param dbMetaData
 * @param rsMetaData
 * @param index
 * @return The column label if version is greater than 3 or the column name if version is lower or equal to 3.
 * @throws KettleDatabaseException
 */
public String getLegacyColumnName( DatabaseMetaData dbMetaData, ResultSetMetaData rsMetaData, int index )
  throws KettleDatabaseException {
  if ( dbMetaData == null ) {
    throw new KettleDatabaseException( BaseMessages.getString( PKG,
      "MySQLDatabaseMeta.Exception.LegacyColumnNameNoDBMetaDataException" ) );
  }

  if ( rsMetaData == null ) {
    throw new KettleDatabaseException( BaseMessages.getString( PKG,
      "MySQLDatabaseMeta.Exception.LegacyColumnNameNoRSMetaDataException" ) );
  }

  try {
    return dbMetaData.getDriverMajorVersion() > 3
      ? rsMetaData.getColumnLabel( index ) : rsMetaData.getColumnName( index );
  } catch ( Exception e ) {
    throw new KettleDatabaseException( String.format( "%s: %s",
      BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameException" ), e.getMessage() ), e );
  }
}
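As a rough usage sketch (not taken from the project), the method above could be applied to a plain JDBC result set; the JDBC URL, credentials, and query below are placeholders to be replaced with real connection details.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import org.pentaho.di.core.database.MySQLDatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;

public class LegacyColumnNameSketch {
  public static void main( String[] args ) throws Exception {
    // Placeholder connection details; point these at a real MySQL instance.
    try ( Connection con = DriverManager.getConnection( "jdbc:mysql://localhost/test", "user", "pass" );
          Statement st = con.createStatement();
          ResultSet rs = st.executeQuery( "SELECT id AS user_id FROM users" ) ) {
      MySQLDatabaseMeta meta = new MySQLDatabaseMeta();
      // Driver major version > 3 yields the label ("user_id"); older drivers yield the raw name ("id").
      String columnName = meta.getLegacyColumnName( con.getMetaData(), rs.getMetaData(), 1 );
      System.out.println( columnName );
    } catch ( KettleDatabaseException e ) {
      e.printStackTrace();
    }
  }
}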
Example #6
Source File: Database.java From pentaho-kettle with Apache License 2.0
public String getDDL( String tableName, RowMetaInterface fields, String tk, boolean useAutoinc, String pk,
  boolean semicolon ) throws KettleDatabaseException {
  String retval;

  // First, check for reserved SQL in the input row r...
  databaseMeta.quoteReservedWords( fields );
  String quotedTk = tk != null ? databaseMeta.quoteField( tk ) : null;

  if ( checkTableExists( tableName ) ) {
    retval = getAlterTableStatement( tableName, fields, quotedTk, useAutoinc, pk, semicolon );
  } else {
    retval = getCreateTableStatement( tableName, fields, quotedTk, useAutoinc, pk, semicolon );
  }

  return retval;
}
Example #7
Source File: JobEntryColumnsExist.java From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname );
    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection );

    // save the arguments...
    if ( arguments != null ) {
      for ( int i = 0; i < arguments.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "name", arguments[i] );
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryColumnsExist.Meta.UnableSaveRep",
      "" + id_job ), dbe );
  }
}
Example #8
Source File: KettleDatabaseRepositoryDatabaseDelegate.java From pentaho-kettle with Apache License 2.0
public Collection<RowMetaAndData> getDatabaseAttributes() throws KettleDatabaseException, KettleValueException {
  List<RowMetaAndData> attrs = new ArrayList<RowMetaAndData>();
  List<Object[]> rows = repository.connectionDelegate.getRows(
    "SELECT * FROM " + quoteTable( KettleDatabaseRepository.TABLE_R_DATABASE_ATTRIBUTE ), 0 );
  for ( Object[] row : rows ) {
    RowMetaAndData rowWithMeta = new RowMetaAndData( repository.connectionDelegate.getReturnRowMeta(), row );
    long id = rowWithMeta.getInteger(
      quote( KettleDatabaseRepository.FIELD_DATABASE_ATTRIBUTE_ID_DATABASE_ATTRIBUTE ), 0 );
    if ( id > 0 ) {
      attrs.add( rowWithMeta );
    }
  }
  return attrs;
}
Example #9
Source File: KettleDatabaseRepositoryUserDelegate.java From pentaho-kettle with Apache License 2.0
public void saveUserInfo( IUser userInfo ) throws KettleException {
  try {
    // Do we have a user id already?
    if ( userInfo.getObjectId() == null ) {
      userInfo.setObjectId( getUserID( userInfo.getLogin() ) ); // Get userid in the repository
    }

    if ( userInfo.getObjectId() == null ) {
      // This means the login doesn't exist in the database
      // and we have no id, so we don't know the old one...
      // Just grab the next user ID and do an insert:
      userInfo.setObjectId( repository.connectionDelegate.getNextUserID() );
      repository.connectionDelegate.insertTableRow( "R_USER", fillTableRow( userInfo ) );
    } else {
      repository.connectionDelegate.updateTableRow( "R_USER", "ID_USER", fillTableRow( userInfo ) );
    }

    // Put a commit behind it!
    repository.commit();
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "UserInfo.Error.SavingUser", userInfo.getLogin() ), dbe );
  }
}
Example #10
Source File: JobEntryDeleteFolders.java From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "arg_from_previous", argFromPrevious );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "limit_folders", limit_folders );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "success_condition", success_condition );

    // save the arguments...
    if ( arguments != null ) {
      for ( int i = 0; i < arguments.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "name", arguments[i] );
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryDeleteFolders.UnableToSaveToRepo",
      String.valueOf( id_job ) ), dbe );
  }
}
Example #11
Source File: Database.java From pentaho-kettle with Apache License 2.0
/**
 * See if the table specified exists by getting db metadata.
 *
 * @param tablename
 *          The name of the table to check.<br>
 *          This is supposed to be the properly quoted name of the table or the complete schema-table name
 *          combination.
 * @return true if the table exists, false if it doesn't.
 * @throws KettleDatabaseException
 * @deprecated Deprecated in favor of {@link #checkTableExists(String, String)}
 */
@Deprecated
public boolean checkTableExistsByDbMeta( String schema, String tablename ) throws KettleDatabaseException {
  boolean isTableExist = false;
  if ( log.isDebug() ) {
    log.logDebug( BaseMessages.getString( PKG, "Database.Info.CheckingIfTableExistsInDbMetaData", tablename ) );
  }
  try ( ResultSet resTables = getTableMetaData( schema, tablename ) ) {
    while ( resTables.next() ) {
      String resTableName = resTables.getString( TABLES_META_DATA_TABLE_NAME );
      if ( tablename.equalsIgnoreCase( resTableName ) ) {
        if ( log.isDebug() ) {
          log.logDebug( BaseMessages.getString( PKG, "Database.Info.TableFound", tablename ) );
        }
        isTableExist = true;
        break;
      }
    }
  } catch ( SQLException e ) {
    throw new KettleDatabaseException( BaseMessages.getString( PKG, "Database.Error.UnableToCheckExistingTable",
      tablename, databaseMeta.getName() ), e );
  }
  return isTableExist;
}
Example #12
Source File: Database.java From pentaho-kettle with Apache License 2.0
public void rollback( boolean force ) throws KettleDatabaseException {
  try {
    if ( !Utils.isEmpty( connectionGroup ) && !force ) {
      return; // Will be handled by Trans --> endProcessing()
    }
    if ( getDatabaseMetaData().supportsTransactions() ) {
      if ( connection != null ) {
        if ( log.isDebug() ) {
          log.logDebug( "Rollback on database connection [" + toString() + "]" );
        }
        connection.rollback();
      }
    } else {
      if ( log.isDetailed() ) {
        log.logDetailed( "No rollback possible on database connection [" + toString() + "]" );
      }
    }
  } catch ( SQLException e ) {
    throw new KettleDatabaseException( "Error performing rollback on connection", e );
  }
}
Example #13
Source File: Database.java From pentaho-kettle with Apache License 2.0
public String[] getTableTypes() throws KettleDatabaseException {
  try {
    ArrayList<String> types = new ArrayList<>();

    ResultSet rstt = getDatabaseMetaData().getTableTypes();
    while ( rstt.next() ) {
      String ttype = rstt.getString( "TABLE_TYPE" );
      types.add( ttype );
    }

    return types.toArray( new String[ types.size() ] );
  } catch ( SQLException e ) {
    throw new KettleDatabaseException( "Unable to get table types from database!", e );
  }
}
Example #14
Source File: JobEntryWriteToLog.java From pentaho-kettle with Apache License 2.0
@Override
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    logmessage = rep.getJobEntryAttributeString( id_jobentry, "logmessage" );
    entryLogLevel = LogLevel.getLogLevelForCode( rep.getJobEntryAttributeString( id_jobentry, "loglevel" ) );
    logsubject = rep.getJobEntryAttributeString( id_jobentry, "logsubject" );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "WriteToLog.Error.UnableToLoadFromRepository.Label" )
      + id_jobentry, dbe );
  }
}
Example #15
Source File: AsyncDatabaseAction.java From pentaho-kettle with Apache License 2.0
public static void getSchemas( DatabaseMeta databaseMeta, Consumer<String[]> schemasConsumer ) {
  executeAction( databaseMeta, database -> {
    try {
      schemasConsumer.accept( database.getSchemas() );
    } catch ( KettleDatabaseException | NullPointerException e ) {
      logError( databaseMeta, e );
      schemasConsumer.accept( new String[ 0 ] );
    }
  } );
}
Example #16
Source File: ValueMetaBaseTest.java From pentaho-kettle with Apache License 2.0
@Test
public void testMetdataPreviewSqlBigIntToPentahoInteger() throws SQLException, KettleDatabaseException {
  doReturn( Types.BIGINT ).when( resultSet ).getInt( "DATA_TYPE" );
  ValueMetaInterface valueMeta = valueMetaBase.getMetadataPreview( dbMeta, resultSet );
  assertTrue( valueMeta.isInteger() );
  assertEquals( 0, valueMeta.getPrecision() );
  assertEquals( 15, valueMeta.getLength() );
}
Example #17
Source File: JobEntryEvalFilesMetrics.java From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    // save the arguments...
    if ( sourceFileFolder != null ) {
      for ( int i = 0; i < sourceFileFolder.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "source_filefolder", sourceFileFolder[i] );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "wildcard", sourceWildcard[i] );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "include_subFolders", sourceIncludeSubfolders[i] );
      }
    }

    rep.saveJobEntryAttribute( id_job, getObjectId(), "result_filenames_wildcard", resultFilenamesWildcard );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "result_field_file", ResultFieldFile );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "result_field_wild", ResultFieldWildcard );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "result_field_includesubfolders", ResultFieldIncludesubFolders );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "comparevalue", comparevalue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "minvalue", minvalue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "maxvalue", maxvalue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "successnumbercondition", JobEntrySimpleEval
      .getSuccessNumberConditionCode( successConditionType ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "scale", getScaleCode( scale ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "source_files", getSourceFilesCode( sourceFiles ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "evaluation_type", getEvaluationTypeCode( evaluationType ) );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Error.Exception.UnableSaveRep" )
      + id_job, dbe );
  }
}
Example #18
Source File: Database.java From pentaho-kettle with Apache License 2.0
public void insertRow( String schemaName, String tableName, RowMetaInterface fields, Object[] data )
  throws KettleDatabaseException {
  prepareInsert( fields, schemaName, tableName );
  setValuesInsert( fields, data );
  insertRow();
  closeInsert();
}
Example #19
Source File: JobEntryTruncateTablesDialog.java From pentaho-kettle with Apache License 2.0
private void getTableName() {
  DatabaseMeta databaseMeta = jobMeta.findDatabase( wConnection.getText() );
  if ( databaseMeta != null ) {
    Database database = new Database( loggingObject, databaseMeta );
    try {
      database.connect();
      String[] Tablenames = database.getTablenames();
      Arrays.sort( Tablenames );
      EnterSelectionDialog dialog =
        new EnterSelectionDialog( shell, Tablenames,
          BaseMessages.getString( PKG, "JobTruncateTables.SelectTables.Title" ),
          BaseMessages.getString( PKG, "JobTruncateTables.SelectTables.Message" ) );
      dialog.setMulti( true );
      dialog.setAvoidQuickSearch();
      if ( dialog.open() != null ) {
        int[] idx = dialog.getSelectionIndeces();
        for ( int i = 0; i < idx.length; i++ ) {
          TableItem tableItem = new TableItem( wFields.table, SWT.NONE );
          tableItem.setText( 1, Tablenames[idx[i]] );
        }
      }
    } catch ( KettleDatabaseException e ) {
      new ErrorDialog( shell, BaseMessages.getString( PKG, "System.Dialog.Error.Title" ),
        BaseMessages.getString( PKG, "JobEntryTruncateTables.ConnectionError.DialogMessage" ), e );
    } finally {
      if ( database != null ) {
        database.disconnect();
      }
    }
    wFields.removeEmptyRows();
    wFields.setRowNums();
    wFields.optWidth( true );
  }
}
Example #20
Source File: KettleDatabaseRepository.java From pentaho-kettle with Apache License 2.0
@Override
public boolean test() {
  try {
    getDatabase().connect();
  } catch ( KettleDatabaseException kde ) {
    return false;
  }
  return true;
}
Example #21
Source File: JobEntryDummy.java From pentaho-kettle with Apache License 2.0
@Override
public void saveRep( Repository rep, ObjectId id_job ) throws KettleException {
  try {
    super.saveRep( rep, id_job );

    rep.saveJobEntryAttribute( id_job, getObjectId(), SOURCEDIRECTORY, sourceDirectory );
    rep.saveJobEntryAttribute( id_job, getObjectId(), TARGETDIRECTORY, targetDirectory );
    rep.saveJobEntryAttribute( id_job, getObjectId(), WILDCARD, wildcard );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "unable to save jobentry of type 'file exists' to the repository for id_job="
      + id_job, dbe );
  }
}
Example #22
Source File: Database.java From pentaho-kettle with Apache License 2.0
public Object[] getLastLogDate( String logtable, String name, boolean job, LogStatus status )
  throws KettleDatabaseException {
  Object[] row = null;

  String jobtrans = job ? databaseMeta.quoteField( "JOBNAME" ) : databaseMeta.quoteField( "TRANSNAME" );

  String sql = "";
  sql += " SELECT " + databaseMeta.quoteField( "ENDDATE" ) + ", " + databaseMeta.quoteField( "DEPDATE" ) + ", "
    + databaseMeta.quoteField( "STARTDATE" );
  sql += " FROM " + logtable;
  sql += " WHERE " + databaseMeta.quoteField( "ERRORS" ) + " = 0";
  sql += " AND " + databaseMeta.quoteField( "STATUS" ) + " = 'end'";
  sql += " AND " + jobtrans + " = ?";
  sql += " ORDER BY " + databaseMeta.quoteField( "LOGDATE" ) + " DESC, " + databaseMeta.quoteField( "ENDDATE" )
    + " DESC";

  try {
    pstmt = connection.prepareStatement( databaseMeta.stripCR( sql ) );

    RowMetaInterface r = new RowMeta();
    r.addValueMeta( new ValueMetaString( "TRANSNAME", 255, -1 ) );
    setValues( r, new Object[] { name } );

    try ( ResultSet res = pstmt.executeQuery() ) {
      rowMeta = getRowInfo( res.getMetaData(), false, false );
      row = getRow( res );
    }
  } catch ( SQLException ex ) {
    throw new KettleDatabaseException( "Unable to obtain last logdate from table " + logtable, ex );
  }

  return row;
}
Example #23
Source File: AsyncDatabaseActionTest.java From pentaho-kettle with Apache License 2.0
@Test
public void executeAction() throws InterruptedException, ExecutionException, TimeoutException {
  CompletableFuture<List<Object[]>> rowMetaCompletion = new CompletableFuture<>();
  AsyncDatabaseAction.executeAction( dbMeta, database -> {
    try {
      rowMetaCompletion.complete( database.getFirstRows( "BAR", 2 ) );
    } catch ( KettleDatabaseException e ) {
      throw new IllegalStateException( e );
    }
  } );
  List<Object[]> rows = rowMetaCompletion.get( COMPLETION_TIMEOUT, TimeUnit.MILLISECONDS );
  assertThat( rows.size(), equalTo( 1 ) );
  assertThat( rows.get( 0 )[ 0 ], equalTo( 123L ) );
  assertThat( rows.get( 0 )[ 1 ], equalTo( 321L ) );
}
Example #24
Source File: PDI5436Test.java From pentaho-kettle with Apache License 2.0
private Database mockDatabase() throws KettleDatabaseException {
  Database databaseMock = mock( Database.class );

  RowMeta databaseRowMeta = new RowMeta();
  databaseRowMeta.addValueMeta( new ValueMetaString( "id" ) );
  databaseRowMeta.addValueMeta( new ValueMetaString( "value" ) );
  doReturn( databaseRowMeta ).when( databaseMock ).getTableFields( anyString() );
  doReturn( databaseRowMeta ).when( databaseMock ).getTableFieldsMeta( anyString(), anyString() );
  doReturn( Arrays.asList( new Object[][] { { "1", "value" } } ) ).when( databaseMock )
    .getRows( anyString(), anyInt() );
  doReturn( databaseRowMeta ).when( databaseMock ).getReturnRowMeta();

  return databaseMock;
}
Example #25
Source File: Database.java From pentaho-kettle with Apache License 2.0
public void closeProcedureStatement() throws KettleDatabaseException {
  // CHE: close the callable statement involved in the stored
  // procedure call!
  try {
    if ( cstmt != null ) {
      cstmt.close();
      cstmt = null;
    }
  } catch ( SQLException ex ) {
    throw new KettleDatabaseException(
      BaseMessages.getString( PKG, "Database.Exception.ErrorClosingCallableStatement" ), ex );
  }
}
Example #26
Source File: JobEntryEval.java From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "script", script );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryEval.UnableToSaveToRepo",
      String.valueOf( id_job ) ), dbe );
  }
}
Example #27
Source File: JobEntryEval.java From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    script = rep.getJobEntryAttributeString( id_jobentry, "script" );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryEval.UnableToLoadFromRepo",
      String.valueOf( id_jobentry ) ), dbe );
  }
}
Example #28
Source File: JobEntryHTTP.java From pentaho-kettle with Apache License 2.0
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "url", url );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "targetfilename", targetFilename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "file_appended", fileAppended );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "date_time_added", dateTimeAdded );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "targetfilename_extension", targetFilenameExtension );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "uploadfilename", uploadFilename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "url_fieldname", urlFieldname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "upload_fieldname", uploadFieldname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "dest_fieldname", destinationFieldname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "run_every_row", runForEveryRow );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "username", username );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "password", Encr
      .encryptPasswordIfNotUsingVariables( password ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "proxy_host", proxyHostname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "proxy_port", proxyPort );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "non_proxy_hosts", nonProxyHosts );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "addfilenameresult", addfilenameresult );
    if ( headerName != null ) {
      for ( int i = 0; i < headerName.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "header_name", headerName[ i ] );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "header_value", headerValue[ i ] );
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to load job entry of type 'HTTP' to the repository for id_job=" + id_job, dbe );
  }
}
Example #29
Source File: DatabaseMeta.java From pentaho-kettle with Apache License 2.0
public static final int[] getAccessTypeList( String dbTypeDesc ) {
  try {
    DatabaseInterface di = findDatabaseInterface( dbTypeDesc );
    return di.getAccessTypeList();
  } catch ( KettleDatabaseException kde ) {
    return null;
  }
}
Example #30
Source File: Database.java From pentaho-kettle with Apache License 2.0
/**
 * Return SQL CREATION statement for a Table
 *
 * @param tableName
 *          The table to create
 * @throws KettleDatabaseException
 */
public String getDDLCreationTable( String tableName, RowMetaInterface fields ) throws KettleDatabaseException {
  String retval;

  // First, check for reserved SQL in the input row r...
  databaseMeta.quoteReservedWords( fields );
  String quotedTk = databaseMeta.quoteField( null );
  retval = getCreateTableStatement( tableName, fields, quotedTk, false, null, true );

  return retval;
}