org.pentaho.di.core.database.DatabaseMeta Java Examples
The following examples show how to use
org.pentaho.di.core.database.DatabaseMeta.
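Before the examples, a minimal sketch of building a DatabaseMeta by hand may help. This is illustrative only: the class name and connection values are hypothetical, the eight-argument constructor (name, type, access, host, database, port, user, password) is the one used in Example #2 below, and the sketch assumes that KettleClientEnvironment.init() is enough to register the database plugins so the "MYSQL" type can be resolved.

import org.pentaho.di.core.KettleClientEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;

public class DatabaseMetaSketch {
  public static void main( String[] args ) throws Exception {
    // Register core plugins (including database types) before creating connections.
    KettleClientEnvironment.init();

    // Arguments: name, type, access, host, database, port, user, password.
    DatabaseMeta meta =
      new DatabaseMeta( "my_connection", "MYSQL", "Native", "localhost", "sampledb", "3306", "user", "password" );

    // The JDBC URL and driver class are derived from the type and access settings.
    System.out.println( meta.getURL() );
    System.out.println( meta.getDriverClass() );

    // Identifier quoting is database-specific (compare Example #10 below).
    System.out.println( meta.getQuotedSchemaTableCombination( "sampledb", "orders" ) );
  }
}

Most of the examples that follow obtain a DatabaseMeta indirectly instead: from a repository (rep.loadDatabaseMetaFromStepAttribute), from the metastore (Example #3), or by name from a shared list (DatabaseMeta.findDatabase).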
Example #1
Source File: JobEntrySyslog.java From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    port = XMLHandler.getTagValue( entrynode, "port" );
    serverName = XMLHandler.getTagValue( entrynode, "servername" );
    facility = XMLHandler.getTagValue( entrynode, "facility" );
    priority = XMLHandler.getTagValue( entrynode, "priority" );
    message = XMLHandler.getTagValue( entrynode, "message" );
    datePattern = XMLHandler.getTagValue( entrynode, "datePattern" );
    addTimestamp = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "addTimestamp" ) );
    addHostname = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "addHostname" ) );
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( "Unable to load job entry of type 'Syslog' from XML node", xe );
  }
}
Example #2
Source File: AdvancedMQLQueryImplIT.java From pentaho-metadata with GNU Lesser General Public License v2.1
public void testAliasedJoin() throws Exception {
  BusinessModel model = getDefaultModel();
  BusinessColumn bc1 = model.findBusinessColumn( "bc1" ); //$NON-NLS-1$
  BusinessColumn bc3 = model.findBusinessColumn( "bc3" ); //$NON-NLS-1$

  DatabaseMeta databaseMeta = new DatabaseMeta( "", "ORACLE", "Native", "", "", "", "", "" ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$ //$NON-NLS-6$ //$NON-NLS-7$
  AdvancedMQLQuery myTest = new AdvancedMQLQuery( null, model, databaseMeta, "en_US" );

  myTest.addSelection( new AdvancedMQLQuery.AliasedSelection( bc1, null ) );
  myTest.addSelection( new AdvancedMQLQuery.AliasedSelection( bc1, "alias1" ) );
  myTest.addSelection( new AdvancedMQLQuery.AliasedSelection( bc3, null ) );
  myTest.addSelection( new AdvancedMQLQuery.AliasedSelection( "[alias1.bc1] * 3" ) );

  myTest.addConstraint( "AND", "[alias1.bc1] > 10" );
  myTest.addConstraint( "AND", "[bt3.bc3] > 10" );

  // SQLQueryTest.printOutJava(myTest.getQuery().getQuery());

  assertEqualsIgnoreWhitespaces( "SELECT DISTINCT "
    + "bt1.pc1 AS COL0 ,"
    + "bt1_alias1.pc1 AS COL1 ,"
    + "bt3.pc3 AS COL2 , "
    + "bt1_alias1.pc1 * 3 AS COL3 "
    + "FROM "
    + "pt1 bt1 ,"
    + "pt2 bt2 ,"
    + "pt3 bt3 ,"
    + "pt1 bt1_alias1 ,"
    + "pt2 bt2_alias1 "
    + "WHERE "
    + "( bt1.pc1 = bt2.pc2 ) "
    + "AND ( bt3.pc3 = bt2.pc2 ) "
    + "AND ( bt1_alias1.pc1 = bt2_alias1.pc2 ) "
    + "AND ( bt3.pc3 = bt2_alias1.pc2 ) "
    + "AND (( bt1_alias1.pc1 > 10 ) "
    + "AND ( bt3.pc3 > 10 ))",
    myTest.getQuery().getQuery() );
}
Example #3
Source File: DatabaseMetaStoreUtil.java From pentaho-kettle with Apache License 2.0
public static List<DatabaseMeta> getDatabaseElements( IMetaStore metaStore ) throws MetaStoreException {
  List<DatabaseMeta> databases = new ArrayList<DatabaseMeta>();

  // If the data type doesn't exist, it's an empty list...
  //
  IMetaStoreElementType elementType =
    metaStore.getElementTypeByName( PentahoDefaults.NAMESPACE,
      PentahoDefaults.DATABASE_CONNECTION_ELEMENT_TYPE_NAME );
  if ( elementType == null ) {
    return databases;
  }

  List<IMetaStoreElement> elements = metaStore.getElements( PentahoDefaults.NAMESPACE, elementType );
  for ( IMetaStoreElement element : elements ) {
    try {
      DatabaseMeta databaseMeta = loadDatabaseMetaFromDatabaseElement( metaStore, element );
      databases.add( databaseMeta );
    } catch ( Exception e ) {
      throw new MetaStoreException( "Unable to load database from element with name '"
        + element.getName() + "' and type '" + elementType.getName() + "'", e );
    }
  }

  return databases;
}
Example #4
Source File: ExecSQLRowMeta.java From pentaho-kettle with Apache License 2.0
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  this.databasesList = databases;
  try {
    databaseMeta = rep.loadDatabaseMetaFromStepAttribute( id_step, "id_connection", databases );
    commitSize = (int) rep.getStepAttributeInteger( id_step, "commit" );
    sqlField = rep.getStepAttributeString( id_step, "sql_field" );
    insertField = rep.getStepAttributeString( id_step, "insert_field" );
    updateField = rep.getStepAttributeString( id_step, "update_field" );
    deleteField = rep.getStepAttributeString( id_step, "delete_field" );
    readField = rep.getStepAttributeString( id_step, "read_field" );
    sqlFromfile = rep.getStepAttributeBoolean( id_step, "sqlFromfile" );
    String sendOneStatementString = rep.getStepAttributeString( id_step, "sendOneStatement" );
    if ( Utils.isEmpty( sendOneStatementString ) ) {
      sendOneStatement = true;
    } else {
      sendOneStatement = rep.getStepAttributeBoolean( id_step, "sendOneStatement" );
    }
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "ExecSQLRowMeta.Exception.UnexpectedErrorReadingStepInfo" ), e );
  }
}
Example #5
Source File: JobEntrySQLDialog.java From pentaho-kettle with Apache License 2.0
/**
 * Copy information from the meta-data input to the dialog fields.
 */
public void getData() {
  wName.setText( Const.nullToEmpty( jobEntry.getName() ) );
  wSQL.setText( Const.nullToEmpty( jobEntry.getSQL() ) );
  DatabaseMeta dbinfo = jobEntry.getDatabase();
  if ( dbinfo != null && dbinfo.getName() != null ) {
    wConnection.setText( dbinfo.getName() );
  } else {
    wConnection.setText( "" );
  }
  wUseSubs.setSelection( jobEntry.getUseVariableSubstitution() );
  wSQLFromFile.setSelection( jobEntry.getSQLFromFile() );
  wSendOneStatement.setSelection( jobEntry.isSendOneStatement() );
  wFilename.setText( Const.nullToEmpty( jobEntry.getSQLFilename() ) );

  wName.selectAll();
  wName.setFocus();
}
Example #6
Source File: JobEntryColumnsExist.java From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    tablename = XMLHandler.getTagValue( entrynode, "tablename" );
    schemaname = XMLHandler.getTagValue( entrynode, "schemaname" );
    String dbname = XMLHandler.getTagValue( entrynode, "connection" );
    connection = DatabaseMeta.findDatabase( databases, dbname );

    Node fields = XMLHandler.getSubNode( entrynode, "fields" );

    // How many field arguments?
    int nrFields = XMLHandler.countNodes( fields, "field" );
    allocate( nrFields );

    // Read them all...
    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      arguments[i] = XMLHandler.getTagValue( fnode, "name" );
    }
  } catch ( KettleException e ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "JobEntryColumnsExist.Meta.UnableLoadXml" ), e );
  }
}
Example #7
Source File: KafkaConsumerMeta.java From pentaho-kafka-consumer with Apache License 2.0
@Override
public void readRep(Repository rep, IMetaStore metaStore, ObjectId stepId, List<DatabaseMeta> databases)
        throws KettleException {
    try {
        topic = rep.getStepAttributeString(stepId, ATTR_TOPIC);
        field = rep.getStepAttributeString(stepId, ATTR_FIELD);
        keyField = rep.getStepAttributeString(stepId, ATTR_KEY_FIELD);
        limit = rep.getStepAttributeString(stepId, ATTR_LIMIT);
        timeout = rep.getStepAttributeString(stepId, ATTR_TIMEOUT);
        stopOnEmptyTopic = rep.getStepAttributeBoolean(stepId, ATTR_STOP_ON_EMPTY_TOPIC);
        String kafkaPropsXML = rep.getStepAttributeString(stepId, ATTR_KAFKA);
        if (kafkaPropsXML != null) {
            kafkaProperties.loadFromXML(new ByteArrayInputStream(kafkaPropsXML.getBytes()));
        }
        // Support old versions:
        for (String name : KAFKA_PROPERTIES_NAMES) {
            String value = rep.getStepAttributeString(stepId, name);
            if (value != null) {
                kafkaProperties.put(name, value);
            }
        }
    } catch (Exception e) {
        throw new KettleException("KafkaConsumerMeta.Exception.loadRep", e);
    }
}
Example #8
Source File: JobGeneratorTest.java From pentaho-kettle with Apache License 2.0
@Before
public void setUp() throws Exception {
  final StarDomain starDomain = mock( StarDomain.class );

  final Domain domain = mock( Domain.class );
  when( domain.getProperty( eq( DefaultIDs.DOMAIN_TARGET_DATABASE ) ) ).thenReturn( "test_domain_target_db" );
  when( starDomain.getDomain() ).thenReturn( domain );

  final Repository repository = mock( Repository.class );
  final RepositoryDirectoryInterface targetDirectory = mock( RepositoryDirectoryInterface.class );

  final DatabaseMeta meta = Mockito.mock( DatabaseMeta.class );
  Mockito.when( meta.getName() ).thenReturn( "test_domain_target_db" );
  final LinkedList<DatabaseMeta> databases = new LinkedList<DatabaseMeta>() { {
      add( meta );
    } };

  final String locale = Locale.US.toString();

  jobGenerator = new JobGenerator( starDomain, repository, targetDirectory, databases, locale );
}
Example #9
Source File: BeamConsumeMeta.java From kettle-beam with Apache License 2.0
@Override
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  bootstrapServers = XMLHandler.getTagValue( stepnode, BOOTSTRAP_SERVERS );
  topics = XMLHandler.getTagValue( stepnode, TOPICS );
  keyField = XMLHandler.getTagValue( stepnode, KEY_FIELD );
  messageField = XMLHandler.getTagValue( stepnode, MESSAGE_FIELD );
  groupId = XMLHandler.getTagValue( stepnode, GROUP_ID );
  usingProcessingTime = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, USE_PROCESSING_TIME ) );
  usingLogAppendTime = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, USE_LOG_APPEND_TIME ) );
  usingCreateTime = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, USE_CREATE_TIME ) );
  restrictedToCommitted = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, RESTRICT_TO_COMMITTED ) );
  allowingCommitOnConsumedOffset = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, ALLOW_COMMIT_ON_CONSUMED ) );
  configOptions = new ArrayList<>();
  Node optionsNode = XMLHandler.getSubNode( stepnode, CONFIG_OPTIONS );
  List<Node> optionNodes = XMLHandler.getNodes( optionsNode, CONFIG_OPTION );
  for ( Node optionNode : optionNodes ) {
    String parameter = XMLHandler.getTagValue( optionNode, CONFIG_OPTION_PARAMETER );
    String value = XMLHandler.getTagValue( optionNode, CONFIG_OPTION_VALUE );
    ConfigOption.Type type = ConfigOption.Type.getTypeFromName( XMLHandler.getTagValue( optionNode, CONFIG_OPTION_TYPE ) );
    configOptions.add( new ConfigOption( parameter, value, type ) );
  }
}
Example #10
Source File: PGBulkLoader.java From pentaho-kettle with Apache License 2.0
void processTruncate() throws Exception {
  Connection connection = data.db.getConnection();
  String loadAction = environmentSubstitute( meta.getLoadAction() );

  if ( loadAction.equalsIgnoreCase( "truncate" ) ) {
    DatabaseMeta dm = meta.getDatabaseMeta();
    String tableName =
      dm.getQuotedSchemaTableCombination( environmentSubstitute( meta.getSchemaName() ),
        environmentSubstitute( meta.getTableName() ) );
    logBasic( "Launching command: " + "TRUNCATE " + tableName );

    Statement statement = connection.createStatement();
    try {
      statement.executeUpdate( "TRUNCATE " + tableName );
    } catch ( Exception ex ) {
      throw new KettleException( "Error while truncating " + tableName, ex );
    } finally {
      statement.close();
    }
  }
}
Example #11
Source File: XMLJoinMeta.java From pentaho-kettle with Apache License 2.0
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    targetXMLstep = rep.getStepAttributeString( id_step, "targetXMLstep" );
    targetXMLfield = rep.getStepAttributeString( id_step, "targetXMLfield" );
    sourceXMLstep = rep.getStepAttributeString( id_step, "sourceXMLstep" );
    sourceXMLfield = rep.getStepAttributeString( id_step, "sourceXMLfield" );
    targetXPath = rep.getStepAttributeString( id_step, "targetXPath" );
    complexJoin = rep.getStepAttributeBoolean( id_step, "complexJoin" );
    joinCompareField = rep.getStepAttributeString( id_step, "joinCompareField" );
    valueXMLfield = rep.getStepAttributeString( id_step, "valueXMLfield" );
    encoding = rep.getStepAttributeString( id_step, "encoding" );
    omitXMLHeader = rep.getStepAttributeBoolean( id_step, "omitXMLHeader" );
    omitNullValues = rep.getStepAttributeBoolean( id_step, "omitNullValues" );
  } catch ( Exception e ) {
    throw new KettleException( "Unexpected error reading step information from the repository", e );
  }
}
Example #12
Source File: JobEntryTalendJobExec.java From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    filename = XMLHandler.getTagValue( entrynode, "filename" );
    className = XMLHandler.getTagValue( entrynode, "class_name" );
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "JobEntryTalendJobExec.ERROR_0001_Cannot_Load_Job_Entry_From_Xml_Node" ), xe );
  }
}
Example #13
Source File: DatabaseLookupMeta.java From pentaho-kettle with Apache License 2.0
@Override
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  streamKeyField1 = null;
  returnValueField = null;

  readData( stepnode, databases );
}
Example #14
Source File: JobLogTable.java From pentaho-kettle with Apache License 2.0
public static JobLogTable getDefault( VariableSpace space, HasDatabasesInterface databasesInterface ) {
  JobLogTable table = new JobLogTable( space, databasesInterface );

  table.fields.add( new LogTableField( ID.ID_JOB.id, true, false, "ID_JOB",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.BatchID" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.BatchID" ), ValueMetaInterface.TYPE_INTEGER, 8 ) );
  table.fields.add( new LogTableField( ID.CHANNEL_ID.id, true, false, "CHANNEL_ID",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.ChannelID" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.ChannelID" ), ValueMetaInterface.TYPE_STRING, 255 ) );
  table.fields.add( new LogTableField( ID.JOBNAME.id, true, false, "JOBNAME",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.JobName" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.JobName" ), ValueMetaInterface.TYPE_STRING, 255 ) );
  table.fields.add( new LogTableField( ID.STATUS.id, true, false, "STATUS",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.Status" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.Status" ), ValueMetaInterface.TYPE_STRING, 15 ) );
  table.fields.add( new LogTableField( ID.LINES_READ.id, true, false, "LINES_READ",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.LinesRead" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LinesRead" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.LINES_WRITTEN.id, true, false, "LINES_WRITTEN",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.LinesWritten" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LinesWritten" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.LINES_UPDATED.id, true, false, "LINES_UPDATED",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.LinesUpdated" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LinesUpdated" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.LINES_INPUT.id, true, false, "LINES_INPUT",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.LinesInput" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LinesInput" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.LINES_OUTPUT.id, true, false, "LINES_OUTPUT",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.LinesOutput" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LinesOutput" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.LINES_REJECTED.id, true, false, "LINES_REJECTED",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.LinesRejected" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LinesRejected" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.ERRORS.id, true, false, "ERRORS",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.Errors" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.Errors" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.STARTDATE.id, true, false, "STARTDATE",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.StartDateRange" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.StartDateRange" ), ValueMetaInterface.TYPE_DATE, -1 ) );
  table.fields.add( new LogTableField( ID.ENDDATE.id, true, false, "ENDDATE",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.EndDateRange" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.EndDateRange" ), ValueMetaInterface.TYPE_DATE, -1 ) );
  table.fields.add( new LogTableField( ID.LOGDATE.id, true, false, "LOGDATE",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.LogDate" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LogDate" ), ValueMetaInterface.TYPE_DATE, -1 ) );
  table.fields.add( new LogTableField( ID.DEPDATE.id, true, false, "DEPDATE",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.DepDate" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.DepDate" ), ValueMetaInterface.TYPE_DATE, -1 ) );
  table.fields.add( new LogTableField( ID.REPLAYDATE.id, true, false, "REPLAYDATE",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.ReplayDate" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.ReplayDate" ), ValueMetaInterface.TYPE_DATE, -1 ) );
  table.fields.add( new LogTableField( ID.LOG_FIELD.id, true, false, "LOG_FIELD",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.LogField" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LogField" ), ValueMetaInterface.TYPE_STRING, DatabaseMeta.CLOB_LENGTH ) );
  table.fields.add( new LogTableField( ID.EXECUTING_SERVER.id, false, false, "EXECUTING_SERVER",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.ExecutingServer" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.ExecutingServer" ), ValueMetaInterface.TYPE_STRING, 255 ) );
  table.fields.add( new LogTableField( ID.EXECUTING_USER.id, false, false, "EXECUTING_USER",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.ExecutingUser" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.ExecutingUser" ), ValueMetaInterface.TYPE_STRING, 255 ) );
  table.fields.add( new LogTableField( ID.START_JOB_ENTRY.id, false, false, "START_JOB_ENTRY",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.StartingJobEntry" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.StartingJobEntry" ), ValueMetaInterface.TYPE_STRING, 255 ) );
  table.fields.add( new LogTableField( ID.CLIENT.id, false, false, "CLIENT",
    BaseMessages.getString( PKG, "JobLogTable.FieldName.Client" ),
    BaseMessages.getString( PKG, "JobLogTable.FieldDescription.Client" ), ValueMetaInterface.TYPE_STRING, 255 ) );

  table.findField( ID.ID_JOB ).setKey( true );
  table.findField( ID.LOGDATE ).setLogDateField( true );
  table.findField( ID.LOG_FIELD ).setLogField( true );
  table.findField( ID.CHANNEL_ID ).setVisible( false );
  table.findField( ID.JOBNAME ).setVisible( false );
  table.findField( ID.STATUS ).setStatusField( true );
  table.findField( ID.ERRORS ).setErrorsField( true );
  table.findField( ID.JOBNAME ).setNameField( true );

  return table;
}
Example #15
Source File: CombinationLookupMeta.java From pentaho-kettle with Apache License 2.0
@Override
public DatabaseMeta[] getUsedDatabaseConnections() {
  if ( databaseMeta != null ) {
    return new DatabaseMeta[] { databaseMeta };
  } else {
    return super.getUsedDatabaseConnections();
  }
}
Example #16
Source File: SynchronizeAfterMergeDialog.java From pentaho-kettle with Apache License 2.0
private void getSchemaNames() {
  DatabaseMeta databaseMeta = transMeta.findDatabase( wConnection.getText() );
  if ( databaseMeta != null ) {
    Database database = new Database( loggingObject, databaseMeta );
    try {
      database.connect();
      String[] schemas = database.getSchemas();

      if ( null != schemas && schemas.length > 0 ) {
        schemas = Const.sortStrings( schemas );
        EnterSelectionDialog dialog =
          new EnterSelectionDialog( shell, schemas,
            BaseMessages.getString( PKG, "SynchronizeAfterMergeDialog.AvailableSchemas.Title", wConnection.getText() ),
            BaseMessages.getString( PKG, "SynchronizeAfterMergeDialog.AvailableSchemas.Message", wConnection.getText() ) );
        String d = dialog.open();
        if ( d != null ) {
          wSchema.setText( Const.NVL( d, "" ) );
          setTableFieldCombo();
        }
      } else {
        MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
        mb.setMessage( BaseMessages.getString( PKG, "SynchronizeAfterMergeDialog.NoSchema.Error" ) );
        mb.setText( BaseMessages.getString( PKG, "SynchronizeAfterMergeDialog.GetSchemas.Error" ) );
        mb.open();
      }
    } catch ( Exception e ) {
      new ErrorDialog( shell, BaseMessages.getString( PKG, "System.Dialog.Error.Title" ),
        BaseMessages.getString( PKG, "SynchronizeAfterMergeDialog.ErrorGettingSchemas" ), e );
    } finally {
      database.disconnect();
    }
  }
}
Example #17
Source File: CreateDatabaseWizardPage2.java From pentaho-kettle with Apache License 2.0
public DatabaseMeta getDatabaseInfo() {
  if ( wUsername.getText() != null && wUsername.getText().length() > 0 ) {
    databaseMeta.setUsername( wUsername.getText() );
  }

  if ( wPassword.getText() != null && wPassword.getText().length() > 0 ) {
    databaseMeta.setPassword( wPassword.getText() );
  }

  wTest.setEnabled( ( (BaseDatabaseMeta) databaseMeta.getDatabaseInterface() ).canTest() );

  return databaseMeta;
}
Example #18
Source File: JobEntryDeleteResultFilenames.java From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    foldername = rep.getJobEntryAttributeString( id_jobentry, "foldername" );
    specifywildcard = rep.getJobEntryAttributeBoolean( id_jobentry, "specify_wildcard" );
    wildcard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" );
    wildcardexclude = rep.getJobEntryAttributeString( id_jobentry, "wildcardexclude" );
  } catch ( KettleException dbe ) {
    throw new KettleXMLException( BaseMessages.getString( PKG,
      "JobEntryDeleteResultFilenames.CanNotLoadFromRep", "" + id_jobentry, dbe.getMessage() ) );
  }
}
Example #19
Source File: JobEntryDummy.java From pentaho-kettle with Apache License 2.0
@Override
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    sourceDirectory = XMLHandler.getTagValue( entrynode, SOURCEDIRECTORY );
    targetDirectory = XMLHandler.getTagValue( entrynode, TARGETDIRECTORY );
    wildcard = XMLHandler.getTagValue( entrynode, WILDCARD );
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( "Unable to load file exists job entry from XML node", xe );
  }
}
Example #20
Source File: CreateDatabaseWizardPageInformix.java From pentaho-kettle with Apache License 2.0
public DatabaseMeta getDatabaseInfo() {
  if ( wServername.getText() != null && wServername.getText().length() > 0 ) {
    info.setServername( wServername.getText() );
  }

  return info;
}
Example #21
Source File: SpoonJobDelegate.java From pentaho-kettle with Apache License 2.0
private void setTransMetaNote( DatabaseMeta sourceDbInfo, DatabaseMeta targetDbInfo, String[] tables, int i,
  TransMeta transMeta ) {
  String note =
    BaseMessages.getString( PKG, "Spoon.RipDB.Monitor.Note1" ) + tables[i]
      + BaseMessages.getString( PKG, "Spoon.RipDB.Monitor.Note2" ) + sourceDbInfo + "]" + Const.CR;
  note +=
    BaseMessages.getString( PKG, "Spoon.RipDB.Monitor.Note3" ) + tables[i]
      + BaseMessages.getString( PKG, "Spoon.RipDB.Monitor.Note4" ) + targetDbInfo + "]";
  NotePadMeta ni = new NotePadMeta( note, 150, 10, -1, -1 );
  transMeta.addNote( ni );
}
Example #22
Source File: SqlGenerator.java From pentaho-metadata with GNU Lesser General Public License v2.1
/**
 * This method adds the order-by statements to the query model.
 *
 * @param query                  SQL query model.
 * @param model                  The business model.
 * @param orderBy                List of order bys.
 * @param databaseMeta           Database info.
 * @param locale                 Locale string.
 * @param tableAliases           Aliases of the tables to be used during query generation.
 * @param columnsMap             The column map is a unique mapping of column alias to the column ID.
 * @param parameters             Parameters to be used during query generation.
 * @param genAsPreparedStatement Tells the method to generate the query as a prepared statement.
 */
protected void generateOrderBy( SQLQueryModel query, LogicalModel model, List<Order> orderBy,
  DatabaseMeta databaseMeta, String locale, Map<LogicalTable, String> tableAliases,
  Map<String, String> columnsMap, Map<String, Object> parameters, boolean genAsPreparedStatement ) {
  if ( orderBy != null ) {
    for ( Order orderItem : orderBy ) {
      LogicalColumn businessColumn = orderItem.getSelection().getLogicalColumn();
      String alias = null;
      if ( columnsMap != null ) {
        // The column map is a unique mapping of Column alias to the column ID
        // Here we have the column ID and we need the alias.
        // We need to do the order by on the alias, not the column name itself.
        // For most databases, it can be both, but the alias is more standard.
        //
        // Using the column name and not the alias caused an issue on Apache Derby.
        //
        for ( String key : columnsMap.keySet() ) {
          String value = columnsMap.get( key );
          if ( value.equals( businessColumn.getId() ) ) {
            // Found it: the alias is the key
            alias = key;
            break;
          }
        }
      }
      SqlAndTables sqlAndTables =
        getBusinessColumnSQL( model, orderItem.getSelection(), tableAliases, parameters,
          genAsPreparedStatement, databaseMeta, locale );
      query.addOrderBy( sqlAndTables.getSql(), databaseMeta.quoteField( alias ),
        orderItem.getType() != Type.ASC ? OrderType.DESCENDING : null );
    }
  }
}
Example #23
Source File: JobEntryFTPSPUT.java From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    serverName = rep.getJobEntryAttributeString( id_jobentry, "servername" );
    serverPort = rep.getJobEntryAttributeString( id_jobentry, "serverport" );
    userName = rep.getJobEntryAttributeString( id_jobentry, "username" );
    password =
      Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "password" ) );
    remoteDirectory = rep.getJobEntryAttributeString( id_jobentry, "remoteDirectory" );
    localDirectory = rep.getJobEntryAttributeString( id_jobentry, "localDirectory" );
    wildcard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" );
    binaryMode = rep.getJobEntryAttributeBoolean( id_jobentry, "binary" );
    timeout = (int) rep.getJobEntryAttributeInteger( id_jobentry, "timeout" );
    remove = rep.getJobEntryAttributeBoolean( id_jobentry, "remove" );
    onlyPuttingNewFiles = rep.getJobEntryAttributeBoolean( id_jobentry, "only_new" );
    activeConnection = rep.getJobEntryAttributeBoolean( id_jobentry, "active" );

    proxyHost = rep.getJobEntryAttributeString( id_jobentry, "proxy_host" );
    proxyPort = rep.getJobEntryAttributeString( id_jobentry, "proxy_port" );
    proxyUsername = rep.getJobEntryAttributeString( id_jobentry, "proxy_username" );
    proxyPassword = rep.getJobEntryAttributeString( id_jobentry, "proxy_password" );

    connectionType =
      FTPSConnection.getConnectionTypeByCode( Const.NVL( rep.getJobEntryAttributeString(
        id_jobentry, "connection_type" ), "" ) );
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobFTPSPUT.UnableToLoadFromRepo", String
      .valueOf( id_jobentry ) ), dbe );
  }
}
Example #24
Source File: EditConnectionListenerTest.java From pentaho-kettle with Apache License 2.0
@Test
public void replaceSharedConnectionDoesNotExecuted_for_nonshared_connection() {
  dialog.transMeta.addDatabase( createDefaultDatabase( false ) );

  editConnectionListener.widgetSelected( null );

  verify( editConnectionListener, never() ).replaceSharedConnection(
    any( DatabaseMeta.class ), any( DatabaseMeta.class ) );
}
Example #25
Source File: SQLFileOutputMeta.java From pentaho-kettle with Apache License 2.0
private void readData( Node stepnode, List<? extends SharedObjectInterface> databases ) throws KettleXMLException {
  try {
    String con = XMLHandler.getTagValue( stepnode, "connection" );
    databaseMeta = DatabaseMeta.findDatabase( databases, con );
    schemaName = XMLHandler.getTagValue( stepnode, "schema" );
    tablename = XMLHandler.getTagValue( stepnode, "table" );
    truncateTable = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "truncate" ) );
    createTable = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "create" ) );
    encoding = XMLHandler.getTagValue( stepnode, "encoding" );
    dateformat = XMLHandler.getTagValue( stepnode, "dateformat" );
    AddToResult = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "AddToResult" ) );
    StartNewLine = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "StartNewLine" ) );

    fileName = XMLHandler.getTagValue( stepnode, "file", "name" );
    createparentfolder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "create_parent_folder" ) );
    extension = XMLHandler.getTagValue( stepnode, "file", "extention" );
    fileAppended = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "append" ) );
    stepNrInFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "split" ) );
    partNrInFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "haspartno" ) );
    dateInFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "add_date" ) );
    timeInFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "add_time" ) );
    splitEvery = Const.toInt( XMLHandler.getTagValue( stepnode, "file", "splitevery" ), 0 );
    DoNotOpenNewFileInit = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "DoNotOpenNewFileInit" ) );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example #26
Source File: PaloCubeDelete.java From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    this.databaseMeta = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
    this.setCubeName( rep.getStepAttributeString( id_jobentry, "cubeName" ) );
  } catch ( KettleException dbe ) {
    throw new KettleException( "Unable to load job entry for type file exists from the repository for id_jobentry="
      + id_jobentry, dbe );
  }
}
Example #27
Source File: SAPConnectionParamsHelper.java From pentaho-kettle with Apache License 2.0
public static SAPConnectionParams getFromDatabaseMeta( DatabaseMeta sapConnection ) {
  String name = sapConnection.getName();
  String host = sapConnection.environmentSubstitute( sapConnection.getHostname() );
  String sysnr = sapConnection.environmentSubstitute( sapConnection.getAttributes().getProperty( "SAPSystemNumber" ) );
  String client = sapConnection.environmentSubstitute( sapConnection.getAttributes().getProperty( "SAPClient" ) );
  String user = sapConnection.environmentSubstitute( sapConnection.getUsername() );
  String password = sapConnection.environmentSubstitute( sapConnection.getPassword() );
  String lang = "";
  return new SAPConnectionParams( name, host, sysnr, client, user, password, lang );
}
Example #28
Source File: SqlGenerator.java From pentaho-metadata with GNU Lesser General Public License v2.1
private static boolean hasAggregateDefinedAlready( String sql, DatabaseMeta databaseMeta ) {
  String trimmed = sql.trim();
  return trimmed.startsWith( databaseMeta.getFunctionAverage() + "(" ) || //$NON-NLS-1$
    trimmed.startsWith( databaseMeta.getFunctionCount() + "(" ) || //$NON-NLS-1$
    trimmed.startsWith( databaseMeta.getFunctionMaximum() + "(" ) || //$NON-NLS-1$
    trimmed.startsWith( databaseMeta.getFunctionMinimum() + "(" ) || //$NON-NLS-1$
    trimmed.startsWith( databaseMeta.getFunctionSum() + "(" ); //$NON-NLS-1$
}
Example #29
Source File: GetVariableMeta.java From pentaho-kettle with Apache License 2.0
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    int nrfields = rep.countNrStepAttributes( id_step, "field_name" );
    allocate( nrfields );

    for ( int i = 0; i < nrfields; i++ ) {
      fieldDefinitions[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) );
      fieldDefinitions[i].setVariableString( rep.getStepAttributeString( id_step, i, "field_variable" ) );
      fieldDefinitions[i].setFieldType(
        ValueMetaFactory.getIdForValueMeta( rep.getStepAttributeString( id_step, i, "field_type" ) ) );
      fieldDefinitions[i].setFieldFormat( rep.getStepAttributeString( id_step, i, "field_format" ) );
      fieldDefinitions[i].setCurrency( rep.getStepAttributeString( id_step, i, "field_currency" ) );
      fieldDefinitions[i].setDecimal( rep.getStepAttributeString( id_step, i, "field_decimal" ) );
      fieldDefinitions[i].setGroup( rep.getStepAttributeString( id_step, i, "field_group" ) );
      fieldDefinitions[i].setFieldLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) );
      fieldDefinitions[i].setFieldPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) );
      fieldDefinitions[i].setTrimType(
        ValueMetaString.getTrimTypeByCode( rep.getStepAttributeString( id_step, i, "field_trim_type" ) ) );

      // Backward compatibility
      //
      int fieldType = fieldDefinitions[i].getFieldType();
      if ( fieldType == ValueMetaInterface.TYPE_NONE ) {
        fieldDefinitions[i].setFieldType( ValueMetaInterface.TYPE_STRING );
      }
    }
  } catch ( Exception e ) {
    throw new KettleException( "Unexpected error reading step information from the repository", e );
  }
}
Example #30
Source File: JobEntryFileCompare.java From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    filename1 = rep.getJobEntryAttributeString( id_jobentry, "filename1" );
    filename2 = rep.getJobEntryAttributeString( id_jobentry, "filename2" );
    addFilenameToResult = rep.getJobEntryAttributeBoolean( id_jobentry, "add_filename_result" );
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG,
      "JobEntryFileCompare.ERROR_0002_Unable_To_Load_Job_From_Repository", id_jobentry ), dbe );
  }
}