org.pentaho.di.repository.ObjectId Java Examples
The following examples show how to use
org.pentaho.di.repository.ObjectId.
You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: DBProcMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { try { rep.saveDatabaseMetaStepAttribute( id_transformation, id_step, "id_connection", database ); rep.saveStepAttribute( id_transformation, id_step, "procedure", procedure ); for ( int i = 0; i < argument.length; i++ ) { rep.saveStepAttribute( id_transformation, id_step, i, "arg_name", argument[i] ); rep.saveStepAttribute( id_transformation, id_step, i, "arg_direction", argumentDirection[i] ); rep.saveStepAttribute( id_transformation, id_step, i, "arg_type", ValueMetaFactory.getValueMetaName( argumentType[i] ) ); } rep.saveStepAttribute( id_transformation, id_step, "result_name", resultName ); rep.saveStepAttribute( id_transformation, id_step, "result_type", ValueMetaFactory.getValueMetaName( resultType ) ); rep.saveStepAttribute( id_transformation, id_step, "auto_commit", autoCommit ); // Also, save the step-database relationship! if ( database != null ) { rep.insertStepDatabase( id_transformation, id_step, database.getObjectId() ); } } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "DBProcMeta.Exception.UnableToSaveStepInfo" ) + id_step, e ); } }
Example #2
Source File: RepositoryExplorerDialog.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public void editCluster( String clusterName ) { try { ObjectId id = rep.getClusterID( clusterName ); ClusterSchema cluster = rep.loadClusterSchema( id, rep.getSlaveServers(), null ); // Load the last version ClusterSchemaDialog dd = new ClusterSchemaDialog( shell, cluster, rep.getSlaveServers() ); if ( dd.open() ) { rep.insertLogEntry( "Updating cluster '" + cluster.getName() + "'" ); rep.save( cluster, Const.VERSION_COMMENT_EDIT_VERSION, null ); if ( !clusterName.equalsIgnoreCase( cluster.getName() ) ) { refreshTree(); } } } catch ( KettleException e ) { //CHECKSTYLE:LineLength:OFF new ErrorDialog( shell, BaseMessages.getString( PKG, "RepositoryExplorerDialog.Cluster.Edit.UnexpectedError.Title" ), BaseMessages.getString( PKG, "RepositoryExplorerDialog.Cluster.Edit.UnexpectedError.Message" ) + clusterName + "]", e ); } }
Example #3
Source File: GetRepositoryNamesMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Writes this step's configuration (object type, row-number options and the
 * directory/mask lists) to the repository.
 *
 * @throws KettleException if the repository rejects any attribute
 */
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    rep.saveStepAttribute( id_transformation, id_step, "object_type", objectTypeSelection.toString() );
    rep.saveStepAttribute( id_transformation, id_step, "rownum", includeRowNumber );
    rep.saveStepAttribute( id_transformation, id_step, "rownum_field", rowNumberField );

    // Directories, masks and subfolder flags are stored as parallel indexed attributes.
    for ( int idx = 0; idx < directory.length; idx++ ) {
      rep.saveStepAttribute( id_transformation, id_step, idx, "directory", directory[idx] );
      rep.saveStepAttribute( id_transformation, id_step, idx, "name_mask", nameMask[idx] );
      rep.saveStepAttribute( id_transformation, id_step, idx, "exclude_name_mask", excludeNameMask[idx] );
      rep.saveStepAttribute( id_transformation, id_step, idx, "include_subfolders", includeSubFolders[idx] );
    }
  } catch ( Exception e ) {
    throw new KettleException(
      "Unable to save step information to the repository for id_step=" + id_step, e );
  }
}
Example #4
Source File: OdpsMeta.java From aliyun-maxcompute-data-collectors with Apache License 2.0 | 6 votes |
/**
 * Restores this step's MaxCompute (ODPS) connection settings and field list
 * from the repository.
 *
 * @throws KettleException on repository read failure
 */
@Override
public void readRep(Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases)
    throws KettleException {
    setEndpoint(rep.getStepAttributeString(id_step, "endpoint"));
    setTunnelEndpoint(rep.getStepAttributeString(id_step, "tunnelEndpoint"));
    setAccessId(rep.getStepAttributeString(id_step, "accessId"));
    setAccessKey(rep.getStepAttributeString(id_step, "accessKey"));
    setProjectName(rep.getStepAttributeString(id_step, "projectName"));
    setTableName(rep.getStepAttributeString(id_step, "tableName"));
    setPartition(rep.getStepAttributeString(id_step, "partition"));

    // Rebuild the field list from the indexed name/type/comment attributes.
    int fieldCount = rep.countNrStepAttributes(id_step, "field_name");
    odpsFields = new ArrayList<OdpsField>(fieldCount);
    for (int idx = 0; idx < fieldCount; idx++) {
        OdpsField odpsField = new OdpsField();
        odpsField.setName(rep.getStepAttributeString(id_step, idx, "field_name"));
        odpsField.setType(rep.getStepAttributeString(id_step, idx, "field_type"));
        odpsField.setComment(rep.getStepAttributeString(id_step, idx, "field_comment"));
        odpsFields.add(odpsField);
    }
}
Example #5
Source File: KettleDatabaseRepositoryMetaStore.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Looks up a metastore element type by id within a namespace.
 *
 * @return the parsed element type, or {@code null} when the namespace does not exist
 * @throws MetaStoreException wrapping any lower-level failure
 */
@Override
public IMetaStoreElementType getElementType( String namespace, String elementTypeId ) throws MetaStoreException {
  try {
    ObjectId namespaceId = delegate.getNamespaceId( namespace );
    if ( namespaceId == null ) {
      // Unknown namespace: nothing to look up.
      return null;
    }
    RowMetaAndData typeRow =
      delegate.getElementType( new LongObjectId( new StringObjectId( elementTypeId ) ) );
    return delegate.parseElementType( namespace, namespaceId, typeRow );
  } catch ( Exception e ) {
    throw new MetaStoreException( "Unable to get element type with id '"
      + elementTypeId + "' in namespace '" + namespace + "'", e );
  }
}
Example #6
Source File: KettleDatabaseRepository.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public synchronized void insertJobEntryDatabase( ObjectId id_job, ObjectId id_jobentry, ObjectId id_database ) throws KettleException { // First check if the relationship is already there. // There is no need to store it twice! RowMetaAndData check = getJobEntryDatabase( id_jobentry ); if ( check.getInteger( 0 ) == null ) { RowMetaAndData table = new RowMetaAndData(); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB ), id_job ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY ), id_jobentry ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE ), id_database ); connectionDelegate.insertTableRow( KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE, table ); } }
Example #7
Source File: AggregateRowsMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Loads the aggregate field definitions (name, rename, aggregation type)
 * for this step from the repository.
 */
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    int fieldCount = rep.countNrStepAttributes( id_step, "field_name" );
    allocate( fieldCount );

    for ( int idx = 0; idx < fieldCount; idx++ ) {
      fieldName[idx] = rep.getStepAttributeString( id_step, idx, "field_name" );
      fieldNewName[idx] = rep.getStepAttributeString( id_step, idx, "field_rename" );
      // The stored type code is translated back to the internal aggregation constant.
      aggregateType[idx] = getType( rep.getStepAttributeString( id_step, idx, "field_type" ) );
    }
  } catch ( Exception e ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "AggregateRowsMeta.Exception.UnexpectedErrorWhileReadingStepInfo" ), e );
  }
}
Example #8
Source File: NormaliserMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Stores the type field name and the normaliser field rows
 * (name/value/norm) in the repository.
 */
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    rep.saveStepAttribute( id_transformation, id_step, "typefield", typeField );

    for ( int idx = 0; idx < normaliserFields.length; idx++ ) {
      rep.saveStepAttribute( id_transformation, id_step, idx, "field_name", normaliserFields[idx].getName() );
      rep.saveStepAttribute( id_transformation, id_step, idx, "field_value", normaliserFields[idx].getValue() );
      rep.saveStepAttribute( id_transformation, id_step, idx, "field_norm", normaliserFields[idx].getNorm() );
    }
  } catch ( Exception e ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "NormaliserMeta.Exception.UnableToSaveStepInfoToRepository" ) + id_step, e );
  }
}
Example #9
Source File: KettleDatabaseRepositoryTest.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Verifies that insertJobEntryDatabase writes one row with the three integer
 * id columns (job, jobentry, database) into R_JOBENTRY_DATABASE when no
 * relationship exists yet.
 */
@Test
public void testInsertJobEntryDatabase() throws KettleException {
  // No pre-existing relationship: getOneRow returns a null-integer row.
  doReturn( getNullIntegerRow() ).when( repo.connectionDelegate )
    .getOneRow( anyString(), anyString(), any( ObjectId.class ) );

  ArgumentCaptor<String> tableNameCaptor = ArgumentCaptor.forClass( String.class );
  ArgumentCaptor<RowMetaAndData> tableDataCaptor = ArgumentCaptor.forClass( RowMetaAndData.class );
  doNothing().when( repo.connectionDelegate )
    .insertTableRow( tableNameCaptor.capture(), tableDataCaptor.capture() );

  repo.insertJobEntryDatabase( new LongObjectId( 234 ), new LongObjectId( 345 ), new LongObjectId( 456 ) );

  RowMetaAndData inserted = tableDataCaptor.getValue();
  assertEquals( KettleDatabaseRepository.TABLE_R_JOBENTRY_DATABASE, tableNameCaptor.getValue() );
  assertEquals( 3, inserted.size() );

  assertEquals( ValueMetaInterface.TYPE_INTEGER, inserted.getValueMeta( 0 ).getType() );
  assertEquals( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOB, inserted.getValueMeta( 0 ).getName() );
  assertEquals( Long.valueOf( 234 ), inserted.getInteger( 0 ) );

  assertEquals( ValueMetaInterface.TYPE_INTEGER, inserted.getValueMeta( 1 ).getType() );
  assertEquals( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_JOBENTRY, inserted.getValueMeta( 1 ).getName() );
  assertEquals( Long.valueOf( 345 ), inserted.getInteger( 1 ) );

  assertEquals( ValueMetaInterface.TYPE_INTEGER, inserted.getValueMeta( 2 ).getType() );
  assertEquals( KettleDatabaseRepository.FIELD_JOBENTRY_DATABASE_ID_DATABASE, inserted.getValueMeta( 2 ).getName() );
  assertEquals( Long.valueOf( 456 ), inserted.getInteger( 2 ) );
}
Example #10
Source File: KettleDatabaseRepositoryDatabaseDelegate.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Counts the step-database relationships recorded for a transformation.
 *
 * @return the number of R_STEP_DATABASE rows for the given transformation id
 */
public synchronized int getNrDatabases( ObjectId id_transformation ) throws KettleException {
  RowMetaAndData transIdRow = repository.connectionDelegate.getParameterMetaData( id_transformation );
  String sql =
    "SELECT COUNT(*) FROM " + quoteTable( KettleDatabaseRepository.TABLE_R_STEP_DATABASE ) + " WHERE "
      + quote( KettleDatabaseRepository.FIELD_STEP_DATABASE_ID_TRANSFORMATION ) + " = ? ";

  RowMetaAndData countRow =
    repository.connectionDelegate.getOneRow( sql, transIdRow.getRowMeta(), transIdRow.getData() );
  if ( countRow == null ) {
    return 0;
  }
  return (int) countRow.getInteger( 0, 0L );
}
Example #11
Source File: KettleDatabaseRepositoryTransDelegate.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public void saveTransHopMeta( TransHopMeta transHopMeta, ObjectId id_transformation ) throws KettleException { try { // See if a transformation hop with the same fromstep and tostep is // already available... ObjectId id_step_from = transHopMeta.getFromStep() == null ? null : transHopMeta.getFromStep().getObjectId(); ObjectId id_step_to = transHopMeta.getToStep() == null ? null : transHopMeta.getToStep().getObjectId(); // Insert new transMeta hop in repository transHopMeta.setObjectId( insertTransHop( id_transformation, id_step_from, id_step_to, transHopMeta .isEnabled() ) ); } catch ( KettleDatabaseException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "TransHopMeta.Exception.UnableToSaveTransformationHopInfo" ) + id_transformation, dbe ); } }
Example #12
Source File: JobEntryMysqlBulkLoad.java From pentaho-kettle with Apache License 2.0 | 6 votes |
/**
 * Saves this MySQL bulk-load job entry's attributes to the repository.
 *
 * Note: attribute keys (including the legacy misspellings "prorityvalue" and
 * "listattribut") are part of the persisted repository format and must not be
 * renamed, or existing repositories would fail to load.
 *
 * @throws KettleException if any attribute cannot be written
 */
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "schemaname", schemaname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "tablename", tablename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "filename", filename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "separator", separator );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "enclosed", enclosed );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "escaped", escaped );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "linestarted", linestarted );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "lineterminated", lineterminated );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "replacedata", replacedata );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "ignorelines", ignorelines );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "listattribut", listattribut );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "localinfile", localinfile );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "prorityvalue", prorityvalue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "addfiletoresult", addfiletoresult );
    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection );
  } catch ( KettleDatabaseException dbe ) {
    // Fixed copy-paste error: this is a save operation, not a load.
    throw new KettleException(
      "Unable to save job entry of type 'Mysql Bulk Load' to the repository for id_job=" + id_job, dbe );
  }
}
Example #13
Source File: KettleDatabaseRepository.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public synchronized void deleteCondition( ObjectId id_condition ) throws KettleException { boolean ok = true; ObjectId[] ids = getSubConditionIDs( id_condition ); if ( ids.length > 0 ) { // Delete the sub-conditions... for ( int i = 0; i < ids.length && ok; i++ ) { deleteCondition( ids[i] ); } // Then delete the main condition deleteCondition( id_condition ); } else { connectionDelegate.performDelete( "DELETE FROM " + quoteTable( KettleDatabaseRepository.TABLE_R_CONDITION ) + " WHERE " + quote( KettleDatabaseRepository.FIELD_CONDITION_ID_CONDITION ) + " = ? ", id_condition ); } }
Example #14
Source File: ExecSQLRowMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { try { rep.saveDatabaseMetaStepAttribute( id_transformation, id_step, "id_connection", databaseMeta ); rep.saveStepAttribute( id_transformation, id_step, "commit", commitSize ); rep.saveStepAttribute( id_transformation, id_step, "sql_field", sqlField ); rep.saveStepAttribute( id_transformation, id_step, "insert_field", insertField ); rep.saveStepAttribute( id_transformation, id_step, "update_field", updateField ); rep.saveStepAttribute( id_transformation, id_step, "delete_field", deleteField ); rep.saveStepAttribute( id_transformation, id_step, "read_field", readField ); // Also, save the step-database relationship! if ( databaseMeta != null ) { rep.insertStepDatabase( id_transformation, id_step, databaseMeta.getObjectId() ); } rep.saveStepAttribute( id_transformation, id_step, "sqlFromfile", sqlFromfile ); rep.saveStepAttribute( id_transformation, id_step, "sendOneStatement", sendOneStatement ); } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "ExecSQLRowMeta.Exception.UnableToSaveStepInfo" ) + id_step, e ); } }
Example #15
Source File: CassandraInputMeta.java From learning-hadoop with Apache License 2.0 | 6 votes |
/**
 * Restores the Cassandra input step settings from the repository,
 * decrypting the stored password when one is present.
 */
public void readRep(Repository rep, ObjectId id_step, List<DatabaseMeta> databases,
    Map<String, Counter> counters) throws KettleException {
  m_cassandraHost = rep.getStepAttributeString(id_step, 0, "cassandra_host");
  m_cassandraPort = rep.getStepAttributeString(id_step, 0, "cassandra_port");
  m_username = rep.getStepAttributeString(id_step, 0, "username");
  m_password = rep.getStepAttributeString(id_step, 0, "password");
  if (!Const.isEmpty(m_password)) {
    // Passwords may be stored obfuscated; decrypt only when non-empty.
    m_password = Encr.decryptPasswordOptionallyEncrypted(m_password);
  }
  m_cassandraKeyspace = rep.getStepAttributeString(id_step, 0, "cassandra_keyspace");
  m_cqlSelectQuery = rep.getStepAttributeString(id_step, 0, "cql_select_query");
  m_useCompression = rep.getStepAttributeBoolean(id_step, 0, "use_compression");
  m_outputKeyValueTimestampTuples =
      rep.getStepAttributeBoolean(id_step, 0, "output_key_value_timestamp_tuples");
  m_useThriftIO = rep.getStepAttributeBoolean(id_step, 0, "use_thrift_io");
  m_socketTimeout = rep.getStepAttributeString(id_step, 0, "socket_timeout");
}
Example #16
Source File: ColumnExistsMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { try { rep.saveDatabaseMetaStepAttribute( id_transformation, id_step, "id_connection", database ); rep.saveStepAttribute( id_transformation, id_step, "tablename", tablename ); rep.saveStepAttribute( id_transformation, id_step, "schemaname", schemaname ); rep.saveStepAttribute( id_transformation, id_step, "istablenameInfield", istablenameInfield ); rep.saveStepAttribute( id_transformation, id_step, "tablenamefield", tablenamefield ); rep.saveStepAttribute( id_transformation, id_step, "columnnamefield", columnnamefield ); rep.saveStepAttribute( id_transformation, id_step, "resultfieldname", resultfieldname ); // Also, save the step-database relationship! if ( database != null ) { rep.insertStepDatabase( id_transformation, id_step, database.getObjectId() ); } } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "ColumnExistsMeta.Exception.UnableToSaveStepInfo" ) + id_step, e ); } }
Example #17
Source File: KettleFileRepository.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * Returns the object ids of all direct sub-directories of the given directory.
 *
 * @param id_directory id of the parent directory
 * @return one ObjectId per sub-directory (possibly an empty array)
 * @throws KettleException if the directory id cannot be resolved in the tree
 */
public ObjectId[] getSubDirectoryIDs( ObjectId id_directory ) throws KettleException {
  RepositoryDirectoryInterface tree = loadRepositoryDirectoryTree();
  RepositoryDirectoryInterface directory = tree.findDirectory( id_directory );
  if ( directory == null ) {
    // Previously this fell through to a NullPointerException; fail with a
    // descriptive repository exception instead.
    throw new KettleException( "Unable to find directory with id [" + id_directory + "]" );
  }
  ObjectId[] objectIds = new ObjectId[directory.getNrSubdirectories()];
  for ( int i = 0; i < objectIds.length; i++ ) {
    objectIds[i] = directory.getSubdirectory( i ).getObjectId();
  }
  return objectIds;
}
Example #18
Source File: KettleDatabaseRepositoryJobDelegate.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * Persists a two-level attribute map (group -> key -> value) for a job.
 * Each entry is flattened into one job attribute whose code is
 * prefix + group + '\t' + key; null keys or values are skipped.
 */
private void saveJobAttributesMap( ObjectId jobId, Map<String, Map<String, String>> attributesMap ) throws KettleException {
  for ( final Map.Entry<String, Map<String, String>> groupEntry : attributesMap.entrySet() ) {
    final String groupName = groupEntry.getKey();
    for ( final Map.Entry<String, String> attribute : groupEntry.getValue().entrySet() ) {
      final String key = attribute.getKey();
      final String value = attribute.getValue();
      if ( key != null && value != null ) {
        repository.connectionDelegate.insertJobAttribute(
          jobId, 0, JOB_ATTRIBUTE_PREFIX + groupName + '\t' + key, 0, value );
      }
    }
  }
}
Example #19
Source File: PurRepository_Revisions_IT.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * Asserts that the given transformation/job exists in the unified repository
 * and returns its version history.
 */
private List<VersionSummary> assertExistsAndGetRevisions( RepositoryElementInterface transOrJob ) {
  ObjectId elementId = transOrJob.getObjectId();
  assertNotNull( elementId );

  RepositoryFile repoFile = unifiedRepository.getFileById( elementId.toString() );
  assertNotNull( repoFile );

  return unifiedRepository.getVersionSummaries( elementId.toString() );
}
Example #20
Source File: JobEntryTableExists.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * Loads the Table-Exists job entry settings (table, schema, connection)
 * from the repository.
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "TableExists.Meta.UnableLoadRep", "" + id_jobentry ), dbe );
  }
}
Example #21
Source File: DeleteMeta.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * Reads the Delete step configuration from the repository: the database
 * connection, commit size, schema/table names and the lookup-key rows.
 *
 * @throws KettleException wrapping any repository read failure
 */
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    databaseMeta = rep.loadDatabaseMetaFromStepAttribute( id_step, "id_connection", databases );
    commitSize = rep.getStepAttributeString( id_step, "commit" );
    if ( commitSize == null ) {
      // Backward compatibility: older repositories stored "commit" as an
      // integer attribute rather than a string.
      long comSz = -1;
      try {
        comSz = rep.getStepAttributeInteger( id_step, "commit" );
      } catch ( Exception ex ) {
        // No usable integer value either: fall back to the default of 100.
        // (comSz stays -1 so the block below does not overwrite this default.)
        commitSize = "100";
      }
      if ( comSz >= 0 ) {
        commitSize = Long.toString( comSz );
      }
    }
    schemaName = rep.getStepAttributeString( id_step, "schema" );
    tableName = rep.getStepAttributeString( id_step, "table" );

    // Rebuild the key arrays from the indexed attributes.
    int nrkeys = rep.countNrStepAttributes( id_step, "key_name" );
    allocate( nrkeys );
    for ( int i = 0; i < nrkeys; i++ ) {
      keyStream[i] = rep.getStepAttributeString( id_step, i, "key_name" );
      keyLookup[i] = rep.getStepAttributeString( id_step, i, "key_field" );
      keyCondition[i] = rep.getStepAttributeString( id_step, i, "key_condition" );
      keyStream2[i] = rep.getStepAttributeString( id_step, i, "key_name2" );
    }
  } catch ( Exception e ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "DeleteMeta.Exception.UnexpectedErrorInReadingStepInfo" ), e );
  }
}
Example #22
Source File: HL7MLLPInput.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * Saves this HL7 MLLP input job entry's attributes (server, port and the
 * message/type/version variable names) to the repository.
 *
 * @throws KettleException if the repository write fails
 */
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "server", server );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "port", port );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "message_variable", messageVariableName );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "type_variable", messageTypeVariableName );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "version_variable", versionVariableName );
  } catch ( KettleDatabaseException dbe ) {
    // Fixed copy-paste error: the previous message referred to the 'ftp' job entry type.
    throw new KettleException(
      "Unable to save job entry of type 'HL7 MLLP Input' to the repository for id_job=" + id_job, dbe );
  }
}
Example #23
Source File: PurRepositoryStressTest.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * Verifies that getJobsUsingDatabase resolves the database id exactly once
 * and returns a non-null result.
 */
public void testLockLoadGetJobsUsingDatabase() throws Exception {
  ObjectId databaseId = mock( ObjectId.class );
  doReturn( "id1" ).when( databaseId ).getId();

  assertNotNull( purRepository.getJobsUsingDatabase( databaseId ) );
  verify( databaseId, times( 1 ) ).getId();
}
Example #24
Source File: GenerateCsvMeta.java From knowbi-pentaho-pdi-neo4j-output with Apache License 2.0 | 5 votes |
/**
 * Writes the Generate-CSV step settings to the repository.
 * The uniqueness strategy is stored by enum name, or null when unset.
 */
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId transformationId, ObjectId stepId ) throws KettleException {
  rep.saveStepAttribute( transformationId, stepId, GRAPH_FIELD_NAME, graphFieldName );
  rep.saveStepAttribute( transformationId, stepId, BASE_FOLDER, baseFolder );

  String strategyName = uniquenessStrategy != null ? uniquenessStrategy.name() : null;
  rep.saveStepAttribute( transformationId, stepId, UNIQUENESS_STRATEGY, strategyName );

  rep.saveStepAttribute( transformationId, stepId, FILES_PREFIX, filesPrefix );
  rep.saveStepAttribute( transformationId, stepId, FILENAME_FIELD, filenameField );
  rep.saveStepAttribute( transformationId, stepId, FILE_TYPE_FIELD, fileTypeField );
}
Example #25
Source File: PurRepositoryStressTest.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * Verifies that loadPartitionSchema returns a non-null schema and reads the
 * object id exactly three times while doing so.
 */
public void testLockLoadPartitionSchema() throws Exception {
  ObjectId schemaId = mock( ObjectId.class );
  doReturn( "id1" ).when( schemaId ).getId();

  assertNotNull( purRepository.loadPartitionSchema( schemaId, "v1" ) );
  verify( schemaId, times( 3 ) ).getId();
}
Example #26
Source File: SalesforceInputMeta.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId idTransformation, ObjectId idStep ) throws KettleException { super.saveRep( rep, metaStore, idTransformation, idStep ); try { // H.kawaguchi Add 19-01-2009 rep.saveStepAttribute( idTransformation, idStep, "condition", getCondition() ); // H.kawaguchi Add 19-01-2009 rep.saveStepAttribute( idTransformation, idStep, "query", getQuery() ); rep.saveStepAttribute( idTransformation, idStep, "specifyQuery", isSpecifyQuery() ); rep.saveStepAttribute( idTransformation, idStep, "include_targeturl", includeTargetURL() ); rep.saveStepAttribute( idTransformation, idStep, "targeturl_field", getTargetURLField() ); rep.saveStepAttribute( idTransformation, idStep, "include_module", includeModule() ); rep.saveStepAttribute( idTransformation, idStep, "module_field", getModuleField() ); rep.saveStepAttribute( idTransformation, idStep, "include_rownum", includeRowNumber() ); rep.saveStepAttribute( idTransformation, idStep, "include_deletion_date", includeDeletionDate() ); rep.saveStepAttribute( idTransformation, idStep, "include_sql", includeSQL() ); rep.saveStepAttribute( idTransformation, idStep, "sql_field", getSQLField() ); rep.saveStepAttribute( idTransformation, idStep, "include_Timestamp", includeTimestamp() ); rep.saveStepAttribute( idTransformation, idStep, "timestamp_field", getTimestampField() ); rep.saveStepAttribute( idTransformation, idStep, "rownum_field", getRowNumberField() ); rep.saveStepAttribute( idTransformation, idStep, "deletion_date_field", getDeletionDateField() ); rep.saveStepAttribute( idTransformation, idStep, "limit", getRowLimit() ); rep.saveStepAttribute( idTransformation, idStep, "read_from", getReadFrom() ); rep.saveStepAttribute( idTransformation, idStep, "read_to", getReadTo() ); rep.saveStepAttribute( idTransformation, idStep, "records_filter", SalesforceConnectionUtils .getRecordsFilterCode( getRecordsFilter() ) ); rep.saveStepAttribute( idTransformation, idStep, "queryAll", 
isQueryAll() ); for ( int i = 0; i < inputFields.length; i++ ) { SalesforceInputField field = inputFields[i]; field.saveRep( rep, metaStore, idTransformation, idStep, i ); } } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "SalesforceInputMeta.Exception.ErrorSavingToRepository", "" + idStep ), e ); } }
Example #27
Source File: SynchronizeAfterMergeMeta.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * Persists the Synchronize-After-Merge step configuration: connection,
 * commit/table settings, operation-order fields, key rows, update rows and
 * the step-database relationship.
 */
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    rep.saveDatabaseMetaStepAttribute( id_transformation, id_step, "id_connection", databaseMeta );
    rep.saveStepAttribute( id_transformation, id_step, "commit", commitSize );
    rep.saveStepAttribute( id_transformation, id_step, "schema", schemaName );
    rep.saveStepAttribute( id_transformation, id_step, "table", tableName );
    rep.saveStepAttribute( id_transformation, id_step, "tablename_in_field", tablenameInField );
    rep.saveStepAttribute( id_transformation, id_step, "tablename_field", tablenameField );
    rep.saveStepAttribute( id_transformation, id_step, "operation_order_field", operationOrderField );
    rep.saveStepAttribute( id_transformation, id_step, "order_insert", OrderInsert );
    rep.saveStepAttribute( id_transformation, id_step, "order_update", OrderUpdate );
    rep.saveStepAttribute( id_transformation, id_step, "order_delete", OrderDelete );
    rep.saveStepAttribute( id_transformation, id_step, "use_batch", useBatchUpdate );
    rep.saveStepAttribute( id_transformation, id_step, "perform_lookup", performLookup );

    // Lookup keys: one indexed row per key comparison.
    for ( int idx = 0; idx < keyStream.length; idx++ ) {
      rep.saveStepAttribute( id_transformation, id_step, idx, "key_name", keyStream[idx] );
      rep.saveStepAttribute( id_transformation, id_step, idx, "key_field", keyLookup[idx] );
      rep.saveStepAttribute( id_transformation, id_step, idx, "key_condition", keyCondition[idx] );
      rep.saveStepAttribute( id_transformation, id_step, idx, "key_name2", keyStream2[idx] );
    }

    // Update columns: one indexed row per target/source/update-flag triple.
    for ( int idx = 0; idx < updateLookup.length; idx++ ) {
      rep.saveStepAttribute( id_transformation, id_step, idx, "value_name", updateLookup[idx] );
      rep.saveStepAttribute( id_transformation, id_step, idx, "value_rename", updateStream[idx] );
      rep.saveStepAttribute( id_transformation, id_step, idx, "value_update", update[idx].booleanValue() );
    }

    // Also, save the step-database relationship!
    if ( databaseMeta != null ) {
      rep.insertStepDatabase( id_transformation, id_step, databaseMeta.getObjectId() );
    }
  } catch ( Exception e ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "SynchronizeAfterMergeMeta.Exception.UnableToSaveStepInfoToRepository" )
        + id_step, e );
  }
}
Example #28
Source File: NullIfMeta.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * Stores the NullIf field rows (field name and the value to null out)
 * as indexed attributes in the repository.
 */
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    for ( int idx = 0; idx < fields.length; idx++ ) {
      rep.saveStepAttribute( id_transformation, id_step, idx, "field_name", fields[idx].getFieldName() );
      rep.saveStepAttribute( id_transformation, id_step, idx, "field_value", fields[idx].getFieldValue() );
    }
  } catch ( Exception e ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "NullIfMeta.Exception.UnableToSaveStepInfoToRepository" ) + id_step, e );
  }
}
Example #29
Source File: KettleDatabaseRepositoryMetaStoreDelegate.java From pentaho-kettle with Apache License 2.0 | 5 votes |
/**
 * Inserts a new metastore element row for the given element type and writes
 * the element's attributes, returning the generated element id.
 *
 * @param elementType the type the element belongs to (its id must parse as a long)
 * @param element the element to store; its id is set to the generated id
 * @return the new element's repository object id
 * @throws MetaStoreException wrapping any failure during id generation or insert
 */
public ObjectId insertElement( IMetaStoreElementType elementType, IMetaStoreElement element ) throws MetaStoreException {
  try {
    // Reserve the next element id from the R_ELEMENT sequence.
    LongObjectId elementId =
      repository.connectionDelegate.getNextID(
        quoteTable( KettleDatabaseRepository.TABLE_R_ELEMENT ),
        quote( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT ) );

    RowMetaAndData table = new RowMetaAndData();
    table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT ), elementId
      .longValue() );
    table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_ELEMENT_ID_ELEMENT_TYPE ), Long
      .valueOf( elementType.getId() ) );
    table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_ELEMENT_NAME ), element
      .getName() );

    // Manual insert sequence: prepare, bind, execute, close — order matters.
    repository.connectionDelegate.getDatabase().prepareInsert(
      table.getRowMeta(), KettleDatabaseRepository.TABLE_R_ELEMENT );
    repository.connectionDelegate.getDatabase().setValuesInsert( table );
    repository.connectionDelegate.getDatabase().insertRow();
    repository.connectionDelegate.getDatabase().closeInsert();

    element.setId( elementId.toString() );

    // Now save the attributes
    //
    insertAttributes( element.getChildren(), elementId, new LongObjectId( 0L ) );

    if ( log.isDebug() ) {
      log.logDebug( "Saved element with name [" + element.getName() + "]" );
    }

    return elementId;
  } catch ( Exception e ) {
    throw new MetaStoreException( "Unable to create new element with name '" + element.getName() + "'", e );
  }
}
Example #30
Source File: RepositoryProxy.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public String getStepAttributeString( ObjectId idStep, int nr, String code ) throws KettleException { String propName = code + PROP_CODE_NR_SEPARATOR + nr; if ( node.hasProperty( propName ) ) { return node.getProperty( propName ).getString(); } else if ( nr == 0 && node.hasProperty( code ) ) { // Old pur stored elements with no nr when not specified return node.getProperty( code ).getString(); } else { return null; } }