Java Code Examples for org.pentaho.di.core.Const#toLong()
The following examples show how to use org.pentaho.di.core.Const#toLong().
They are drawn from open-source projects; the project and source file for each snippet are noted above the example.
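As the usages below suggest, Const.toLong( String, long ) tries to parse its first argument as a long and falls back to the second argument when the input is missing or not a valid number, rather than throwing an exception. The minimal sketch below illustrates that contract; the class name ConstToLongDemo is invented for illustration, and the exact fallback behaviour should be confirmed against the Const class in your Kettle version.

import org.pentaho.di.core.Const;

public class ConstToLongDemo {
  public static void main( String[] args ) {
    // A parsable string converts normally.
    long ok = Const.toLong( "12345", -1L );             // 12345

    // Unparsable or absent input returns the supplied default, which is why
    // the examples below pass sentinels such as -1L or 0L and then test for them.
    long bad = Const.toLong( "not a number", -1L );     // -1
    long missing = Const.toLong( null, 0L );            // 0

    System.out.println( ok + " " + bad + " " + missing );
  }
}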
Example 1
Source File: ServerStatus.java From pentaho-kettle with Apache License 2.0 | 6 votes |
public ServerStatus( Node statusNode ) throws KettleException {
  this();
  statusDescription = XMLHandler.getTagValue( statusNode, "statusdesc" );
  memoryFree = Const.toLong( XMLHandler.getTagValue( statusNode, "memory_free" ), -1L );
  memoryTotal = Const.toLong( XMLHandler.getTagValue( statusNode, "memory_total" ), -1L );
  String cpuCoresStr = XMLHandler.getTagValue( statusNode, "cpu_cores" );
  cpuCores = Const.toInt( cpuCoresStr, -1 );
  String cpuProcessTimeStr = XMLHandler.getTagValue( statusNode, "cpu_process_time" );
  cpuProcessTime = Utils.isEmpty( cpuProcessTimeStr ) ? 0L : Long.valueOf( cpuProcessTimeStr );
  uptime = Const.toLong( XMLHandler.getTagValue( statusNode, "uptime" ), -1 );
  threadCount = Const.toInt( XMLHandler.getTagValue( statusNode, "thread_count" ), -1 );
  loadAvg = Const.toDouble( XMLHandler.getTagValue( statusNode, "load_avg" ), -1.0 );
  osName = XMLHandler.getTagValue( statusNode, "os_name" );
  osVersion = XMLHandler.getTagValue( statusNode, "os_version" );
  osArchitecture = XMLHandler.getTagValue( statusNode, "os_arch" );
}
Example 2
Source File: GetSubFoldersMeta.java From pentaho-kettle with Apache License 2.0 | 6 votes |
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "rownum" ) );
    isFoldernameDynamic = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "foldername_dynamic" ) );
    rowNumberField = XMLHandler.getTagValue( stepnode, "rownum_field" );
    dynamicFoldernameField = XMLHandler.getTagValue( stepnode, "foldername_field" );

    // Is there a limit on the number of rows we process?
    rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, "limit" ), 0L );

    Node filenode = XMLHandler.getSubNode( stepnode, "file" );
    int nrfiles = XMLHandler.countNodes( filenode, "name" );

    allocate( nrfiles );

    for ( int i = 0; i < nrfiles; i++ ) {
      Node folderNamenode = XMLHandler.getSubNodeByNr( filenode, "name", i );
      Node folderRequirednode = XMLHandler.getSubNodeByNr( filenode, "file_required", i );
      folderName[i] = XMLHandler.getNodeValue( folderNamenode );
      folderRequired[i] = XMLHandler.getNodeValue( folderRequirednode );
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example 3
Source File: Neo4JOutput.java From knowbi-pentaho-pdi-neo4j-output with Apache License 2.0 | 5 votes |
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
  meta = (Neo4JOutputMeta) smi;
  data = (Neo4JOutputData) sdi;

  if ( !meta.isReturningGraph() ) {

    // Connect to Neo4j using info metastore Neo4j Connection metadata
    //
    if ( StringUtils.isEmpty( meta.getConnection() ) ) {
      log.logError( "You need to specify a Neo4j connection to use in this step" );
      return false;
    }

    try {
      // To correct lazy programmers who built certain PDI steps...
      //
      data.metaStore = MetaStoreUtil.findMetaStore( this );
      data.neoConnection =
        NeoConnectionUtils.getConnectionFactory( data.metaStore ).loadElement( meta.getConnection() );
      if ( data.neoConnection == null ) {
        log.logError( "Connection '" + meta.getConnection() + "' could not be found in the metastore "
          + MetaStoreUtil.getMetaStoreDescription( metaStore ) );
        return false;
      }
      data.neoConnection.initializeVariablesFrom( this );
      data.version4 = data.neoConnection.isVersion4();
    } catch ( MetaStoreException e ) {
      log.logError( "Could not load Neo4j connection '" + meta.getConnection() + "' from the metastore", e );
      return false;
    }

    data.batchSize = Const.toLong( environmentSubstitute( meta.getBatchSize() ), 1 );
  }

  return super.init( smi, sdi );
}
Example 4
Source File: SlaveServerStatus.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public SlaveServerStatus( Node statusNode ) throws KettleException {
  this();
  statusDescription = XMLHandler.getTagValue( statusNode, "statusdesc" );
  memoryFree = Const.toLong( XMLHandler.getTagValue( statusNode, "memory_free" ), -1L );
  memoryTotal = Const.toLong( XMLHandler.getTagValue( statusNode, "memory_total" ), -1L );
  String cpuCoresStr = XMLHandler.getTagValue( statusNode, "cpu_cores" );
  cpuCores = Const.toInt( cpuCoresStr, -1 );
  String cpuProcessTimeStr = XMLHandler.getTagValue( statusNode, "cpu_process_time" );
  cpuProcessTime = Utils.isEmpty( cpuProcessTimeStr ) ? 0L : Long.valueOf( cpuProcessTimeStr );
  uptime = Const.toLong( XMLHandler.getTagValue( statusNode, "uptime" ), -1 );
  threadCount = Const.toInt( XMLHandler.getTagValue( statusNode, "thread_count" ), -1 );
  loadAvg = Const.toDouble( XMLHandler.getTagValue( statusNode, "load_avg" ), -1.0 );
  osName = XMLHandler.getTagValue( statusNode, "os_name" );
  osVersion = XMLHandler.getTagValue( statusNode, "os_version" );
  osArchitecture = XMLHandler.getTagValue( statusNode, "os_arch" );

  Node listTransNode = XMLHandler.getSubNode( statusNode, "transstatuslist" );
  Node listJobsNode = XMLHandler.getSubNode( statusNode, "jobstatuslist" );

  int nrTrans = XMLHandler.countNodes( listTransNode, SlaveServerTransStatus.XML_TAG );
  int nrJobs = XMLHandler.countNodes( listJobsNode, SlaveServerJobStatus.XML_TAG );

  for ( int i = 0; i < nrTrans; i++ ) {
    Node transStatusNode = XMLHandler.getSubNodeByNr( listTransNode, SlaveServerTransStatus.XML_TAG, i );
    transStatusList.add( new SlaveServerTransStatus( transStatusNode ) );
  }

  for ( int i = 0; i < nrJobs; i++ ) {
    Node jobStatusNode = XMLHandler.getSubNodeByNr( listJobsNode, SlaveServerJobStatus.XML_TAG, i );
    jobStatusList.add( new SlaveServerJobStatus( jobStatusNode ) );
  }
}
Example 5
Source File: GetSlaveSequence.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
  meta = (GetSlaveSequenceMeta) smi;
  data = (GetSlaveSequenceData) sdi;

  if ( super.init( smi, sdi ) ) {
    data.increment = Const.toLong( environmentSubstitute( meta.getIncrement() ), 1000 );
    data.slaveServer = getTransMeta().findSlaveServer( environmentSubstitute( meta.getSlaveServerName() ) );
    data.sequenceName = environmentSubstitute( meta.getSequenceName() );
    data.value = -1;
    return true;
  }
  return false;
}
Example 6
Source File: SlaveServer.java From pentaho-kettle with Apache License 2.0 | 5 votes |
public long getNextSlaveSequenceValue( String slaveSequenceName, long incrementValue ) throws KettleException {
  try {
    String xml =
      execService( NextSequenceValueServlet.CONTEXT_PATH + "/" + "?" + NextSequenceValueServlet.PARAM_NAME + "="
        + URLEncoder.encode( slaveSequenceName, "UTF-8" ) + "&" + NextSequenceValueServlet.PARAM_INCREMENT + "="
        + Long.toString( incrementValue ) );

    Document doc = XMLHandler.loadXMLString( xml );
    Node seqNode = XMLHandler.getSubNode( doc, NextSequenceValueServlet.XML_TAG );

    String nextValueString = XMLHandler.getTagValue( seqNode, NextSequenceValueServlet.XML_TAG_VALUE );
    String errorString = XMLHandler.getTagValue( seqNode, NextSequenceValueServlet.XML_TAG_ERROR );

    if ( !Utils.isEmpty( errorString ) ) {
      throw new KettleException( errorString );
    }
    if ( Utils.isEmpty( nextValueString ) ) {
      throw new KettleException( "No value retrieved from slave sequence '" + slaveSequenceName + "' on slave "
        + toString() );
    }

    long nextValue = Const.toLong( nextValueString, Long.MIN_VALUE );
    if ( nextValue == Long.MIN_VALUE ) {
      throw new KettleException( "Incorrect value '" + nextValueString + "' retrieved from slave sequence '"
        + slaveSequenceName + "' on slave " + toString() );
    }

    return nextValue;
  } catch ( Exception e ) {
    throw new KettleException( "There was a problem retrieving a next sequence value from slave sequence '"
      + slaveSequenceName + "' on slave " + toString(), e );
  }
}
Example 7
Source File: RssInputMeta.java From pentaho-kettle with Apache License 2.0 | 5 votes |
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    urlInField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "url_in_field" ) );
    urlFieldname = XMLHandler.getTagValue( stepnode, "url_field_name" );
    includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "rownum" ) );
    rowNumberField = XMLHandler.getTagValue( stepnode, "rownum_field" );
    includeUrl = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "include_url" ) );
    urlField = XMLHandler.getTagValue( stepnode, "url_Field" );
    readfrom = XMLHandler.getTagValue( stepnode, "read_from" );

    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrFields = XMLHandler.countNodes( fields, "field" );
    Node urlnode = XMLHandler.getSubNode( stepnode, "urls" );
    int nrUrls = XMLHandler.countNodes( urlnode, "url" );

    allocate( nrUrls, nrFields );

    for ( int i = 0; i < nrUrls; i++ ) {
      Node urlnamenode = XMLHandler.getSubNodeByNr( urlnode, "url", i );
      url[i] = XMLHandler.getNodeValue( urlnamenode );
    }

    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      RssInputField field = new RssInputField( fnode );
      inputFields[i] = field;
    }

    // Is there a limit on the number of rows we process?
    rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, "limit" ), 0L );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example 8
Source File: RowGenerator.java From pentaho-kettle with Apache License 2.0 | 5 votes |
@Override
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
  try {
    meta = (RowGeneratorMeta) smi;
    data = (RowGeneratorData) sdi;

    if ( super.init( smi, sdi ) ) {
      // Determine the number of rows to generate...
      data.rowLimit = Const.toLong( environmentSubstitute( meta.getRowLimit() ), -1L );
      data.rowsWritten = 0L;
      data.delay = Const.toLong( environmentSubstitute( meta.getIntervalInMs() ), -1L );

      if ( data.rowLimit < 0L ) { // Unable to parse
        logError( BaseMessages.getString( PKG, "RowGenerator.Wrong.RowLimit.Number" ) );
        return false; // fail
      }

      // Create a row (constants) with all the values in it...
      List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>(); // stores the errors...

      RowMetaAndData outputRow = buildRow( meta, remarks, getStepname() );
      if ( !remarks.isEmpty() ) {
        for ( int i = 0; i < remarks.size(); i++ ) {
          CheckResult cr = (CheckResult) remarks.get( i );
          logError( cr.getText() );
        }
        return false;
      }

      data.outputRowData = outputRow.getData();
      data.outputRowMeta = outputRow.getRowMeta();
      return true;
    }
    return false;
  } catch ( Exception e ) {
    setErrors( 1L );
    logError( "Error initializing step", e );
    return false;
  }
}
Example 9
Source File: PropertyInputMeta.java From pentaho-kettle with Apache License 2.0 | 4 votes |
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    fileType = XMLHandler.getTagValue( stepnode, "file_type" );
    encoding = XMLHandler.getTagValue( stepnode, "encoding" );
    includeFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "include" ) );
    filenameField = XMLHandler.getTagValue( stepnode, "include_field" );
    includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "rownum" ) );

    String addresult = XMLHandler.getTagValue( stepnode, "isaddresult" );
    if ( Utils.isEmpty( addresult ) ) {
      isaddresult = true;
    } else {
      isaddresult = "Y".equalsIgnoreCase( addresult );
    }

    section = XMLHandler.getTagValue( stepnode, "section" );
    iniSectionField = XMLHandler.getTagValue( stepnode, "ini_section_field" );
    includeIniSection = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "ini_section" ) );
    filefield = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "filefield" ) );
    rowNumberField = XMLHandler.getTagValue( stepnode, "rownum_field" );
    dynamicFilenameField = XMLHandler.getTagValue( stepnode, "filename_Field" );
    resetRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "resetrownumber" ) );
    resolvevaluevariable = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "resolvevaluevariable" ) );

    Node filenode = XMLHandler.getSubNode( stepnode, "file" );
    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrFiles = XMLHandler.countNodes( filenode, "name" );
    int nrFields = XMLHandler.countNodes( fields, "field" );

    allocate( nrFiles, nrFields );

    for ( int i = 0; i < nrFiles; i++ ) {
      Node filenamenode = XMLHandler.getSubNodeByNr( filenode, "name", i );
      Node filemasknode = XMLHandler.getSubNodeByNr( filenode, "filemask", i );
      Node excludefilemasknode = XMLHandler.getSubNodeByNr( filenode, "exclude_filemask", i );
      Node fileRequirednode = XMLHandler.getSubNodeByNr( filenode, "file_required", i );
      Node includeSubFoldersnode = XMLHandler.getSubNodeByNr( filenode, "include_subfolders", i );
      fileName[i] = XMLHandler.getNodeValue( filenamenode );
      fileMask[i] = XMLHandler.getNodeValue( filemasknode );
      excludeFileMask[i] = XMLHandler.getNodeValue( excludefilemasknode );
      fileRequired[i] = XMLHandler.getNodeValue( fileRequirednode );
      includeSubFolders[i] = XMLHandler.getNodeValue( includeSubFoldersnode );
    }

    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      inputFields[i] = new PropertyInputField();

      inputFields[i].setName( XMLHandler.getTagValue( fnode, "name" ) );
      inputFields[i].setColumn( getColumnByCode( XMLHandler.getTagValue( fnode, "column" ) ) );
      inputFields[i].setType( ValueMetaFactory.getIdForValueMeta( XMLHandler.getTagValue( fnode, "type" ) ) );
      inputFields[i].setLength( Const.toInt( XMLHandler.getTagValue( fnode, "length" ), -1 ) );
      inputFields[i].setPrecision( Const.toInt( XMLHandler.getTagValue( fnode, "precision" ), -1 ) );
      String srepeat = XMLHandler.getTagValue( fnode, "repeat" );
      inputFields[i].setTrimType( getTrimTypeByCode( XMLHandler.getTagValue( fnode, "trim_type" ) ) );

      if ( srepeat != null ) {
        inputFields[i].setRepeated( YES.equalsIgnoreCase( srepeat ) );
      } else {
        inputFields[i].setRepeated( false );
      }

      inputFields[i].setFormat( XMLHandler.getTagValue( fnode, "format" ) );
      inputFields[i].setCurrencySymbol( XMLHandler.getTagValue( fnode, "currency" ) );
      inputFields[i].setDecimalSymbol( XMLHandler.getTagValue( fnode, "decimal" ) );
      inputFields[i].setGroupSymbol( XMLHandler.getTagValue( fnode, "group" ) );
    }

    // Is there a limit on the number of rows we process?
    rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, "limit" ), 0L );
    shortFileFieldName = XMLHandler.getTagValue( stepnode, "shortFileFieldName" );
    pathFieldName = XMLHandler.getTagValue( stepnode, "pathFieldName" );
    hiddenFieldName = XMLHandler.getTagValue( stepnode, "hiddenFieldName" );
    lastModificationTimeFieldName = XMLHandler.getTagValue( stepnode, "lastModificationTimeFieldName" );
    uriNameFieldName = XMLHandler.getTagValue( stepnode, "uriNameFieldName" );
    rootUriNameFieldName = XMLHandler.getTagValue( stepnode, "rootUriNameFieldName" );
    extensionFieldName = XMLHandler.getTagValue( stepnode, "extensionFieldName" );
    sizeFieldName = XMLHandler.getTagValue( stepnode, "sizeFieldName" );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example 10
Source File: JsonInputMeta.java From pentaho-kettle with Apache License 2.0 | 4 votes |
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    includeFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "include" ) );
    filenameField = XMLHandler.getTagValue( stepnode, "include_field" );
    addResultFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "addresultfile" ) );
    readurl = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "readurl" ) );
    removeSourceField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "removeSourceField" ) );
    isIgnoreEmptyFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "IsIgnoreEmptyFile" ) );
    ignoreMissingPath = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "ignoreMissingPath" ) );
    defaultPathLeafToNull = getDefaultPathLeafToNull( stepnode );
    doNotFailIfNoFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "doNotFailIfNoFile" ) );
    includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "rownum" ) );
    rowNumberField = XMLHandler.getTagValue( stepnode, "rownum_field" );

    Node filenode = XMLHandler.getSubNode( stepnode, "file" );
    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrFiles = XMLHandler.countNodes( filenode, "name" );
    int nrFields = XMLHandler.countNodes( fields, "field" );

    initArrayFields( nrFiles, nrFields );

    for ( int i = 0; i < nrFiles; i++ ) {
      Node filenamenode = XMLHandler.getSubNodeByNr( filenode, "name", i );
      Node filemasknode = XMLHandler.getSubNodeByNr( filenode, "filemask", i );
      Node excludefilemasknode = XMLHandler.getSubNodeByNr( filenode, "exclude_filemask", i );
      Node fileRequirednode = XMLHandler.getSubNodeByNr( filenode, "file_required", i );
      Node includeSubFoldersnode = XMLHandler.getSubNodeByNr( filenode, "include_subfolders", i );
      getFileName()[i] = XMLHandler.getNodeValue( filenamenode );
      getFileMask()[i] = XMLHandler.getNodeValue( filemasknode );
      getExcludeFileMask()[i] = XMLHandler.getNodeValue( excludefilemasknode );
      getFileRequired()[i] = XMLHandler.getNodeValue( fileRequirednode );
      getIncludeSubFolders()[i] = XMLHandler.getNodeValue( includeSubFoldersnode );
    }

    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      JsonInputField field = new JsonInputField( fnode );
      getInputFields()[i] = field;
    }

    // Is there a limit on the number of rows we process?
    rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, "limit" ), 0L );

    setInFields( "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "IsInFields" ) ) );
    isAFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "IsAFile" ) );
    setFieldValue( XMLHandler.getTagValue( stepnode, "valueField" ) );
    setShortFileNameField( XMLHandler.getTagValue( stepnode, "shortFileFieldName" ) );
    setPathField( XMLHandler.getTagValue( stepnode, "pathFieldName" ) );
    setIsHiddenField( XMLHandler.getTagValue( stepnode, "hiddenFieldName" ) );
    setLastModificationDateField( XMLHandler.getTagValue( stepnode, "lastModificationTimeFieldName" ) );
    setUriField( XMLHandler.getTagValue( stepnode, "uriNameFieldName" ) );
    setRootUriField( XMLHandler.getTagValue( stepnode, "rootUriNameFieldName" ) );
    setExtensionField( XMLHandler.getTagValue( stepnode, "extensionFieldName" ) );
    setSizeField( XMLHandler.getTagValue( stepnode, "sizeFieldName" ) );
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "JsonInputMeta.Exception.ErrorLoadingXML", e
      .toString() ) );
  }
}
Example 11
Source File: AccessInputMeta.java From pentaho-kettle with Apache License 2.0 | 4 votes |
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    includeFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "include" ) );
    filenameField = XMLHandler.getTagValue( stepnode, "include_field" );
    includeTablename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "tablename" ) );
    tablenameField = XMLHandler.getTagValue( stepnode, "tablename_field" );
    includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "rownum" ) );

    String addresult = XMLHandler.getTagValue( stepnode, "isaddresult" );
    if ( Utils.isEmpty( addresult ) ) {
      isaddresult = true;
    } else {
      isaddresult = "Y".equalsIgnoreCase( addresult );
    }

    filefield = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "filefield" ) );
    rowNumberField = XMLHandler.getTagValue( stepnode, "rownum_field" );
    TableName = XMLHandler.getTagValue( stepnode, "table_name" );
    dynamicFilenameField = XMLHandler.getTagValue( stepnode, "filename_Field" );
    resetRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "resetrownumber" ) );

    Node filenode = XMLHandler.getSubNode( stepnode, "file" );
    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrFiles = XMLHandler.countNodes( filenode, "name" );
    int nrFields = XMLHandler.countNodes( fields, "field" );

    allocate( nrFiles, nrFields );

    for ( int i = 0; i < nrFiles; i++ ) {
      Node filenamenode = XMLHandler.getSubNodeByNr( filenode, "name", i );
      Node filemasknode = XMLHandler.getSubNodeByNr( filenode, "filemask", i );
      Node excludefilemasknode = XMLHandler.getSubNodeByNr( filenode, "exclude_filemask", i );
      Node fileRequirednode = XMLHandler.getSubNodeByNr( filenode, "file_required", i );
      Node includeSubFoldersnode = XMLHandler.getSubNodeByNr( filenode, "include_subfolders", i );
      fileName[i] = XMLHandler.getNodeValue( filenamenode );
      fileMask[i] = XMLHandler.getNodeValue( filemasknode );
      excludeFileMask[i] = XMLHandler.getNodeValue( excludefilemasknode );
      fileRequired[i] = XMLHandler.getNodeValue( fileRequirednode );
      includeSubFolders[i] = XMLHandler.getNodeValue( includeSubFoldersnode );
    }

    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      inputFields[i] = new AccessInputField();

      inputFields[i].setName( XMLHandler.getTagValue( fnode, "name" ) );
      inputFields[i].setColumn( XMLHandler.getTagValue( fnode, "attribut" ) );
      inputFields[i].setType( ValueMetaFactory.getIdForValueMeta( XMLHandler.getTagValue( fnode, "type" ) ) );
      inputFields[i].setLength( Const.toInt( XMLHandler.getTagValue( fnode, "length" ), -1 ) );
      inputFields[i].setPrecision( Const.toInt( XMLHandler.getTagValue( fnode, "precision" ), -1 ) );
      String srepeat = XMLHandler.getTagValue( fnode, "repeat" );
      inputFields[i].setTrimType( getTrimTypeByCode( XMLHandler.getTagValue( fnode, "trim_type" ) ) );

      if ( srepeat != null ) {
        inputFields[i].setRepeated( YES.equalsIgnoreCase( srepeat ) );
      } else {
        inputFields[i].setRepeated( false );
      }

      inputFields[i].setFormat( XMLHandler.getTagValue( fnode, "format" ) );
      inputFields[i].setCurrencySymbol( XMLHandler.getTagValue( fnode, "currency" ) );
      inputFields[i].setDecimalSymbol( XMLHandler.getTagValue( fnode, "decimal" ) );
      inputFields[i].setGroupSymbol( XMLHandler.getTagValue( fnode, "group" ) );
    }

    // Is there a limit on the number of rows we process?
    rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, "limit" ), 0L );
    shortFileFieldName = XMLHandler.getTagValue( stepnode, "shortFileFieldName" );
    pathFieldName = XMLHandler.getTagValue( stepnode, "pathFieldName" );
    hiddenFieldName = XMLHandler.getTagValue( stepnode, "hiddenFieldName" );
    lastModificationTimeFieldName = XMLHandler.getTagValue( stepnode, "lastModificationTimeFieldName" );
    uriNameFieldName = XMLHandler.getTagValue( stepnode, "uriNameFieldName" );
    rootUriNameFieldName = XMLHandler.getTagValue( stepnode, "rootUriNameFieldName" );
    extensionFieldName = XMLHandler.getTagValue( stepnode, "extensionFieldName" );
    sizeFieldName = XMLHandler.getTagValue( stepnode, "sizeFieldName" );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example 12
Source File: ValueString.java From pentaho-kettle with Apache License 2.0 | 4 votes |
@Override
public long getInteger() {
  // Remove the leading space to make "int to string to int" conversion possible.
  return Const.toLong( Const.ltrim( string ), 0L );
}
Example 13
Source File: GetXMLDataMeta.java From pentaho-kettle with Apache License 2.0 | 4 votes |
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    includeFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "include" ) );
    filenameField = XMLHandler.getTagValue( stepnode, "include_field" );
    addResultFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "addresultfile" ) );
    nameSpaceAware = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "namespaceaware" ) );
    ignorecomments = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "ignorecomments" ) );
    readurl = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "readurl" ) );
    validating = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "validating" ) );
    usetoken = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "usetoken" ) );
    IsIgnoreEmptyFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "IsIgnoreEmptyFile" ) );
    doNotFailIfNoFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "doNotFailIfNoFile" ) );
    includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "rownum" ) );
    rowNumberField = XMLHandler.getTagValue( stepnode, "rownum_field" );
    encoding = XMLHandler.getTagValue( stepnode, "encoding" );

    Node filenode = XMLHandler.getSubNode( stepnode, "file" );
    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrFiles = XMLHandler.countNodes( filenode, "name" );
    int nrFields = XMLHandler.countNodes( fields, "field" );

    allocate( nrFiles, nrFields );

    for ( int i = 0; i < nrFiles; i++ ) {
      Node filenamenode = XMLHandler.getSubNodeByNr( filenode, "name", i );
      Node filemasknode = XMLHandler.getSubNodeByNr( filenode, "filemask", i );
      Node excludefilemasknode = XMLHandler.getSubNodeByNr( filenode, "exclude_filemask", i );
      Node fileRequirednode = XMLHandler.getSubNodeByNr( filenode, "file_required", i );
      Node includeSubFoldersnode = XMLHandler.getSubNodeByNr( filenode, "include_subfolders", i );
      fileName[i] = XMLHandler.getNodeValue( filenamenode );
      fileMask[i] = XMLHandler.getNodeValue( filemasknode );
      excludeFileMask[i] = XMLHandler.getNodeValue( excludefilemasknode );
      fileRequired[i] = XMLHandler.getNodeValue( fileRequirednode );
      includeSubFolders[i] = XMLHandler.getNodeValue( includeSubFoldersnode );
    }

    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      GetXMLDataField field = new GetXMLDataField( fnode );
      inputFields[i] = field;
    }

    // Is there a limit on the number of rows we process?
    rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, "limit" ), 0L );
    // Do we skip rows before starting to read
    loopxpath = XMLHandler.getTagValue( stepnode, "loopxpath" );

    inFields = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "IsInFields" ) );
    IsAFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "IsAFile" ) );
    xmlField = XMLHandler.getTagValue( stepnode, "XmlField" );
    prunePath = XMLHandler.getTagValue( stepnode, "prunePath" );
    shortFileFieldName = XMLHandler.getTagValue( stepnode, "shortFileFieldName" );
    pathFieldName = XMLHandler.getTagValue( stepnode, "pathFieldName" );
    hiddenFieldName = XMLHandler.getTagValue( stepnode, "hiddenFieldName" );
    lastModificationTimeFieldName = XMLHandler.getTagValue( stepnode, "lastModificationTimeFieldName" );
    uriNameFieldName = XMLHandler.getTagValue( stepnode, "uriNameFieldName" );
    rootUriNameFieldName = XMLHandler.getTagValue( stepnode, "rootUriNameFieldName" );
    extensionFieldName = XMLHandler.getTagValue( stepnode, "extensionFieldName" );
    sizeFieldName = XMLHandler.getTagValue( stepnode, "sizeFieldName" );
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "GetXMLDataMeta.Exception.ErrorLoadingXML", e
      .toString() ) );
  }
}
Example 14
Source File: MySQLBulkLoader.java From pentaho-kettle with Apache License 2.0 | 4 votes |
@Override
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
  meta = (MySQLBulkLoaderMeta) smi;
  data = (MySQLBulkLoaderData) sdi;

  if ( super.init( smi, sdi ) ) {
    // Confirming Database Connection is defined.
    try {
      verifyDatabaseConnection();
    } catch ( KettleException ex ) {
      logError( ex.getMessage() );
      return false;
    }

    if ( Utils.isEmpty( meta.getEnclosure() ) ) {
      data.quote = new byte[] {};
    } else {
      data.quote = environmentSubstitute( meta.getEnclosure() ).getBytes();
    }
    if ( Utils.isEmpty( meta.getDelimiter() ) ) {
      data.separator = "\t".getBytes();
    } else {
      data.separator = environmentSubstitute( meta.getDelimiter() ).getBytes();
    }
    data.newline = Const.CR.getBytes();

    String realEncoding = environmentSubstitute( meta.getEncoding() );
    data.bulkTimestampMeta = new ValueMetaDate( "timestampMeta" );
    data.bulkTimestampMeta.setConversionMask( "yyyy-MM-dd HH:mm:ss" );
    data.bulkTimestampMeta.setStringEncoding( realEncoding );

    data.bulkDateMeta = new ValueMetaDate( "dateMeta" );
    data.bulkDateMeta.setConversionMask( "yyyy-MM-dd" );
    data.bulkDateMeta.setStringEncoding( realEncoding );

    data.bulkNumberMeta = new ValueMetaNumber( "numberMeta" );
    data.bulkNumberMeta.setConversionMask( "#.#" );
    data.bulkNumberMeta.setGroupingSymbol( "," );
    data.bulkNumberMeta.setDecimalSymbol( "." );
    data.bulkNumberMeta.setStringEncoding( realEncoding );

    data.bulkSize = Const.toLong( environmentSubstitute( meta.getBulkSize() ), -1L );

    // Schema-table combination...
    data.schemaTable =
      meta.getDatabaseMeta().getQuotedSchemaTableCombination(
        environmentSubstitute( meta.getSchemaName() ), environmentSubstitute( meta.getTableName() ) );

    return true;
  }
  return false;
}
Example 15
Source File: LDIFInputMeta.java From pentaho-kettle with Apache License 2.0 | 4 votes |
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    filefield = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "filefield" ) );
    dynamicFilenameField = XMLHandler.getTagValue( stepnode, "dynamicFilenameField" );
    includeFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "include" ) );
    filenameField = XMLHandler.getTagValue( stepnode, "include_field" );
    includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "rownum" ) );
    rowNumberField = XMLHandler.getTagValue( stepnode, "rownum_field" );
    includeContentType = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "contenttype" ) );
    contentTypeField = XMLHandler.getTagValue( stepnode, "contenttype_field" );
    DNField = XMLHandler.getTagValue( stepnode, "dn_field" );
    includeDN = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "dn" ) );
    addtoresultfilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "addtoresultfilename" ) );
    multiValuedSeparator = XMLHandler.getTagValue( stepnode, "multiValuedSeparator" );

    Node filenode = XMLHandler.getSubNode( stepnode, "file" );
    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrFiles = XMLHandler.countNodes( filenode, "name" );
    int nrFields = XMLHandler.countNodes( fields, "field" );

    allocate( nrFiles, nrFields );

    for ( int i = 0; i < nrFiles; i++ ) {
      Node filenamenode = XMLHandler.getSubNodeByNr( filenode, "name", i );
      Node filemasknode = XMLHandler.getSubNodeByNr( filenode, "filemask", i );
      Node excludefilemasknode = XMLHandler.getSubNodeByNr( filenode, "exclude_filemask", i );
      Node fileRequirednode = XMLHandler.getSubNodeByNr( filenode, "file_required", i );
      Node includeSubFoldersnode = XMLHandler.getSubNodeByNr( filenode, "include_subfolders", i );
      fileName[i] = XMLHandler.getNodeValue( filenamenode );
      fileMask[i] = XMLHandler.getNodeValue( filemasknode );
      excludeFileMask[i] = XMLHandler.getNodeValue( excludefilemasknode );
      fileRequired[i] = XMLHandler.getNodeValue( fileRequirednode );
      includeSubFolders[i] = XMLHandler.getNodeValue( includeSubFoldersnode );
    }

    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      LDIFInputField field = new LDIFInputField( fnode );
      inputFields[i] = field;
    }

    // Is there a limit on the number of rows we process?
    rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, "limit" ), 0L );
    shortFileFieldName = XMLHandler.getTagValue( stepnode, "shortFileFieldName" );
    pathFieldName = XMLHandler.getTagValue( stepnode, "pathFieldName" );
    hiddenFieldName = XMLHandler.getTagValue( stepnode, "hiddenFieldName" );
    lastModificationTimeFieldName = XMLHandler.getTagValue( stepnode, "lastModificationTimeFieldName" );
    uriNameFieldName = XMLHandler.getTagValue( stepnode, "uriNameFieldName" );
    rootUriNameFieldName = XMLHandler.getTagValue( stepnode, "rootUriNameFieldName" );
    extensionFieldName = XMLHandler.getTagValue( stepnode, "extensionFieldName" );
    sizeFieldName = XMLHandler.getTagValue( stepnode, "sizeFieldName" );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example 16
Source File: BeamWindowStepHandler.java From kettle-beam with Apache License 2.0 | 4 votes |
@Override
public void handleStep( LogChannelInterface log, StepMeta stepMeta, Map<String, PCollection<KettleRow>> stepCollectionMap,
                        Pipeline pipeline, RowMetaInterface inputRowMeta, List<StepMeta> previousSteps,
                        PCollection<KettleRow> input ) throws KettleException {

  BeamWindowMeta beamWindowMeta = (BeamWindowMeta) stepMeta.getStepMetaInterface();

  if ( StringUtils.isEmpty( beamWindowMeta.getWindowType() ) ) {
    throw new KettleException( "Please specify a window type in Beam Window step '" + stepMeta.getName() + "'" );
  }

  String duration = transMeta.environmentSubstitute( beamWindowMeta.getDuration() );
  long durationSeconds = Const.toLong( duration, -1L );

  PCollection<KettleRow> stepPCollection;

  if ( BeamDefaults.WINDOW_TYPE_FIXED.equals( beamWindowMeta.getWindowType() ) ) {

    if ( durationSeconds <= 0 ) {
      throw new KettleException( "Please specify a valid positive window size (duration) for Beam window step '"
        + stepMeta.getName() + "'" );
    }

    FixedWindows fixedWindows = FixedWindows
      .of( Duration.standardSeconds( durationSeconds ) );
    stepPCollection = input.apply( Window.into( fixedWindows ) );

  } else if ( BeamDefaults.WINDOW_TYPE_SLIDING.equals( beamWindowMeta.getWindowType() ) ) {

    if ( durationSeconds <= 0 ) {
      throw new KettleException( "Please specify a valid positive window size (duration) for Beam window step '"
        + stepMeta.getName() + "'" );
    }

    String every = transMeta.environmentSubstitute( beamWindowMeta.getEvery() );
    long everySeconds = Const.toLong( every, -1L );

    SlidingWindows slidingWindows = SlidingWindows
      .of( Duration.standardSeconds( durationSeconds ) )
      .every( Duration.standardSeconds( everySeconds ) );
    stepPCollection = input.apply( Window.into( slidingWindows ) );

  } else if ( BeamDefaults.WINDOW_TYPE_SESSION.equals( beamWindowMeta.getWindowType() ) ) {

    if ( durationSeconds < 600 ) {
      throw new KettleException(
        "Please specify a window size (duration) of at least 600 (10 minutes) for Beam window step '"
          + stepMeta.getName() + "'. This is the minimum gap between session windows." );
    }

    Sessions sessionWindows = Sessions
      .withGapDuration( Duration.standardSeconds( durationSeconds ) );
    stepPCollection = input.apply( Window.into( sessionWindows ) );

  } else if ( BeamDefaults.WINDOW_TYPE_GLOBAL.equals( beamWindowMeta.getWindowType() ) ) {

    stepPCollection = input.apply( Window.into( new GlobalWindows() ) );

  } else {
    throw new KettleException( "Beam Window type '" + beamWindowMeta.getWindowType()
      + " is not supported in step '" + stepMeta.getName() + "'" );
  }

  // Now get window information about the window if we asked about it...
  //
  if ( StringUtils.isNotEmpty( beamWindowMeta.getStartWindowField() )
    || StringUtils.isNotEmpty( beamWindowMeta.getEndWindowField() )
    || StringUtils.isNotEmpty( beamWindowMeta.getMaxWindowField() ) ) {

    WindowInfoFn windowInfoFn = new WindowInfoFn(
      stepMeta.getName(),
      transMeta.environmentSubstitute( beamWindowMeta.getMaxWindowField() ),
      transMeta.environmentSubstitute( beamWindowMeta.getStartWindowField() ),
      transMeta.environmentSubstitute( beamWindowMeta.getMaxWindowField() ),
      JsonRowMeta.toJson( inputRowMeta ),
      stepPluginClasses,
      xpPluginClasses );

    stepPCollection = stepPCollection.apply( ParDo.of( windowInfoFn ) );
  }

  // Save this in the map
  //
  stepCollectionMap.put( stepMeta.getName(), stepPCollection );

  log.logBasic( "Handled step (WINDOW) : " + stepMeta.getName() + ", gets data from " + previousSteps.size()
    + " previous step(s)" );
}
Example 17
Source File: YamlInputMeta.java From pentaho-kettle with Apache License 2.0 | 4 votes |
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    includeFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "include" ) );
    filenameField = XMLHandler.getTagValue( stepnode, "include_field" );
    addResultFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "addresultfile" ) );
    validating = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "validating" ) );
    IsIgnoreEmptyFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "IsIgnoreEmptyFile" ) );
    doNotFailIfNoFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "doNotFailIfNoFile" ) );
    includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "rownum" ) );
    rowNumberField = XMLHandler.getTagValue( stepnode, "rownum_field" );
    encoding = XMLHandler.getTagValue( stepnode, "encoding" );

    Node filenode = XMLHandler.getSubNode( stepnode, "file" );
    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrFiles = XMLHandler.countNodes( filenode, "name" );
    int nrFields = XMLHandler.countNodes( fields, "field" );

    allocate( nrFiles, nrFields );

    for ( int i = 0; i < nrFiles; i++ ) {
      Node filenamenode = XMLHandler.getSubNodeByNr( filenode, "name", i );
      Node filemasknode = XMLHandler.getSubNodeByNr( filenode, "filemask", i );
      Node fileRequirednode = XMLHandler.getSubNodeByNr( filenode, "file_required", i );
      Node includeSubFoldersnode = XMLHandler.getSubNodeByNr( filenode, "include_subfolders", i );
      fileName[i] = XMLHandler.getNodeValue( filenamenode );
      fileMask[i] = XMLHandler.getNodeValue( filemasknode );
      fileRequired[i] = XMLHandler.getNodeValue( fileRequirednode );
      includeSubFolders[i] = XMLHandler.getNodeValue( includeSubFoldersnode );
    }

    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      YamlInputField field = new YamlInputField( fnode );
      inputFields[i] = field;
    }

    // Is there a limit on the number of rows we process?
    rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, "limit" ), 0L );
    inFields = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "IsInFields" ) );
    IsAFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "IsAFile" ) );
    yamlField = XMLHandler.getTagValue( stepnode, "YamlField" );
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "YamlInputMeta.Exception.ErrorLoadingXML", e
      .toString() ) );
  }
}
Example 18
Source File: LoadFileInputMeta.java From pentaho-kettle with Apache License 2.0 | 4 votes |
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    includeFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, INCLUDE ) );
    filenameField = XMLHandler.getTagValue( stepnode, INCLUDE_FIELD );
    addresultfile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, ADDRESULTFILE ) );
    IsIgnoreEmptyFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, IS_IGNORE_EMPTY_FILE ) );
    IsIgnoreMissingPath = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, IS_IGNORE_MISSING_PATH ) );
    includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, ROWNUM ) );
    rowNumberField = XMLHandler.getTagValue( stepnode, ROWNUM_FIELD );
    encoding = XMLHandler.getTagValue( stepnode, ENCODING );

    Node filenode = XMLHandler.getSubNode( stepnode, FILE );
    Node fields = XMLHandler.getSubNode( stepnode, FIELDS );
    int nrFiles = XMLHandler.countNodes( filenode, NAME );
    int nrFields = XMLHandler.countNodes( fields, "field" );

    allocate( nrFiles, nrFields );

    for ( int i = 0; i < nrFiles; i++ ) {
      Node filenamenode = XMLHandler.getSubNodeByNr( filenode, NAME, i );
      Node filemasknode = XMLHandler.getSubNodeByNr( filenode, FILEMASK, i );
      Node excludefilemasknode = XMLHandler.getSubNodeByNr( filenode, EXCLUDE_FILEMASK, i );
      Node fileRequirednode = XMLHandler.getSubNodeByNr( filenode, FILE_REQUIRED, i );
      Node includeSubFoldersnode = XMLHandler.getSubNodeByNr( filenode, INCLUDE_SUBFOLDERS, i );
      fileName[i] = XMLHandler.getNodeValue( filenamenode );
      fileMask[i] = XMLHandler.getNodeValue( filemasknode );
      excludeFileMask[i] = XMLHandler.getNodeValue( excludefilemasknode );
      fileRequired[i] = XMLHandler.getNodeValue( fileRequirednode );
      includeSubFolders[i] = XMLHandler.getNodeValue( includeSubFoldersnode );
    }

    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      LoadFileInputField field = new LoadFileInputField( fnode );
      inputFields[i] = field;
    }

    // Is there a limit on the number of rows we process?
    rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, LIMIT ), 0L );
    fileinfield = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, IS_IN_FIELDS ) );
    DynamicFilenameField = XMLHandler.getTagValue( stepnode, DYNAMIC_FILENAME_FIELD );
    shortFileFieldName = XMLHandler.getTagValue( stepnode, SHORT_FILE_FIELD_NAME );
    pathFieldName = XMLHandler.getTagValue( stepnode, PATH_FIELD_NAME );
    hiddenFieldName = XMLHandler.getTagValue( stepnode, HIDDEN_FIELD_NAME );
    lastModificationTimeFieldName = XMLHandler.getTagValue( stepnode, LAST_MODIFICATION_TIME_FIELD_NAME );
    uriNameFieldName = XMLHandler.getTagValue( stepnode, URI_NAME_FIELD_NAME );
    rootUriNameFieldName = XMLHandler.getTagValue( stepnode, ROOT_URI_NAME_FIELD_NAME );
    extensionFieldName = XMLHandler.getTagValue( stepnode, EXTENSION_FIELD_NAME );
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "LoadFileInputMeta.Exception.ErrorLoadingXML", e
      .toString() ) );
  }
}
Example 19
Source File: XMLInputMeta.java From pentaho-kettle with Apache License 2.0 | 4 votes |
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    includeFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "include" ) );
    filenameField = XMLHandler.getTagValue( stepnode, "include_field" );
    includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "rownum" ) );
    rowNumberField = XMLHandler.getTagValue( stepnode, "rownum_field" );
    fileBaseURI = XMLHandler.getTagValue( stepnode, "file_base_uri" );
    ignoreEntities = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "ignore_entities" ) );
    namespaceAware = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "namespace_aware" ) );

    Node filenode = XMLHandler.getSubNode( stepnode, "file" );
    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    Node positions = XMLHandler.getSubNode( stepnode, "positions" );
    int nrFiles = XMLHandler.countNodes( filenode, "name" );
    int nrFields = XMLHandler.countNodes( fields, "field" );
    int nrPositions = XMLHandler.countNodes( positions, "position" );

    allocate( nrFiles, nrFields, nrPositions );

    for ( int i = 0; i < nrFiles; i++ ) {
      Node filenamenode = XMLHandler.getSubNodeByNr( filenode, "name", i );
      Node filemasknode = XMLHandler.getSubNodeByNr( filenode, "filemask", i );
      fileName[i] = XMLHandler.getNodeValue( filenamenode );
      fileMask[i] = XMLHandler.getNodeValue( filemasknode );
    }

    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      XMLInputField field = new XMLInputField( fnode );
      inputFields[i] = field;
    }

    for ( int i = 0; i < nrPositions; i++ ) {
      Node positionnode = XMLHandler.getSubNodeByNr( positions, "position", i );
      inputPosition[i] = XMLHandler.getNodeValue( positionnode );
    }

    // Is there a limit on the number of rows we process?
    rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, "limit" ), 0L );
    // Do we skip rows before starting to read
    nrRowsToSkip = Const.toInt( XMLHandler.getTagValue( stepnode, "skip" ), 0 );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
Example 20
Source File: SalesforceInput.java From pentaho-kettle with Apache License 2.0 | 4 votes |
@Override
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
  meta = (SalesforceInputMeta) smi;
  data = (SalesforceInputData) sdi;

  if ( super.init( smi, sdi ) ) {
    // get total fields in the grid
    data.nrfields = meta.getInputFields().length;

    // Check if field list is filled
    if ( data.nrfields == 0 ) {
      log.logError( BaseMessages.getString( PKG, "SalesforceInputDialog.FieldsMissing.DialogMessage" ) );
      return false;
    }

    String soSQL = environmentSubstitute( meta.getQuery() );
    try {
      if ( meta.isSpecifyQuery() ) {
        // Check if user specified a query
        if ( Utils.isEmpty( soSQL ) ) {
          log.logError( BaseMessages.getString( PKG, "SalesforceInputDialog.QueryMissing.DialogMessage" ) );
          return false;
        }
      } else {
        // check records filter
        if ( meta.getRecordsFilter() != SalesforceConnectionUtils.RECORDS_FILTER_ALL ) {
          String realFromDateString = environmentSubstitute( meta.getReadFrom() );
          if ( Utils.isEmpty( realFromDateString ) ) {
            log.logError( BaseMessages.getString( PKG, "SalesforceInputDialog.FromDateMissing.DialogMessage" ) );
            return false;
          }
          String realToDateString = environmentSubstitute( meta.getReadTo() );
          if ( Utils.isEmpty( realToDateString ) ) {
            log.logError( BaseMessages.getString( PKG, "SalesforceInputDialog.ToDateMissing.DialogMessage" ) );
            return false;
          }
          try {
            SimpleDateFormat dateFormat = new SimpleDateFormat( SalesforceInputMeta.DATE_TIME_FORMAT );
            data.startCal = new GregorianCalendar();
            data.startCal.setTime( dateFormat.parse( realFromDateString ) );
            data.endCal = new GregorianCalendar();
            data.endCal.setTime( dateFormat.parse( realToDateString ) );
            dateFormat = null;
          } catch ( Exception e ) {
            log.logError( BaseMessages.getString( PKG, "SalesforceInput.ErrorParsingDate" ), e );
            return false;
          }
        }
      }

      data.limit = Const.toLong( environmentSubstitute( meta.getRowLimit() ), 0 );

      // Do we have to query for all records included deleted records
      data.connection.setQueryAll( meta.isQueryAll() );

      // Build query if needed
      if ( meta.isSpecifyQuery() ) {
        // Free hand SOQL Query
        data.connection.setSQL( soSQL.replace( "\n\r", " " ).replace( "\n", " " ) );
      } else {
        // Set calendars for update or deleted records
        if ( meta.getRecordsFilter() != SalesforceConnectionUtils.RECORDS_FILTER_ALL ) {
          data.connection.setCalendar( meta.getRecordsFilter(), data.startCal, data.endCal );
        }

        if ( meta.getRecordsFilter() == SalesforceConnectionUtils.RECORDS_FILTER_UPDATED ) {
          // Return fields list
          data.connection.setFieldsList( BuiltSOQl() );
        } else {
          // Build now SOQL
          data.connection.setSQL( BuiltSOQl() );
        }
      }

      // Now connect ...
      data.connection.connect();

      return true;
    } catch ( KettleException ke ) {
      logError( BaseMessages.getString( PKG, "SalesforceInput.Log.ErrorOccurredDuringStepInitialize" )
        + ke.getMessage() );
      return false;
    }
  }
  return false;
}