Java Code Examples for org.pentaho.di.core.vfs.KettleVFS#getFilename()
The following examples show how to use org.pentaho.di.core.vfs.KettleVFS#getFilename(). Each example notes its source file and the project it comes from.
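Before the project examples, here is a minimal, self-contained sketch of the basic call pattern. It is illustrative only: the path is hypothetical, and it assumes the Commons VFS 2 FileObject type and a normal Kettle environment setup.

import org.apache.commons.vfs2.FileObject;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.vfs.KettleVFS;

public class GetFilenameSketch {
  public static void main( String[] args ) throws Exception {
    KettleEnvironment.init(); // typical setup before using Kettle classes
    // Resolve a VFS FileObject for a (hypothetical) local path...
    FileObject file = KettleVFS.getFileObject( "/tmp/example.csv" );
    // ...then turn it back into a plain filename string, which is what
    // most of the examples below do before handing the path to java.io APIs.
    String filename = KettleVFS.getFilename( file );
    System.out.println( filename );
  }
}

As the examples show, getFilename() is the usual bridge between Kettle's VFS layer and APIs that expect an ordinary path string.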
Example 1
Source File: JobEntryCheckFilesLocked.java From pentaho-kettle with Apache License 2.0
private void checkFilesLocked( FileObject[] files ) throws KettleException {
  for ( int i = 0; i < files.length && !oneFileLocked; i++ ) {
    FileObject file = files[i];
    String filename = KettleVFS.getFilename( file );
    LockFile locked = new LockFile( filename );
    if ( locked.isLocked() ) {
      oneFileLocked = true;
      logError( BaseMessages.getString( PKG, "JobCheckFilesLocked.Log.FileLocked", filename ) );
    } else {
      if ( isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobCheckFilesLocked.Log.FileNotLocked", filename ) );
      }
    }
  }
}
Example 2
Source File: BaseFileInputStep.java From pentaho-kettle with Apache License 2.0
/**
 * Prepare file-dependent data to fill the additional fields.
 */
protected void fillFileAdditionalFields( D data, FileObject file ) throws FileSystemException {
  data.shortFilename = file.getName().getBaseName();
  data.path = KettleVFS.getFilename( file.getParent() );
  data.hidden = file.isHidden();
  data.extension = file.getName().getExtension();
  data.uriName = file.getName().getURI();
  data.rootUriName = file.getName().getRootURI();
  if ( file.getType().hasContent() ) {
    data.lastModificationDateTime = new Date( file.getContent().getLastModifiedTime() );
    data.size = file.getContent().getSize();
  } else {
    data.lastModificationDateTime = null;
    data.size = null;
  }
}
Example 3
Source File: JsonInput.java From pentaho-kettle with Apache License 2.0
@Override
protected void fillFileAdditionalFields( JsonInputData data, FileObject file ) throws FileSystemException {
  super.fillFileAdditionalFields( data, file );

  data.filename = KettleVFS.getFilename( file );
  data.filenr++;

  if ( log.isDetailed() ) {
    logDetailed( BaseMessages.getString( PKG, "JsonInput.Log.OpeningFile", file.toString() ) );
  }
  addFileToResultFilesname( file );
}
Example 4
Source File: JobEntryShell.java From pentaho-kettle with Apache License 2.0
private FileObject createTemporaryShellFile( FileObject tempFile, String fileContent ) throws Exception {
  // Create a unique new temporary filename in the working directory, put the script in there.
  // Set the permissions to execute and then run it...
  //
  if ( tempFile != null && fileContent != null ) {
    try {
      // flag indicates if current OS is Windows or not
      boolean isWindows = Const.isWindows();
      if ( !isWindows ) {
        fileContent = replaceWinEOL( fileContent );
      }
      tempFile.createFile();
      OutputStream outputStream = tempFile.getContent().getOutputStream();
      outputStream.write( fileContent.getBytes() );
      outputStream.close();
      if ( !isWindows ) {
        String tempFilename = KettleVFS.getFilename( tempFile );
        // Now we have to make this file executable...
        // On Unix-like systems this is done using the command "/bin/chmod +x filename"
        //
        ProcessBuilder procBuilder = new ProcessBuilder( "chmod", "+x", tempFilename );
        Process proc = procBuilder.start();
        // Consume and log all stderr/stdout messages in separate threads...
        StreamLogger errorLogger = new StreamLogger( log, proc.getErrorStream(), toString() + " (stderr)" );
        StreamLogger outputLogger = new StreamLogger( log, proc.getInputStream(), toString() + " (stdout)" );
        new Thread( errorLogger ).start();
        new Thread( outputLogger ).start();
        proc.waitFor();
      }
    } catch ( Exception e ) {
      throw new Exception( "Unable to create temporary file to execute script", e );
    }
  }
  return tempFile;
}
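Spawning chmod works only where a chmod binary exists. As a hedged aside (not part of the original step), the same effect can be achieved on a local POSIX filesystem with java.nio.file, assuming the VFS path resolves to a local file; the helper below is a hypothetical sketch, not pentaho-kettle code.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.util.Set;

// Hypothetical alternative to ProcessBuilder( "chmod", "+x", ... ):
// add the owner-execute bit via NIO instead of forking a process.
static void makeExecutable( String filename ) throws IOException {
  Path script = Paths.get( filename ); // e.g. the result of KettleVFS.getFilename( tempFile )
  Set<PosixFilePermission> perms = Files.getPosixFilePermissions( script );
  perms.add( PosixFilePermission.OWNER_EXECUTE );
  Files.setPosixFilePermissions( script, perms );
}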
Example 5
Source File: FileInputList.java From pentaho-kettle with Apache License 2.0
public String[] getFileStrings() {
  String[] fileStrings = new String[ files.size() ];
  for ( int i = 0; i < fileStrings.length; i++ ) {
    fileStrings[ i ] = KettleVFS.getFilename( files.get( i ) );
  }
  return fileStrings;
}
Example 6
Source File: BaseFileInputStep.java From pentaho-kettle with Apache License 2.0
/**
 * Open next VFS file for processing.
 *
 * This method will support different parallelization methods later.
 */
protected boolean openNextFile() {
  try {
    if ( data.currentFileIndex >= data.files.nrOfFiles() ) {
      // all files already processed
      return false;
    }

    // Is this the last file?
    data.file = data.files.getFile( data.currentFileIndex );
    data.filename = KettleVFS.getFilename( data.file );

    fillFileAdditionalFields( data, data.file );
    if ( meta.inputFiles.passingThruFields ) {
      StringBuilder sb = new StringBuilder();
      sb.append( data.currentFileIndex ).append( "_" ).append( data.file );
      data.currentPassThruFieldsRow = data.passThruFields.get( sb.toString() );
    }

    // Add this file to the result of this transformation.
    //
    if ( meta.inputFiles.isaddresult ) {
      ResultFile resultFile =
        new ResultFile( ResultFile.FILE_TYPE_GENERAL, data.file, getTransMeta().getName(), toString() );
      resultFile.setComment( "File was read by a Text File input step" );
      addResultFile( resultFile );
    }
    if ( log.isBasic() ) {
      logBasic( "Opening file: " + data.file.getName().getFriendlyURI() );
    }
    data.dataErrorLineHandler.handleFile( data.file );
    data.reader = createReader( meta, data, data.file );
  } catch ( Exception e ) {
    if ( !handleOpenFileException( e ) ) {
      return false;
    }
    data.reader = null;
  }
  // Move file pointer ahead!
  data.currentFileIndex++;
  return true;
}
Example 7
Source File: LucidDBBulkLoader.java From pentaho-kettle with Apache License 2.0
public boolean execute( LucidDBBulkLoaderMeta meta, boolean wait ) throws KettleException {
  Runtime rt = Runtime.getRuntime();

  try {
    String tableName = environmentSubstitute( meta.getTableName() );

    // 1) Set up the FIFO folder, create the directory and path to it...
    //
    String fifoVfsDirectory = environmentSubstitute( meta.getFifoDirectory() );
    FileObject directory = KettleVFS.getFileObject( fifoVfsDirectory, getTransMeta() );
    directory.createFolder();
    String fifoDirectory = KettleVFS.getFilename( directory );

    // 2) Create the FIFO file using the "mkfifo" command...
    //    Make sure to log all the possible output, also from STDERR
    //
    data.fifoFilename = KettleVFS.getFilename( directory ) + Const.FILE_SEPARATOR + tableName + ".csv";
    data.bcpFilename = KettleVFS.getFilename( directory ) + Const.FILE_SEPARATOR + tableName + ".bcp";

    File fifoFile = new File( data.fifoFilename );
    if ( !fifoFile.exists() ) {
      String mkFifoCmd = "mkfifo " + data.fifoFilename;
      logBasic( "Creating FIFO file using this command : " + mkFifoCmd );
      Process mkFifoProcess = rt.exec( mkFifoCmd );
      StreamLogger errorLogger = new StreamLogger( log, mkFifoProcess.getErrorStream(), "mkFifoError" );
      StreamLogger outputLogger = new StreamLogger( log, mkFifoProcess.getInputStream(), "mkFifoOutput" );
      new Thread( errorLogger ).start();
      new Thread( outputLogger ).start();
      int result = mkFifoProcess.waitFor();
      if ( result != 0 ) {
        throw new Exception( "Return code " + result + " received from statement : " + mkFifoCmd );
      }
    }

    // 3) Make a connection to LucidDB for sending SQL commands
    //    (Also, we need a clear cache for getting up-to-date target metadata)
    DBCache.getInstance().clear( meta.getDatabaseMeta().getName() );
    if ( meta.getDatabaseMeta() == null ) {
      logError( BaseMessages.getString( PKG, "LuciDBBulkLoader.Init.ConnectionMissing", getStepname() ) );
      return false;
    }
    data.db = new Database( this, meta.getDatabaseMeta() );
    data.db.shareVariablesWith( this );
    // Connect to the database
    if ( getTransMeta().isUsingUniqueConnections() ) {
      synchronized ( getTrans() ) {
        data.db.connect( getTrans().getTransactionId(), getPartitionID() );
      }
    } else {
      data.db.connect( getPartitionID() );
    }

    logBasic( "Connected to LucidDB" );

    // 4) Now we are ready to create the LucidDB FIFO server that will handle the actual bulk loading.
    //
    String fifoServerStatement = "";
    fifoServerStatement += "create or replace server " + meta.getFifoServerName() + Const.CR;
    fifoServerStatement += "foreign data wrapper sys_file_wrapper" + Const.CR;
    fifoServerStatement += "options (" + Const.CR;
    fifoServerStatement += "directory '" + fifoDirectory + "'," + Const.CR;
    fifoServerStatement += "file_extension 'csv'," + Const.CR;
    fifoServerStatement += "with_header 'no'," + Const.CR;
    fifoServerStatement += "num_rows_scan '0'," + Const.CR;
    fifoServerStatement += "lenient 'no');" + Const.CR;

    logBasic( "Creating LucidDB fifo_server with the following command: " + fifoServerStatement );
    data.db.execStatements( fifoServerStatement );

    // 5) Set the error limit in the LucidDB session
    // REVIEW jvs 13-Dec-2008: is this guaranteed to retain the same connection?
    String errorMaxStatement = "";
    errorMaxStatement += "alter session set \"errorMax\" = " + meta.getMaxErrors() + ";" + Const.CR;
    logBasic( "Setting error limit in LucidDB session with the following command: " + errorMaxStatement );
    data.db.execStatements( errorMaxStatement );

    // 6) Now we also need to create a bulk loader file .bcp
    //
    createBulkLoadConfigFile( data.bcpFilename );

    // 7) Execute the actual load command!
    //    This will actually block until the load is done in the
    //    separate execution thread; see notes in executeLoadCommand
    //    on why it's important for this to occur BEFORE
    //    opening our end of the FIFO.
    //
    executeLoadCommand( tableName );

    // 8) We have to write rows to the FIFO file later on.
    data.fifoStream = new BufferedOutputStream( new FileOutputStream( fifoFile ) );
  } catch ( Exception ex ) {
    throw new KettleException( ex );
  }

  return true;
}
Example 8
Source File: GPLoad.java From pentaho-kettle with Apache License 2.0
/**
 * Returns the path to the file. It should be the same as what was passed in, but this method checks the file
 * system to see that the path is valid.
 *
 * @param pathToFile
 *          Path to the file to verify.
 * @param exceptionMessage
 *          The message to use when the path is not provided.
 * @param checkExistenceOfFile
 *          When true the path's existence will be verified.
 * @return the resolved path to the file.
 * @throws KettleException
 */
private String getPath( String pathToFile, String exceptionMessage, boolean checkExistenceOfFile )
  throws KettleException {

  // Make sure the path is not empty
  if ( Utils.isEmpty( pathToFile ) ) {
    throw new KettleException( exceptionMessage );
  }

  // make sure the variable substitution is not empty
  pathToFile = environmentSubstitute( pathToFile ).trim();
  if ( Utils.isEmpty( pathToFile ) ) {
    throw new KettleException( exceptionMessage );
  }

  FileObject fileObject = KettleVFS.getFileObject( pathToFile, getTransMeta() );
  try {
    // we either check the existence of the file
    if ( checkExistenceOfFile ) {
      if ( !fileObject.exists() ) {
        throw new KettleException( BaseMessages.getString( PKG, "GPLoad.Execption.FileDoesNotExist", pathToFile ) );
      }
    } else {
      // if the file does not have to exist, the parent, or source folder, does.
      FileObject parentFolder = fileObject.getParent();
      if ( parentFolder.exists() ) {
        return KettleVFS.getFilename( fileObject );
      } else {
        throw new KettleException( BaseMessages.getString( PKG, "GPLoad.Exception.DirectoryDoesNotExist",
          parentFolder.getURL().getPath() ) );
      }
    }

    // if Windows is the OS
    if ( Const.getOS().startsWith( "Windows" ) ) {
      return addQuotes( pathToFile );
    } else {
      return KettleVFS.getFilename( fileObject );
    }
  } catch ( FileSystemException fsex ) {
    throw new KettleException( BaseMessages.getString( PKG, "GPLoad.Exception.GPLoadCommandBuild",
      fsex.getMessage() ) );
  }
}
Example 9
Source File: ExcelInput.java From pentaho-kettle with Apache License 2.0
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
  meta = (ExcelInputMeta) smi;
  data = (ExcelInputData) sdi;

  if ( super.init( smi, sdi ) ) {
    initErrorHandling();
    initReplayFactory();
    data.files = meta.getFileList( this );
    if ( data.files.nrOfFiles() == 0 && data.files.nrOfMissingFiles() > 0 && !meta.isAcceptingFilenames() ) {
      logError( BaseMessages.getString( PKG, "ExcelInput.Error.NoFileSpecified" ) );
      return false;
    }

    if ( meta.getEmptyFields().size() > 0 ) {
      // Determine the maximum filename length...
      data.maxfilelength = -1;
      for ( FileObject file : data.files.getFiles() ) {
        String name = KettleVFS.getFilename( file );
        if ( name.length() > data.maxfilelength ) {
          data.maxfilelength = name.length();
        }
      }

      // Determine the maximum sheet name length...
      data.maxsheetlength = -1;
      if ( !meta.readAllSheets() ) {
        data.sheetNames = new String[meta.getSheetName().length];
        data.startColumn = new int[meta.getSheetName().length];
        data.startRow = new int[meta.getSheetName().length];
        for ( int i = 0; i < meta.getSheetName().length; i++ ) {
          data.sheetNames[i] = meta.getSheetName()[i];
          data.startColumn[i] = meta.getStartColumn()[i];
          data.startRow[i] = meta.getStartRow()[i];
          if ( meta.getSheetName()[i].length() > data.maxsheetlength ) {
            data.maxsheetlength = meta.getSheetName()[i].length();
          }
        }
      } else {
        // Allocated at open file time: we want ALL sheets.
        if ( meta.getStartRow().length == 1 ) {
          data.defaultStartRow = meta.getStartRow()[0];
        } else {
          data.defaultStartRow = 0;
        }
        if ( meta.getStartColumn().length == 1 ) {
          data.defaultStartColumn = meta.getStartColumn()[0];
        } else {
          data.defaultStartColumn = 0;
        }
      }

      return true;
    } else {
      logError( BaseMessages.getString( PKG, "ExcelInput.Error.NotInputFieldsDefined" ) );
    }
  }
  return false;
}
Example 10
Source File: OraBulkDataOutput.java From pentaho-kettle with Apache License 2.0
@VisibleForTesting
String getFilename( FileObject fileObject ) {
  return KettleVFS.getFilename( fileObject );
}
Example 11
Source File: OraBulkLoader.java From pentaho-kettle with Apache License 2.0
@VisibleForTesting
String getFilename( FileObject fileObject ) {
  return KettleVFS.getFilename( fileObject );
}
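Examples 10 and 11 wrap the static KettleVFS.getFilename() call in a package-private instance method so unit tests can substitute it. A minimal sketch of how such a seam might be stubbed (the class and values here are hypothetical, not from the pentaho-kettle tests):

import org.apache.commons.vfs2.FileObject;
import org.pentaho.di.core.vfs.KettleVFS;

class BulkWriter {
  // The testable seam, mirroring the @VisibleForTesting wrappers above.
  String getFilename( FileObject fileObject ) {
    return KettleVFS.getFilename( fileObject );
  }
}

// In a unit test, override the seam with a canned value so no real
// VFS resolution happens:
BulkWriter stubbed = new BulkWriter() {
  @Override
  String getFilename( FileObject fileObject ) {
    return "/stubbed/path/data.dat";
  }
};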
Example 12
Source File: JobEntryGetPOP.java From pentaho-kettle with Apache License 2.0
String createOutputDirectory( int folderType ) throws KettleException, FileSystemException, IllegalArgumentException {
  if ( ( folderType != JobEntryGetPOP.FOLDER_OUTPUT ) && ( folderType != JobEntryGetPOP.FOLDER_ATTACHMENTS ) ) {
    throw new IllegalArgumentException( "Invalid folderType argument" );
  }

  String folderName = "";
  switch ( folderType ) {
    case JobEntryGetPOP.FOLDER_OUTPUT:
      folderName = getRealOutputDirectory();
      break;
    case JobEntryGetPOP.FOLDER_ATTACHMENTS:
      if ( isSaveAttachment() && isDifferentFolderForAttachment() ) {
        folderName = getRealAttachmentFolder();
      } else {
        folderName = getRealOutputDirectory();
      }
      break;
  }

  if ( Utils.isEmpty( folderName ) ) {
    switch ( folderType ) {
      case JobEntryGetPOP.FOLDER_OUTPUT:
        throw new KettleException( BaseMessages
          .getString( PKG, "JobGetMailsFromPOP.Error.OutputFolderEmpty" ) );
      case JobEntryGetPOP.FOLDER_ATTACHMENTS:
        throw new KettleException( BaseMessages
          .getString( PKG, "JobGetMailsFromPOP.Error.AttachmentFolderEmpty" ) );
    }
  }

  FileObject folder = KettleVFS.getFileObject( folderName, this );
  if ( folder.exists() ) {
    if ( folder.getType() != FileType.FOLDER ) {
      switch ( folderType ) {
        case JobEntryGetPOP.FOLDER_OUTPUT:
          throw new KettleException( BaseMessages.getString( PKG,
            "JobGetMailsFromPOP.Error.NotAFolderNot", folderName ) );
        case JobEntryGetPOP.FOLDER_ATTACHMENTS:
          throw new KettleException( BaseMessages.getString( PKG,
            "JobGetMailsFromPOP.Error.AttachmentFolderNotAFolder", folderName ) );
      }
    }
    if ( isDebug() ) {
      switch ( folderType ) {
        case JobEntryGetPOP.FOLDER_OUTPUT:
          logDebug( BaseMessages.getString( PKG, "JobGetMailsFromPOP.Log.OutputFolderExists", folderName ) );
          break;
        case JobEntryGetPOP.FOLDER_ATTACHMENTS:
          logDebug( BaseMessages.getString( PKG, "JobGetMailsFromPOP.Log.AttachmentFolderExists", folderName ) );
          break;
      }
    }
  } else {
    if ( isCreateLocalFolder() ) {
      folder.createFolder();
    } else {
      switch ( folderType ) {
        case JobEntryGetPOP.FOLDER_OUTPUT:
          throw new KettleException( BaseMessages.getString( PKG,
            "JobGetMailsFromPOP.Error.OutputFolderNotExist", folderName ) );
        case JobEntryGetPOP.FOLDER_ATTACHMENTS:
          throw new KettleException( BaseMessages.getString( PKG,
            "JobGetMailsFromPOP.Error.AttachmentFolderNotExist", folderName ) );
      }
    }
  }

  String returnValue = KettleVFS.getFilename( folder );
  try {
    folder.close();
  } catch ( IOException ignore ) {
    // Ignore error, as the folder was created successfully
  }
  return returnValue;
}
Example 13
Source File: YamlInput.java From pentaho-kettle with Apache License 2.0
private Object[] getRowData() throws KettleException {
  // Build an empty row based on the meta-data
  Object[] outputRowData = null;

  try {
    // Create new row...
    outputRowData = data.yaml.getRow( data.rowMeta );
    if ( outputRowData == null ) {
      return null;
    }

    if ( data.readrow != null ) {
      outputRowData = RowDataUtil.addRowData( data.readrow, data.totalPreviousFields, outputRowData );
    } else {
      outputRowData = RowDataUtil.resizeArray( outputRowData, data.totalOutStreamFields );
    }

    int rowIndex = data.totalOutFields;

    // See if we need to add the filename to the row...
    if ( meta.includeFilename() && !Utils.isEmpty( meta.getFilenameField() ) ) {
      outputRowData[rowIndex++] = KettleVFS.getFilename( data.file );
    }
    // See if we need to add the row number to the row...
    if ( meta.includeRowNumber() && !Utils.isEmpty( meta.getRowNumberField() ) ) {
      outputRowData[rowIndex++] = new Long( data.rownr );
    }
  } catch ( Exception e ) {
    boolean sendToErrorRow = false;
    String errorMessage = null;
    if ( getStepMeta().isDoingErrorHandling() ) {
      sendToErrorRow = true;
      errorMessage = e.toString();
    } else {
      logError( BaseMessages.getString( PKG, "YamlInput.ErrorInStepRunning", e.toString() ) );
      setErrors( 1 );
      stopAll();
      logError( Const.getStackTracker( e ) );
      setOutputDone(); // signal end to receiver(s)
    }
    if ( sendToErrorRow ) {
      // Simply add this row to the error row
      putError( getInputRowMeta(), outputRowData, 1, errorMessage, null, "YamlInput001" );
    }
  }
  return outputRowData;
}
Example 14
Source File: YamlReader.java From pentaho-kettle with Apache License 2.0
public void loadFile( FileObject file ) throws Exception {
  this.file = file;
  this.filename = KettleVFS.getFilename( file );
  loadFile( filename );
}
Example 15
Source File: JobEntryFTP.java From pentaho-kettle with Apache License 2.0
private void downloadFile( FTPClient ftpclient, String filename, String realMoveToFolder, Job parentJob,
  Result result ) throws Exception {
  String localFilename = filename;
  targetFilename = KettleVFS.getFilename( KettleVFS.getFileObject( returnTargetFilename( localFilename ) ) );

  if ( ( !onlyGettingNewFiles ) || ( onlyGettingNewFiles && needsDownload( targetFilename ) ) ) {
    if ( isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEntryFTP.GettingFile", filename,
        environmentSubstitute( targetDirectory ) ) );
    }
    ftpclient.get( targetFilename, filename );

    // Update retrieved files
    updateRetrievedFiles();
    if ( isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEntryFTP.GotFile", filename ) );
    }

    // Add filename to result filenames
    addFilenameToResultFilenames( result, parentJob, targetFilename );

    // Delete the file if this is needed!
    if ( remove ) {
      ftpclient.delete( filename );
      if ( isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobEntryFTP.DeletedFile", filename ) );
      }
    } else {
      if ( movefiles ) {
        // Try to move file to destination folder ...
        ftpclient.rename( filename, realMoveToFolder + FILE_SEPARATOR + filename );
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobEntryFTP.MovedFile", filename, realMoveToFolder ) );
        }
      }
    }
  }
}
Example 16
Source File: JobEntryDosToUnix.java From pentaho-kettle with Apache License 2.0
@VisibleForTesting
boolean convert( FileObject file, boolean toUnix ) {
  boolean retval = false;
  // CR = CR
  // LF = LF
  try {
    String localfilename = KettleVFS.getFilename( file );
    File source = new File( localfilename );
    if ( isDetailed() ) {
      if ( toUnix ) {
        logDetailed( BaseMessages.getString( PKG, "JobDosToUnix.Log.ConvertingFileToUnix",
          source.getAbsolutePath() ) );
      } else {
        logDetailed( BaseMessages.getString( PKG, "JobDosToUnix.Log.ConvertingFileToDos",
          source.getAbsolutePath() ) );
      }
    }
    File tempFile = new File( tempFolder, source.getName() + ".tmp" );

    if ( isDebug() ) {
      logDebug( BaseMessages.getString( PKG, "JobDosToUnix.Log.CreatingTempFile", tempFile.getAbsolutePath() ) );
    }

    final int FOUR_KB = 4 * 1024;
    byte[] buffer = new byte[ FOUR_KB ];
    try ( FileOutputStream out = new FileOutputStream( tempFile );
          FileInputStream in = new FileInputStream( localfilename ) ) {
      ConversionAutomata automata = new ConversionAutomata( out, toUnix );
      int read;
      while ( ( read = in.read( buffer ) ) > 0 ) {
        automata.convert( buffer, read );
      }
    }

    if ( isDebug() ) {
      logDebug( BaseMessages.getString( PKG, "JobDosToUnix.Log.DeletingSourceFile", localfilename ) );
    }
    if ( isDebug() ) {
      logDebug( BaseMessages.getString( PKG, "JobDosToUnix.Log.RenamingTempFile",
        tempFile.getAbsolutePath(), source.getAbsolutePath() ) );
    }
    Files.move( tempFile.toPath(), source.toPath(), StandardCopyOption.REPLACE_EXISTING );

    retval = true;
  } catch ( Exception e ) {
    logError( BaseMessages.getString( PKG, "JobDosToUnix.Log.ErrorConvertingFile", file.toString(),
      e.getMessage() ) );
  }
  return retval;
}
Example 17
Source File: TeraFast.java From pentaho-kettle with Apache License 2.0
/**
 * @param fileName
 *          the filename to resolve. may contain Kettle Environment variables.
 * @return the data file name.
 * @throws KettleException
 *           ...
 */
private String resolveFileName( final String fileName ) throws KettleException {
  final FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( fileName ) );
  return KettleVFS.getFilename( fileObject );
}