Java Code Examples for org.apache.commons.vfs2.FileObject#getURL()
The following examples show how to use org.apache.commons.vfs2.FileObject#getURL().
They are taken from real open-source projects; the project of origin and its license are noted above each example.
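As a point of reference before the project code, here is a minimal self-contained sketch, not drawn from any of the projects below, showing the basic pattern: resolve a location into a FileObject through Commons VFS, then let getURL() turn it into a java.net.URL. The path is a placeholder.

import java.net.URL;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.VFS;

public class GetUrlDemo {
  public static void main( String[] args ) throws FileSystemException {
    // obtain the default VFS manager and resolve a file by URI
    FileSystemManager fsManager = VFS.getManager();
    FileObject file = fsManager.resolveFile( "file:///tmp/example.txt" ); // placeholder path
    // getURL() exposes the resolved file as a java.net.URL
    URL url = file.getURL();
    System.out.println( url );
  }
}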
Example 1
Source File: Mail.java From hop with Apache License 2.0
private void addAttachedFilePart( FileObject file ) throws Exception {
  // create a mime body part for the attachment
  MimeBodyPart files = new MimeBodyPart();
  // create a data source backed by the file's URL
  URLDataSource fds = new URLDataSource( file.getURL() );
  // get a data handler to manipulate this file type
  files.setDataHandler( new DataHandler( fds ) );
  // set the attachment's file name
  files.setFileName( file.getName().getBaseName() );
  // insist on base64 to preserve line endings
  files.addHeader( "Content-Transfer-Encoding", "base64" );
  // add the attachment part to the message body
  data.parts.addBodyPart( files );
  if ( isDetailed() ) {
    logDetailed( BaseMessages.getString( PKG, "Mail.Log.AttachedFile", fds.getName() ) );
  }
}
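The data.parts field, isDetailed(), logDetailed(), and BaseMessages are internals of the surrounding Kettle/Hop step; the reusable idea is that URLDataSource wraps the VFS URL so JavaMail can stream the attachment straight from wherever VFS resolved it. A rough standalone sketch of that idea, with the step internals replaced by a plain javax.mail Multipart (class and method names here are illustrative):

import javax.activation.DataHandler;
import javax.activation.URLDataSource;
import javax.mail.Multipart;
import javax.mail.internet.MimeBodyPart;
import org.apache.commons.vfs2.FileObject;

public class AttachmentSketch {
  // attach a VFS-resolved file to a mail multipart via its URL
  static void addAttachment( Multipart parts, FileObject file ) throws Exception {
    MimeBodyPart part = new MimeBodyPart();
    // URLDataSource lets JavaMail read the content directly from the VFS URL
    part.setDataHandler( new DataHandler( new URLDataSource( file.getURL() ) ) );
    part.setFileName( file.getName().getBaseName() );
    part.addHeader( "Content-Transfer-Encoding", "base64" );
    parts.addBodyPart( part );
  }
}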
Example 2
Source File: Mail.java From pentaho-kettle with Apache License 2.0
Apache Hop began as a fork of Pentaho Kettle, so this implementation is identical, line for line, to the one shown in Example 1; the code is not repeated here.
Example 3
Source File: PropertiesConfigurationProperties.java From pentaho-hadoop-shims with Apache License 2.0
private static PropertiesConfiguration initPropertiesConfiguration( FileObject fileObject )
    throws FileSystemException, ConfigurationException {
  PropertiesConfiguration propertiesConfiguration = new PropertiesConfiguration( fileObject.getURL() );
  // write changes back to the file as soon as a property is set
  propertiesConfiguration.setAutoSave( true );
  // re-read the file when it changes on disk, checking at most once per second
  FileChangedReloadingStrategy fileChangedReloadingStrategy = new FileChangedReloadingStrategy();
  fileChangedReloadingStrategy.setRefreshDelay( 1000L );
  propertiesConfiguration.setReloadingStrategy( fileChangedReloadingStrategy );
  return propertiesConfiguration;
}
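PropertiesConfiguration(URL) is the Apache Commons Configuration 1.x API: with setAutoSave(true) every write is persisted immediately, and the FileChangedReloadingStrategy re-reads the file when it changes on disk, subject to the refresh delay. A hypothetical caller inside the same class might look like this (the path and property names are placeholders):

static String readHost() throws Exception {
  // placeholder path; any VFS-resolvable location works
  FileObject configFile = VFS.getManager().resolveFile( "file:///etc/pentaho/config.properties" );
  PropertiesConfiguration config = initPropertiesConfiguration( configFile );
  config.setProperty( "last.read", String.valueOf( System.currentTimeMillis() ) ); // persisted at once via auto-save
  return config.getString( "host" ); // reflects on-disk edits after the 1-second refresh delay
}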
Example 4
Source File: FileObjectResourceLoader.java From pentaho-reporting with GNU Lesser General Public License v2.1
public URL toURL( final ResourceKey key ) {
  if ( key == null ) {
    throw new NullPointerException();
  }
  if ( isSupportedKey( key ) == false ) {
    throw new IllegalArgumentException( "Key format is not recognized." );
  }
  try {
    final FileObject fileObject = (FileObject) key.getIdentifier();
    return fileObject.getURL();
  } catch ( FileSystemException e ) {
    return null;
  }
}
Example 5
Source File: SchedulerStateRest.java From scheduling with GNU Affero General Public License v3.0
@Override
public boolean pushFile(String sessionId, String spaceName, String filePath, MultipartFormDataInput multipart)
        throws IOException, NotConnectedRestException, PermissionRestException {
    try {
        checkAccess(sessionId, "pushFile");
        Session session = dataspaceRestApi.checkSessionValidity(sessionId);

        Map<String, List<InputPart>> formDataMap = multipart.getFormDataMap();

        List<InputPart> fNL = formDataMap.get("fileName");
        if ((fNL == null) || (fNL.isEmpty())) {
            throw new IllegalArgumentException("Illegal multipart argument definition (fileName), received " + fNL);
        }
        String fileName = fNL.get(0).getBody(String.class, null);

        List<InputPart> fCL = formDataMap.get("fileContent");
        if ((fCL == null) || (fCL.isEmpty())) {
            throw new IllegalArgumentException("Illegal multipart argument definition (fileContent), received " + fCL);
        }
        InputStream fileContent = fCL.get(0).getBody(InputStream.class, null);

        if (fileName == null) {
            throw new IllegalArgumentException("Wrong file name : " + fileName);
        }

        filePath = normalizeFilePath(filePath, fileName);
        FileObject destfo = dataspaceRestApi.resolveFile(session, spaceName, filePath);

        URL targetUrl = destfo.getURL();
        logger.info("[pushFile] pushing file to " + targetUrl);

        if (!destfo.isWriteable()) {
            RuntimeException ex = new IllegalArgumentException("File " + filePath + " is not writable in space " +
                                                               spaceName);
            logger.error(ex);
            throw ex;
        }
        if (destfo.exists()) {
            destfo.delete();
        }
        // used to create the necessary directories if needed
        destfo.createFile();

        dataspaceRestApi.writeFile(fileContent, destfo, null);
        return true;
    } finally {
        if (multipart != null) {
            multipart.close();
        }
    }
}
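Most of this method is REST plumbing around a small VFS pattern: resolve the destination, log its URL, verify it is writable, replace any existing file, and stream the upload into it. Stripped down to just the Commons VFS calls, the pattern looks roughly like this (class and method names are illustrative):

import java.io.InputStream;
import java.io.OutputStream;
import org.apache.commons.vfs2.FileObject;

public class PushSketch {
  // write an input stream to a VFS destination, replacing any existing file
  static void push( FileObject destfo, InputStream content ) throws Exception {
    if ( !destfo.isWriteable() ) {
      throw new IllegalArgumentException( destfo.getURL() + " is not writable" );
    }
    if ( destfo.exists() ) {
      destfo.delete();
    }
    destfo.createFile(); // also creates any missing parent directories
    try ( OutputStream out = destfo.getContent().getOutputStream() ) {
      content.transferTo( out ); // java.io.InputStream#transferTo, Java 9+
    }
  }
}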
Example 6
Source File: ShimConfigsLoader.java From pentaho-hadoop-shims with Apache License 2.0
@SuppressWarnings( "squid:S3776" )
public static URL getURLToResourceFile( String siteFileName, String additionalPath ) {
  try {
    FileObject currentPath = null;
    if ( additionalPath != null && !additionalPath.equals( "" ) ) {
      currentPath = KettleVFS.getFileObject( Const.getKettleDirectory() + File.separator + CONFIGS_DIR_PREFIX
        + File.separator + additionalPath + File.separator + siteFileName );
      if ( currentPath.exists() ) {
        return currentPath.getURL();
      }
      currentPath = KettleVFS.getFileObject( Const.getUserHomeDirectory() + File.separator + ".pentaho"
        + File.separator + CONFIGS_DIR_PREFIX + File.separator + additionalPath + File.separator + siteFileName );
      if ( currentPath.exists() ) {
        return currentPath.getURL();
      }
      currentPath = KettleVFS.getFileObject( Const.getUserHomeDirectory() + File.separator + CONFIGS_DIR_PREFIX
        + File.separator + additionalPath + File.separator + siteFileName );
      if ( currentPath.exists() ) {
        return currentPath.getURL();
      }

      // normal metastore locations failed, see if there's a metastore in the big-data-plugin folder
      // this should only exist if this instance of pentaho were created to run on a yarn cluster
      PluginInterface pluginInterface =
        PluginRegistry.getInstance().findPluginWithId( LifecyclePluginType.class, "HadoopSpoonPlugin" );
      currentPath = KettleVFS.getFileObject( pluginInterface.getPluginDirectory().getPath() + File.separator
        + CONFIGS_DIR_PREFIX + File.separator + additionalPath + File.separator + siteFileName );
      if ( currentPath.exists() ) {
        return currentPath.getURL();
      }
    }

    // cluster name was missing or else config files were not found; try looking for a legacy configuration
    String defaultShim = LegacyShimLocator.getLegacyDefaultShimName();
    List<ShimIdentifierInterface> shimIdentifers = LegacyShimLocator.getInstance().getRegisteredShims();
    for ( ShimIdentifierInterface shim : shimIdentifers ) {
      if ( shim.getId().equals( defaultShim ) ) {
        // only return the legacy folder if the shim still exists
        currentPath = KettleVFS.getFileObject(
          LegacyShimLocator.getLegacyDefaultShimDir( defaultShim ) + File.separator + siteFileName );
        if ( currentPath.exists() ) {
          log.logBasic( BaseMessages.getString( PKG, "ShimConfigsLoader.UsingLegacyConfig" ) );
          return currentPath.getURL();
        }
      }
    }

    // Work around to avoid multiple logging for VFS
    if ( ( CLUSTER_NAME_FOR_LOGGING.isEmpty() ) || ( !CLUSTER_NAME_FOR_LOGGING.contains( additionalPath ) ) ) {
      SITE_FILE_NAME.clear();
      log.logBasic( BaseMessages.getString( PKG, "ShimConfigsLoader.UnableToFindConfigs" ), siteFileName,
        additionalPath );
      CLUSTER_NAME_FOR_LOGGING.add( additionalPath );
      SITE_FILE_NAME.add( siteFileName );
    } else if ( ( SITE_FILE_NAME.isEmpty() ) || ( !SITE_FILE_NAME.contains( siteFileName ) ) ) {
      log.logBasic( BaseMessages.getString( PKG, "ShimConfigsLoader.UnableToFindConfigs" ), siteFileName,
        additionalPath );
      SITE_FILE_NAME.add( siteFileName );
    }
  } catch ( KettleFileException | IOException ex ) {
    log.logError( BaseMessages.getString( PKG, "ShimConfigsLoader.ExceptionReadingFile" ),
      siteFileName, additionalPath, ex.getStackTrace() );
  }
  return null;
}
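Underneath the Pentaho-specific lookups, the method is a probe chain: build each candidate location in priority order, test exists(), and return the first hit's getURL(). The same pattern reduces to a few lines of plain Commons VFS (candidate paths are illustrative):

import java.net.URL;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.VFS;

public class ProbeSketch {
  // return the URL of the first candidate that exists, or null if none do
  static URL firstExisting( String... candidates ) throws FileSystemException {
    for ( String path : candidates ) {
      FileObject fo = VFS.getManager().resolveFile( path );
      if ( fo.exists() ) {
        return fo.getURL();
      }
    }
    return null;
  }
}

A caller would list locations from most to least specific, for example firstExisting( "file:///opt/etc/core-site.xml", "file:///etc/hadoop/core-site.xml" ), mirroring the directory-by-directory fallback above.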