Java Code Examples for org.eclipse.jetty.util.StringUtil#isBlank()
The following examples show how to use org.eclipse.jetty.util.StringUtil#isBlank(). Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
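For reference, StringUtil.isBlank(String) returns true when the argument is null, empty, or contains only whitespace. Below is a minimal sketch of that behavior; the demo class name is illustrative and not taken from any of the projects that follow.

    import org.eclipse.jetty.util.StringUtil;

    public class IsBlankDemo {
        public static void main(String[] args) {
            // null, the empty string, and whitespace-only strings are all considered blank
            System.out.println(StringUtil.isBlank(null));       // true
            System.out.println(StringUtil.isBlank(""));         // true
            System.out.println(StringUtil.isBlank("  \t\n"));   // true
            // any non-whitespace character makes the string non-blank
            System.out.println(StringUtil.isBlank(" value "));  // false
        }
    }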
Example 1
Source File: DBConfigFromFile.java From hadoop-ozone with Apache License 2.0
public static File getConfigLocation() throws IOException {
    String path = System.getenv(CONFIG_DIR);

    // Make testing easy.
    // If there is No Env. defined, let us try to read the JVM property
    if (StringUtil.isBlank(path)) {
        path = System.getProperty(CONFIG_DIR);
    }

    if (StringUtil.isBlank(path)) {
        LOG.debug("Unable to find the configuration directory. "
            + "Please make sure that HADOOP_CONF_DIR is setup correctly.");
    }

    if (StringUtil.isBlank(path)) {
        return null;
    }
    return new File(path);
}
Example 2
Source File: DBStoreBuilder.java From hadoop-ozone with Apache License 2.0
/**
 * Builds a DBStore instance and returns that.
 *
 * @return DBStore
 */
public DBStore build() throws IOException {
    if (StringUtil.isBlank(dbname) || (dbPath == null)) {
        LOG.error("Required Parameter missing.");
        throw new IOException("Required parameter is missing. Please make sure "
            + "Path and DB name are provided.");
    }

    processDBProfile();
    processTables();
    DBOptions options = getDbProfile();

    WriteOptions writeOptions = new WriteOptions();
    writeOptions.setSync(rocksDBConfiguration.getSyncOption());

    File dbFile = getDBFile();
    if (!dbFile.getParentFile().exists()) {
        throw new IOException("The DB destination directory should exist.");
    }
    return new RDBStore(dbFile, options, writeOptions, tables, registry);
}
Example 3
Source File: Response.java From onedev with MIT License
@Override
public void addCookie(Cookie cookie) {
    if (StringUtil.isBlank(cookie.getName()))
        throw new IllegalArgumentException("Cookie.name cannot be blank/null");

    String comment = cookie.getComment();
    // HttpOnly was supported as a comment in cookie flags before the
    // java.net.HttpCookie implementation so need to check that
    boolean httpOnly = cookie.isHttpOnly() || HttpCookie.isHttpOnlyInComment(comment);
    SameSite sameSite = HttpCookie.getSameSiteFromComment(comment);
    comment = HttpCookie.getCommentWithoutAttributes(comment);

    addCookie(new HttpCookie(
        cookie.getName(),
        cookie.getValue(),
        cookie.getDomain(),
        cookie.getPath(),
        (long) cookie.getMaxAge(),
        httpOnly,
        cookie.getSecure(),
        comment,
        cookie.getVersion(),
        sameSite));
}
Example 4
Source File: DBStoreBuilder.java From hadoop-ozone with Apache License 2.0
private File getDBFile() throws IOException {
    if (dbPath == null) {
        LOG.error("DB path is required.");
        throw new IOException("A path for the DB file is needed.");
    }

    if (StringUtil.isBlank(dbname)) {
        LOG.error("DBName is required.");
        throw new IOException("A valid DB name is required.");
    }
    return Paths.get(dbPath.toString(), dbname).toFile();
}
Example 5
Source File: QueryHandlerServlet.java From clickhouse-jdbc-bridge with Apache License 2.0
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {
    try {
        String query = req.getParameter("query");
        if (StringUtil.isBlank(query)) {
            // a hack for wrong input from CH
            String requestBody = StreamUtils.toString(req.getInputStream());
            String[] parts = requestBody.split("query=", 2);
            if (parts.length == 2) {
                query = parts[1];
            }
        }

        if (StringUtil.isBlank(query)) {
            throw new IllegalArgumentException("Query is blank or empty");
        }

        try (Connection connection = manager.get(req.getParameter("connection_string"));
             Statement sth = connection.createStatement()) {
            ResultSet resultset = sth.executeQuery(query);
            ResultSetMetaData meta = resultset.getMetaData();

            ClickHouseRowSerializer serializer = ClickHouseRowSerializer.create(meta);
            ClickHouseRowBinaryStream stream = new ClickHouseRowBinaryStream(
                resp.getOutputStream(), null, new ClickHouseProperties());

            resp.setContentType("application/octet-stream");
            while (resultset.next()) {
                serializer.serialize(resultset, stream);
            }
        }
    } catch (Exception err) {
        log.error(err.getMessage(), err);
        resp.sendError(HttpStatus.INTERNAL_SERVER_ERROR_500, err.getMessage());
    }
}
Example 6
Source File: JdbcBridge.java From clickhouse-jdbc-bridge with Apache License 2.0
/**
 * If a path to a log file is given, redirect logs there.
 */
private void configureLogging() throws Exception {
    final boolean isLogFileBlank = StringUtil.isBlank(config.getLogPath());

    if (config.isDaemon() && isLogFileBlank) {
        throw new IllegalArgumentException("You can not run as daemon, and without specifying log path");
    }

    if (!isLogFileBlank) {
        Map<String, String> pocoToJavaLogMap = new HashMap<>();
        pocoToJavaLogMap.put("critical", "error");
        pocoToJavaLogMap.put("warning", "warn");
        pocoToJavaLogMap.put("notice", "warn");
        pocoToJavaLogMap.put("information", "info");
        String givenLogLevel = pocoToJavaLogMap.getOrDefault(config.getLogLevel(), "trace").toUpperCase();

        URL url = Resources.getResource("log4j-redirect.properties");
        String text = Resources.toString(url, Charsets.UTF_8);
        text = text
            .replaceAll("#LOGLEVEL#", givenLogLevel)
            .replaceAll("#LOGFILE#", config.getLogPath());

        Properties properties = new Properties();
        properties.load(new StringReader(text));
        PropertyConfigurator.configure(properties);
    }
}
Example 7
Source File: Response.java From onedev with MIT License
public void addCookie(HttpCookie cookie) {
    if (StringUtil.isBlank(cookie.getName()))
        throw new IllegalArgumentException("Cookie.name cannot be blank/null");

    // add the set cookie
    _fields.add(new SetCookieHttpField(cookie,
        getHttpChannel().getHttpConfiguration().getResponseCookieCompliance()));

    // Expire responses with set-cookie headers so they do not get cached.
    _fields.put(__EXPIRES_01JAN1970);
}
Example 8
Source File: OAuthClientServiceImpl.java From smarthome with Eclipse Public License 2.0
@Override
public AccessTokenResponse refreshToken() throws OAuthException, IOException, OAuthResponseException {
    if (isClosed()) {
        throw new OAuthException(EXCEPTION_MESSAGE_CLOSED);
    }

    AccessTokenResponse lastAccessToken;
    try {
        lastAccessToken = storeHandler.loadAccessTokenResponse(handle);
    } catch (GeneralSecurityException e) {
        throw new OAuthException("Cannot decrypt access token from store", e);
    }
    if (lastAccessToken == null) {
        throw new OAuthException(
            "Cannot refresh token because last access token is not available from handle: " + handle);
    }
    if (lastAccessToken.getRefreshToken() == null) {
        throw new OAuthException("Cannot refresh token because last access token did not have a refresh token");
    }
    String tokenUrl = persistedParams.tokenUrl;
    if (tokenUrl == null) {
        throw new OAuthException("tokenUrl is required but null");
    }

    OAuthConnector connector = new OAuthConnector(httpClientFactory);
    AccessTokenResponse accessTokenResponse = connector.grantTypeRefreshToken(tokenUrl,
        lastAccessToken.getRefreshToken(), persistedParams.clientId, persistedParams.clientSecret,
        persistedParams.scope, Boolean.TRUE.equals(persistedParams.supportsBasicAuth));

    // The service may not return the refresh token so use the last refresh token otherwise it's not stored.
    if (StringUtil.isBlank(accessTokenResponse.getRefreshToken())) {
        accessTokenResponse.setRefreshToken(lastAccessToken.getRefreshToken());
    }
    // store it
    storeHandler.saveAccessTokenResponse(handle, accessTokenResponse);
    accessTokenRefreshListeners.forEach(l -> l.onAccessTokenResponse(accessTokenResponse));
    return accessTokenResponse;
}
Example 9
Source File: LogMessage.java From nifi with Apache License 2.0
private void processFlowFile(
        final ComponentLog logger,
        final MessageLogLevel logLevel,
        final FlowFile flowFile,
        final ProcessContext context) {

    String logPrefix = context.getProperty(LOG_PREFIX).evaluateAttributeExpressions(flowFile).getValue();
    String logMessage = context.getProperty(LOG_MESSAGE).evaluateAttributeExpressions(flowFile).getValue();

    String messageToWrite;
    if (StringUtil.isBlank(logPrefix)) {
        messageToWrite = logMessage;
    } else {
        messageToWrite = String.format("%s%s", logPrefix, logMessage);
    }

    // Uses optional property to specify logging level
    switch (logLevel) {
        case info:
            logger.info(messageToWrite);
            break;
        case debug:
            logger.debug(messageToWrite);
            break;
        case warn:
            logger.warn(messageToWrite);
            break;
        case trace:
            logger.trace(messageToWrite);
            break;
        case error:
            logger.error(messageToWrite);
            break;
        default:
            logger.debug(messageToWrite);
    }
}
Example 10
Source File: OmMetadataManagerImpl.java From hadoop-ozone with Apache License 2.0
/**
 * @param userName volume owner, null for listing all volumes.
 */
@Override
public List<OmVolumeArgs> listVolumes(String userName,
    String prefix, String startKey, int maxKeys) throws IOException {

    if (StringUtil.isBlank(userName)) {
        // null userName represents listing all volumes in cluster.
        return listAllVolumes(prefix, startKey, maxKeys);
    }

    final List<OmVolumeArgs> result = Lists.newArrayList();
    final List<String> volumes = getVolumesByUser(userName)
        .getVolumeNamesList();

    int index = 0;
    if (!Strings.isNullOrEmpty(startKey)) {
        index = volumes.indexOf(
            startKey.startsWith(OzoneConsts.OM_KEY_PREFIX) ? startKey.substring(1) : startKey);

        // Exclude the startVolume as part of the result.
        index = index != -1 ? index + 1 : index;
    }

    final String startChar = prefix == null ? "" : prefix;

    while (index != -1 && index < volumes.size() && result.size() < maxKeys) {
        final String volumeName = volumes.get(index);
        if (volumeName.startsWith(startChar)) {
            final OmVolumeArgs volumeArgs = getVolumeTable()
                .get(getVolumeKey(volumeName));
            if (volumeArgs == null) {
                // Could not get volume info by given volume name,
                // since the volume name is loaded from db,
                // this probably means om db is corrupted or some entries are
                // accidentally removed.
                throw new OMException("Volume info not found for " + volumeName,
                    ResultCodes.VOLUME_NOT_FOUND);
            }
            result.add(volumeArgs);
        }
        index++;
    }

    return result;
}
Example 11
Source File: LogAttribute.java From localization_nifi with Apache License 2.0
protected String processFlowFile(final ComponentLog logger, final DebugLevels logLevel, final FlowFile flowFile,
        final ProcessSession session, final ProcessContext context) {
    final Set<String> attributeKeys = getAttributesToLog(flowFile.getAttributes().keySet(), context);
    final ComponentLog LOG = getLogger();
    final String dashedLine;

    String logPrefix = context.getProperty(LOG_PREFIX).evaluateAttributeExpressions(flowFile).getValue();

    if (StringUtil.isBlank(logPrefix)) {
        dashedLine = StringUtils.repeat('-', 50);
    } else {
        // abbreviate long lines
        logPrefix = StringUtils.abbreviate(logPrefix, 40);
        // center the logPrefix and pad with dashes
        logPrefix = StringUtils.center(logPrefix, 40, '-');
        // five dashes on the left and right side, plus the dashed logPrefix
        dashedLine = StringUtils.repeat('-', 5) + logPrefix + StringUtils.repeat('-', 5);
    }

    // Pretty print metadata
    final StringBuilder message = new StringBuilder();
    message.append("logging for flow file ").append(flowFile);
    message.append("\n");
    message.append(dashedLine);
    message.append("\nStandard FlowFile Attributes");
    message.append(String.format("\nKey: '%1$s'\n\tValue: '%2$s'", "entryDate", new Date(flowFile.getEntryDate())));
    message.append(String.format("\nKey: '%1$s'\n\tValue: '%2$s'", "lineageStartDate", new Date(flowFile.getLineageStartDate())));
    message.append(String.format("\nKey: '%1$s'\n\tValue: '%2$s'", "fileSize", flowFile.getSize()));
    message.append("\nFlowFile Attribute Map Content");
    for (final String key : attributeKeys) {
        message.append(String.format("\nKey: '%1$s'\n\tValue: '%2$s'", key, flowFile.getAttribute(key)));
    }
    message.append("\n");
    message.append(dashedLine);

    // The user can request to log the payload
    final boolean logPayload = context.getProperty(LOG_PAYLOAD).asBoolean();
    if (logPayload) {
        message.append("\n");
        if (flowFile.getSize() < ONE_MB) {
            final FlowFilePayloadCallback callback = new FlowFilePayloadCallback();
            session.read(flowFile, callback);
            message.append(callback.getContents());
        } else {
            message.append("\n Not including payload since it is larger than one mb.");
        }
    }
    final String outputMessage = message.toString().trim();

    // Uses optional property to specify logging level
    switch (logLevel) {
        case info:
            LOG.info(outputMessage);
            break;
        case debug:
            LOG.debug(outputMessage);
            break;
        case warn:
            LOG.warn(outputMessage);
            break;
        case trace:
            LOG.trace(outputMessage);
            break;
        case error:
            LOG.error(outputMessage);
            break;
        default:
            LOG.debug(outputMessage);
    }

    return outputMessage;
}
Example 12
Source File: AlertConfigBean.java From incubator-pinot with Apache License 2.0
public String getType() {
    if (StringUtil.isBlank(type)) {
        return "";
    }
    return type;
}
Example 13
Source File: AlertConfigBean.java From incubator-pinot with Apache License 2.0
public String getProperties() {
    if (StringUtil.isBlank(properties)) {
        return "";
    }
    return properties;
}
Example 14
Source File: LogAttribute.java From nifi with Apache License 2.0
protected String processFlowFile(final ComponentLog logger, final DebugLevels logLevel, final FlowFile flowFile,
        final ProcessSession session, final ProcessContext context) {
    final Set<String> attributeKeys = getAttributesToLog(flowFile.getAttributes().keySet(), context);
    final ComponentLog LOG = getLogger();
    final String dashedLine;

    String logPrefix = context.getProperty(LOG_PREFIX).evaluateAttributeExpressions(flowFile).getValue();
    Charset charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(flowFile).getValue());

    if (StringUtil.isBlank(logPrefix)) {
        dashedLine = StringUtils.repeat('-', 50);
    } else {
        // abbreviate long lines
        logPrefix = StringUtils.abbreviate(logPrefix, 40);
        // center the logPrefix and pad with dashes
        logPrefix = StringUtils.center(logPrefix, 40, '-');
        // five dashes on the left and right side, plus the dashed logPrefix
        dashedLine = StringUtils.repeat('-', 5) + logPrefix + StringUtils.repeat('-', 5);
    }

    // Pretty print metadata
    final StringBuilder message = new StringBuilder();
    message.append("logging for flow file ").append(flowFile);
    message.append("\n");
    message.append(dashedLine);
    message.append("\nStandard FlowFile Attributes");
    message.append(String.format("\nKey: '%1$s'\n\tValue: '%2$s'", "entryDate", new Date(flowFile.getEntryDate())));
    message.append(String.format("\nKey: '%1$s'\n\tValue: '%2$s'", "lineageStartDate", new Date(flowFile.getLineageStartDate())));
    message.append(String.format("\nKey: '%1$s'\n\tValue: '%2$s'", "fileSize", flowFile.getSize()));
    message.append("\nFlowFile Attribute Map Content");
    for (final String key : attributeKeys) {
        message.append(String.format("\nKey: '%1$s'\n\tValue: '%2$s'", key, flowFile.getAttribute(key)));
    }
    message.append("\n");
    message.append(dashedLine);

    // The user can request to log the payload
    final boolean logPayload = context.getProperty(LOG_PAYLOAD).asBoolean();
    if (logPayload) {
        message.append("\n");
        if (flowFile.getSize() < ONE_MB) {
            final FlowFilePayloadCallback callback = new FlowFilePayloadCallback(charset);
            session.read(flowFile, callback);
            message.append(callback.getContents());
        } else {
            message.append("\n Not including payload since it is larger than one mb.");
        }
    }
    final String outputMessage = message.toString().trim();

    // Uses optional property to specify logging level
    switch (logLevel) {
        case info:
            LOG.info(outputMessage);
            break;
        case debug:
            LOG.debug(outputMessage);
            break;
        case warn:
            LOG.warn(outputMessage);
            break;
        case trace:
            LOG.trace(outputMessage);
            break;
        case error:
            LOG.error(outputMessage);
            break;
        default:
            LOG.debug(outputMessage);
    }

    return outputMessage;
}