Java Code Examples for org.apache.commons.io.FileUtils#getTempDirectory()
The following examples show how to use org.apache.commons.io.FileUtils#getTempDirectory(). The original project and source file are noted above each example.
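Before the project examples, a minimal sketch of the call itself may help. FileUtils.getTempDirectory() returns the system temp directory as a java.io.File (it is backed by the java.io.tmpdir system property); the class and file names below are illustrative, not taken from any of the projects:

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.FileUtils;

public class TempDirDemo {
    public static void main(String[] args) throws IOException {
        // Resolve the platform temp directory (java.io.tmpdir).
        File tempDir = FileUtils.getTempDirectory();
        System.out.println("Temp directory: " + tempDir.getAbsolutePath());

        // The pattern most examples below follow: create a scratch file inside it.
        File scratch = new File(tempDir, "demo-" + System.nanoTime() + ".tmp");
        FileUtils.writeStringToFile(scratch, "hello", StandardCharsets.UTF_8);
        System.out.println("Wrote " + scratch.length() + " bytes to " + scratch);
        FileUtils.deleteQuietly(scratch);
    }
}

Unlike File.createTempFile, getTempDirectory() only resolves the directory; uniqueness and cleanup of anything created inside it are the caller's responsibility, which is why the examples below pair it with UUIDs, timestamps, deleteOnExit(), or deleteQuietly().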
Example 1
Source File: Issue313Test.java From jtwig-core with Apache License 2.0
@Test
public void allowingForCurrentDirectoryRelativePaths() throws Exception {
    File parentDirectory = FileUtils.getTempDirectory();
    File subDirectory = new File(parentDirectory, "temp");
    subDirectory.mkdirs();

    File file1 = new File(parentDirectory, "file1.twig");
    File file2 = new File(subDirectory, "file2.twig");
    FileUtils.write(file1, "{% include './temp/file2.twig' %}");
    FileUtils.write(file2, "{{ var }}");

    String result = JtwigTemplate.fileTemplate(file1)
            .render(JtwigModel.newModel()
                    .with("var", "Hi")
            );

    assertThat(result, is("Hi"));
}
Example 2
Source File: FlinkStreamSqlInterpreterTest.java From zeppelin with Apache License 2.0
@Test
public void testResumeStreamSqlFromInvalidSavePointPath()
        throws IOException, InterpreterException, InterruptedException, TimeoutException {
    String initStreamScalaScript = getInitStreamScript(1000);
    InterpreterResult result = flinkInterpreter.interpret(initStreamScalaScript, getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());

    File savepointPath = FileUtils.getTempDirectory();
    InterpreterContext context = getInterpreterContext();
    context.getLocalProperties().put("type", "update");
    context.getLocalProperties().put("savepointPath", savepointPath.getAbsolutePath());
    context.getLocalProperties().put("parallelism", "1");
    context.getLocalProperties().put("maxParallelism", "10");
    InterpreterResult result2 = sqlInterpreter.interpret(
            "select url, count(1) as pv from log group by url", context);

    // due to invalid savepointPath, failed to submit job and throw exception
    assertEquals(InterpreterResult.Code.ERROR, result2.code());
    List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
    assertTrue(resultMessages.toString().contains("Failed to submit job."));
}
Example 3
Source File: AttachmentRestSupportImpl.java From spring-backend-boilerplate with Apache License 2.0
@Override
public FileObject uploadAvatar(UploadFileRequest uploadFileRequest, HttpServletRequest request,
        HttpServletResponse response) throws IOException {
    validate(uploadFileRequest);

    MultipartFile multipartFile = HttpMultipartUtils.resolveMultipartFile(request);
    if (multipartFile == null || multipartFile.isEmpty()) {
        throw new FileStorageException("The multipart of http request is required.");
    }

    FileStorageRequest fileStorageRequest = buildFileStorageRequest(multipartFile, uploadFileRequest);

    File tempFile = new File(FileUtils.getTempDirectory(), UUID.randomUUID() + ".tmp");
    multipartFile.transferTo(tempFile);

    File resizedTempFile = new File(FileUtils.getTempDirectory(), UUID.randomUUID() + ".tmp");
    ImageUtils.zoomBySquare(tempFile, resizedTempFile, 128, true);

    return doUpload(fileStorageRequest, resizedTempFile);
}
Example 4
Source File: Issue294Test.java From jtwig-core with Apache License 2.0
@Test
public void jtwigSupportsAbsolutePaths() throws Exception {
    File baseDir = new File(new File(FileUtils.getTempDirectory(), "jtwig"), "test");
    baseDir.mkdirs();
    File child = new File(baseDir, "child.twig");
    FileUtils.write(child, "{{ var }}");
    File parent = File.createTempFile("jtwig", "test");
    FileUtils.write(parent, "{% extends '"
            + escapeJtwig(baseDir.getAbsolutePath() + File.separator)
            + "child.twig' %}");

    String result = JtwigTemplate.fileTemplate(parent).render(JtwigModel.newModel()
            .with("var", "Hi"));

    assertThat(result, is("Hi"));
}
Example 5
Source File: DocumentStore.java From elexis-3-core with Eclipse Public License 1.0
/**
 * Saves the content to a temp file and returns the absolute path of the temp file. If no
 * tempFilePrefix is defined, the default is 'export'. If no tempFileSuffix is defined, the
 * default is 'tmp'.
 *
 * @param document
 * @param tempFilePrefix
 * @param tempFileSuffix
 * @param deleteOnExit
 * @return
 * @throws ElexisException
 */
public String saveContentToTempFile(IDocument document, String tempFilePrefix, String tempFileSuffix,
        boolean deleteOnExit) throws ElexisException {
    Optional<InputStream> in = getService(document.getStoreId()).loadContent(document);
    if (in.isPresent()) {
        try {
            if (StringUtils.isEmpty(tempFilePrefix)) {
                tempFilePrefix = "export";
            }
            if (StringUtils.isEmpty(tempFileSuffix)) {
                tempFileSuffix = "tmp";
            }
            File tmpFile = new File(FileUtils.getTempDirectory(),
                    FileUtil.removeInvalidChars(tempFilePrefix) + "." + tempFileSuffix);
            FileUtils.copyInputStreamToFile(in.get(), tmpFile);
            if (deleteOnExit) {
                tmpFile.deleteOnExit();
            }
            return tmpFile.getAbsolutePath();
        } catch (IOException e) {
            throw new ElexisException("cannot save content", e);
        }
    }
    return null;
}
Example 6
Source File: WxClient.java From weixin-sdk with Apache License 2.0
private File httpDownload(String url) {
    HttpGet httpGet = new HttpGet(url);
    try (CloseableHttpResponse response = httpClient.execute(httpGet)) {
        StatusLine statusLine = response.getStatusLine();
        HttpEntity entity = response.getEntity();
        if (statusLine.getStatusCode() >= 300) {
            EntityUtils.consume(entity);
            throw new WxRuntimeException(statusLine.getStatusCode(), statusLine.getReasonPhrase());
        } else {
            Header[] dispositionHeaders = response.getHeaders("Content-disposition");
            if (dispositionHeaders != null && dispositionHeaders.length > 0) {
                String fileName = extractFileName(dispositionHeaders[0].getValue());
                if (fileName == null || "".equals(fileName.trim())) {
                    logger.warn("Cannot get filename from Content-disposition");
                    fileName = UUID.randomUUID().toString();
                }
                InputStream inputStream = entity.getContent();
                File tempFile = new File(FileUtils.getTempDirectory(), fileName);
                FileUtils.copyInputStreamToFile(inputStream, tempFile);
                return tempFile;
            } else {
                String errors = entity == null ? null : EntityUtils.toString(entity, Consts.UTF_8);
                logger.warn("download file : {} failed: {}", url, errors);
                // guard against a null body before inspecting it for an error code
                if (errors != null && errors.contains("errcode")) {
                    WxError wxError = WxError.fromJson(errors);
                    throw new WxRuntimeException(wxError);
                } else {
                    throw new WxRuntimeException(999, errors);
                }
            }
        }
    } catch (IOException e) {
        logger.error("http download: {} failed.", url, e);
        throw new WxRuntimeException(999, e.getMessage());
    }
}
Example 7
Source File: DefaultFileManager.java From xmanager with Apache License 2.0
private static File getTmpFile() throws IOException {
    File tmpDir = FileUtils.getTempDirectory();
    if (!tmpDir.exists()) {
        tmpDir.mkdirs();
    }
    String tmpFileName = (Math.random() * 10000 + "").replace(".", "");
    return new File(tmpDir, tmpFileName);
}
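The Math.random()-based file name above is not guaranteed unique and can collide under concurrent calls. A minimal alternative sketch (our suggestion, not the xmanager code) that delegates uniqueness to the JDK while still targeting the Commons IO temp directory:

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class TmpFileSketch {
    // File.createTempFile guarantees a fresh, unique file name in the given directory.
    static File getTmpFile() throws IOException {
        return File.createTempFile("upload-", ".tmp", FileUtils.getTempDirectory());
    }
}

Note that createTempFile actually creates the (empty) file on disk, whereas the original helper only constructs a path; callers that require a non-existent path would need to delete the file first.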
Example 8
Source File: JhoveUtility.java From proarc with GNU General Public License v3.0
/**
 * Creates the JHOVE context and stores its configuration in a default temp folder.
 * Use {@link JhoveContext#destroy() } to remove the temp folder.
 *
 * @return the context
 * @throws MetsExportException failure
 */
public static JhoveContext createContext() throws MetsExportException {
    File temp = new File(FileUtils.getTempDirectory(), "jhove" + UUID.randomUUID().toString());
    if (!temp.mkdir()) {
        throw new MetsExportException("Cannot create " + temp.toString());
    }
    temp.deleteOnExit();
    return createContext(temp);
}
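A caveat on the deleteOnExit() call above: java.io.File#deleteOnExit removes a directory at JVM exit only if it is empty by then, so a populated JHOVE folder survives unless JhoveContext#destroy() runs first. If recursive exit-time cleanup is wanted, Commons IO offers forceDeleteOnExit; a minimal sketch (the method name createWorkDir is ours, not from the proarc source):

import java.io.File;
import java.io.IOException;
import java.util.UUID;

import org.apache.commons.io.FileUtils;

public class TempFolderSketch {
    static File createWorkDir() throws IOException {
        File temp = new File(FileUtils.getTempDirectory(), "work-" + UUID.randomUUID());
        if (!temp.mkdir()) {
            throw new IOException("Cannot create " + temp);
        }
        // Unlike File#deleteOnExit, this schedules a recursive delete at JVM exit.
        FileUtils.forceDeleteOnExit(temp);
        return temp;
    }
}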
Example 9
Source File: FakePartitionLevelConsumer.java From incubator-pinot with Apache License 2.0
FakePartitionLevelConsumer(int partition, StreamConfig streamConfig) {
    // TODO: this logic can move to a FakeStreamProducer instead of being inside the Consumer
    File tempDir = new File(FileUtils.getTempDirectory(), getClass().getSimpleName());
    File outputDir = new File(tempDir, String.valueOf(partition));

    int offset = 0;
    try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream(65536)) {
        File avroFile = unpackAvroTarFile(outputDir).get(0);
        int numPartitions = FakeStreamConfigUtils.getNumPartitions(streamConfig);
        try (DataFileStream<GenericRecord> reader = AvroUtils.getAvroReader(avroFile)) {
            BinaryEncoder binaryEncoder = new EncoderFactory().directBinaryEncoder(outputStream, null);
            GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(reader.getSchema());
            int recordNumber = 0;
            for (GenericRecord genericRecord : reader) {
                if (getPartitionNumber(recordNumber++, numPartitions) != partition) {
                    continue;
                }
                outputStream.reset();
                datumWriter.write(genericRecord, binaryEncoder);
                binaryEncoder.flush();
                byte[] bytes = outputStream.toByteArray();
                // contiguous offsets
                messageOffsets.add(offset++);
                messageBytes.add(bytes);
            }
        }
    } catch (Exception e) {
        LOGGER.error("Could not create {}", FakePartitionLevelConsumer.class.getName(), e);
    } finally {
        FileUtils.deleteQuietly(outputDir);
    }
}
Example 10
Source File: TablesResourceTest.java From incubator-pinot with Apache License 2.0
private boolean downLoadAndVerifySegmentContent(String tableNameWithType, IndexSegment segment) {
    String segmentPath = "/segments/" + tableNameWithType + "/" + segment.getSegmentName();

    // Download the segment and save to a temp local file.
    Response response = _webTarget.path(segmentPath).request().get(Response.class);
    Assert.assertEquals(response.getStatus(), Response.Status.OK.getStatusCode());
    File segmentFile = response.readEntity(File.class);

    File tempMetadataDir = new File(FileUtils.getTempDirectory(), "segment_metadata");
    try (// Extract metadata.properties
        InputStream metadataPropertiesInputStream = TarGzCompressionUtils
            .unTarOneFile(new FileInputStream(segmentFile), V1Constants.MetadataKeys.METADATA_FILE_NAME);
        // Extract creation.meta
        InputStream creationMetaInputStream = TarGzCompressionUtils
            .unTarOneFile(new FileInputStream(segmentFile), V1Constants.SEGMENT_CREATION_META)) {
        Preconditions.checkState(tempMetadataDir.mkdirs(), "Failed to create directory: %s",
            tempMetadataDir.getAbsolutePath());

        Preconditions.checkNotNull(metadataPropertiesInputStream, "%s does not exist",
            V1Constants.MetadataKeys.METADATA_FILE_NAME);
        Path metadataPropertiesPath = FileSystems.getDefault()
            .getPath(tempMetadataDir.getAbsolutePath(), V1Constants.MetadataKeys.METADATA_FILE_NAME);
        Files.copy(metadataPropertiesInputStream, metadataPropertiesPath);

        Preconditions.checkNotNull(creationMetaInputStream, "%s does not exist", V1Constants.SEGMENT_CREATION_META);
        Path creationMetaPath = FileSystems.getDefault()
            .getPath(tempMetadataDir.getAbsolutePath(), V1Constants.SEGMENT_CREATION_META);
        Files.copy(creationMetaInputStream, creationMetaPath);

        // Load segment metadata
        SegmentMetadataImpl metadata = new SegmentMetadataImpl(tempMetadataDir);
        Assert.assertEquals(tableNameWithType, metadata.getTableName());
        return true;
    } catch (Exception e) {
        LOGGER.error("Failure in segment extraction and verification:", e);
        return false;
    } finally {
        FileUtils.deleteQuietly(tempMetadataDir);
    }
}
Example 11
Source File: TicketsTest.java From weixin-sdk with Apache License 2.0
@Test
public void testCreatePermString() {
    Ticket ticket = Tickets.defaultTickets().permanent("abc");
    Assert.assertNotNull(ticket);
    byte[] bytes = Tickets.defaultTickets().getQrcode(ticket.getTicket());
    File file = new File(FileUtils.getTempDirectory(), "qrcode.jpg");
    // close the stream so the buffered bytes are actually flushed to disk
    try (OutputStream out = new BufferedOutputStream(new FileOutputStream(file))) {
        IOUtils.write(bytes, out);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Example 12
Source File: SeleniumCodeGeneratorAndUtil.java From gatf with Apache License 2.0
public static void clean() throws Exception {
    File gcdir = new File(FileUtils.getTempDirectory(), "gatf-code");
    if (gcdir.exists()) {
        FileUtils.deleteDirectory(gcdir);
    }
    File dir = new File(FileUtils.getTempDirectory(), "gatf-code/com/gatf/selenium/");
    dir.mkdirs();
}
Example 13
Source File: RedisProtocolParser.java From Redis-Synyed with Apache License 2.0
/**
 * Creates the temp-file channel object.
 *
 * @throws RedisProtocolException
 *             thrown when a problem occurs
 */
private void createTempFileChannel() throws RedisProtocolException {
    try {
        tempFile = new File(FileUtils.getTempDirectory(), System.currentTimeMillis() + ".synyed");
        tempRandomAccessFile = new RandomAccessFile(tempFile, "rw");
        this.tempFileChannel = tempRandomAccessFile.getChannel();
    } catch (FileNotFoundException e) {
        throw new RedisProtocolException(e);
    }
}
Example 14
Source File: HadoopSegmentCreationMapper.java From incubator-pinot with Apache License 2.0
@Override
public void setup(Context context) {
    _jobConf = context.getConfiguration();
    Yaml yaml = new Yaml();
    String segmentGenerationJobSpecStr = _jobConf.get(SEGMENT_GENERATION_JOB_SPEC);
    _spec = yaml.loadAs(segmentGenerationJobSpecStr, SegmentGenerationJobSpec.class);
    LOGGER.info("Segment generation job spec : {}", segmentGenerationJobSpecStr);
    _localTempDir = new File(FileUtils.getTempDirectory(), "pinot-" + System.currentTimeMillis());

    // Load Pinot Plugins copied from Distributed cache.
    File localPluginsTarFile = new File(PINOT_PLUGINS_TAR_GZ);
    if (localPluginsTarFile.exists()) {
        File pluginsDirFile = new File(PINOT_PLUGINS_DIR);
        try {
            TarGzCompressionUtils.unTar(localPluginsTarFile, pluginsDirFile);
        } catch (Exception e) {
            LOGGER.error("Failed to untar local Pinot plugins tarball file [{}]", localPluginsTarFile, e);
            throw new RuntimeException(e);
        }
        LOGGER.info("Trying to set System Property: {}={}", PLUGINS_DIR_PROPERTY_NAME,
            pluginsDirFile.getAbsolutePath());
        System.setProperty(PLUGINS_DIR_PROPERTY_NAME, pluginsDirFile.getAbsolutePath());
        String pluginsIncludes = _jobConf.get(PLUGINS_INCLUDE_PROPERTY_NAME);
        if (pluginsIncludes != null) {
            LOGGER.info("Trying to set System Property: {}={}", PLUGINS_INCLUDE_PROPERTY_NAME, pluginsIncludes);
            System.setProperty(PLUGINS_INCLUDE_PROPERTY_NAME, pluginsIncludes);
        }
        LOGGER.info("Pinot plugins System Properties are set at [{}], plugins includes [{}]",
            System.getProperty(PLUGINS_DIR_PROPERTY_NAME), System.getProperty(PLUGINS_INCLUDE_PROPERTY_NAME));
    } else {
        LOGGER.warn("Cannot find local Pinot plugins directory at [{}]", localPluginsTarFile.getAbsolutePath());
    }
}
Example 15
Source File: StorageManager.java From sanshanblog with Apache License 2.0
private static File getTmpFile() {
    File tmpDir = FileUtils.getTempDirectory();
    String tmpFileName = (Math.random() * 10000 + "").replace(".", "");
    return new File(tmpDir, tmpFileName);
}
Example 16
Source File: StorageManager.java From cms with Apache License 2.0
private static File getTmpFile() {
    File tmpDir = FileUtils.getTempDirectory();
    String tmpFileName = (Math.random() * 10000 + "").replace(".", "");
    return new File(tmpDir, tmpFileName);
}
Example 17
Source File: StorageManager.java From kvf-admin with MIT License
private static File getTmpFile() {
    File tmpDir = FileUtils.getTempDirectory();
    String tmpFileName = (Math.random() * 10000 + "").replace(".", "");
    return new File(tmpDir, tmpFileName);
}
Example 18
Source File: Quickstart.java From incubator-pinot with Apache License 2.0
public void execute() throws Exception {
    File quickstartTmpDir = new File(FileUtils.getTempDirectory(), String.valueOf(System.currentTimeMillis()));
    File configDir = new File(quickstartTmpDir, "configs");
    File dataDir = new File(quickstartTmpDir, "data");
    Preconditions.checkState(configDir.mkdirs());
    Preconditions.checkState(dataDir.mkdirs());

    File schemaFile = new File(configDir, "baseballStats_schema.json");
    File dataFile = new File(configDir, "baseballStats_data.csv");
    File tableConfigFile = new File(configDir, "baseballStats_offline_table_config.json");
    File ingestionJobSpecFile = new File(configDir, "ingestionJobSpec.yaml");

    ClassLoader classLoader = Quickstart.class.getClassLoader();
    URL resource = classLoader.getResource("examples/batch/baseballStats/baseballStats_schema.json");
    com.google.common.base.Preconditions.checkNotNull(resource);
    FileUtils.copyURLToFile(resource, schemaFile);
    resource = classLoader.getResource("examples/batch/baseballStats/rawdata/baseballStats_data.csv");
    com.google.common.base.Preconditions.checkNotNull(resource);
    FileUtils.copyURLToFile(resource, dataFile);
    resource = classLoader.getResource("examples/batch/baseballStats/ingestionJobSpec.yaml");
    com.google.common.base.Preconditions.checkNotNull(resource);
    FileUtils.copyURLToFile(resource, ingestionJobSpecFile);
    resource = classLoader.getResource("examples/batch/baseballStats/baseballStats_offline_table_config.json");
    com.google.common.base.Preconditions.checkNotNull(resource);
    FileUtils.copyURLToFile(resource, tableConfigFile);

    QuickstartTableRequest request = new QuickstartTableRequest("baseballStats", schemaFile, tableConfigFile,
        ingestionJobSpecFile, FileFormat.CSV);
    final QuickstartRunner runner = new QuickstartRunner(Lists.newArrayList(request), 1, 1, 1, dataDir);

    printStatus(Color.CYAN, "***** Starting Zookeeper, controller, broker and server *****");
    runner.startAll();
    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        try {
            printStatus(Color.GREEN, "***** Shutting down offline quick start *****");
            runner.stop();
            FileUtils.deleteDirectory(quickstartTmpDir);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }));
    printStatus(Color.CYAN, "***** Adding baseballStats table *****");
    runner.addTable();
    printStatus(Color.CYAN,
        "***** Launch data ingestion job to build index segment for baseballStats and push to controller *****");
    runner.launchDataIngestionJob();
    printStatus(Color.CYAN, "***** Waiting for 5 seconds for the server to fetch the assigned segment *****");
    Thread.sleep(5000);
    printStatus(Color.YELLOW, "***** Offline quickstart setup complete *****");

    String q1 = "select count(*) from baseballStats limit 1";
    printStatus(Color.YELLOW, "Total number of documents in the table");
    printStatus(Color.CYAN, "Query : " + q1);
    printStatus(Color.YELLOW, prettyPrintResponse(runner.runQuery(q1)));
    printStatus(Color.GREEN, "***************************************************");

    String q2 = "select playerName, sum(runs) from baseballStats group by playerName order by sum(runs) desc limit 5";
    printStatus(Color.YELLOW, "Top 5 run scorers of all time ");
    printStatus(Color.CYAN, "Query : " + q2);
    printStatus(Color.YELLOW, prettyPrintResponse(runner.runQuery(q2)));
    printStatus(Color.GREEN, "***************************************************");

    String q3 = "select playerName, sum(runs) from baseballStats where yearID=2000 group by playerName order by sum(runs) desc limit 5";
    printStatus(Color.YELLOW, "Top 5 run scorers of the year 2000");
    printStatus(Color.CYAN, "Query : " + q3);
    printStatus(Color.YELLOW, prettyPrintResponse(runner.runQuery(q3)));
    printStatus(Color.GREEN, "***************************************************");

    String q4 = "select playerName, sum(runs) from baseballStats where yearID>=2000 group by playerName order by sum(runs) desc limit 10";
    printStatus(Color.YELLOW, "Top 10 run scorers after 2000");
    printStatus(Color.CYAN, "Query : " + q4);
    printStatus(Color.YELLOW, prettyPrintResponse(runner.runQuery(q4)));
    printStatus(Color.GREEN, "***************************************************");

    String q5 = "select playerName, runs, homeRuns from baseballStats order by yearID limit 10";
    printStatus(Color.YELLOW, "Print playerName,runs,homeRuns for 10 records from the table and order them by yearID");
    printStatus(Color.CYAN, "Query : " + q5);
    printStatus(Color.YELLOW, prettyPrintResponse(runner.runQuery(q5)));
    printStatus(Color.GREEN, "***************************************************");

    printStatus(Color.GREEN, "You can always go to http://localhost:9000/query to play around in the query console");
}
Example 19
Source File: RestrictToELTest.java From owltools with BSD 3-Clause "New" or "Revised" License
private void writeOWLOntolog(OWLGraphWrapper gmod, String fileName) throws OWLOntologyStorageException {
    File file = new File(FileUtils.getTempDirectory(), fileName);
    IRI iri = IRI.create(file);
    System.out.println("Saving ontology to file: " + file);
    gmod.getSourceOntology().getOWLOntologyManager().saveOntology(gmod.getSourceOntology(),
        new OWLXMLDocumentFormat(), iri);
}
Example 20
Source File: GitHubEventsQuickstart.java From incubator-pinot with Apache License 2.0
public void execute(String personalAccessToken) throws Exception {
    final File quickStartDataDir = new File("githubEvents" + System.currentTimeMillis());
    if (!quickStartDataDir.exists()) {
        Preconditions.checkState(quickStartDataDir.mkdirs());
    }

    File schemaFile = new File(quickStartDataDir, "pullRequestMergedEvents_schema.json");
    File tableConfigFile = new File(quickStartDataDir, "pullRequestMergedEvents_realtime_table_config.json");

    ClassLoader classLoader = Quickstart.class.getClassLoader();
    URL resource = classLoader.getResource("examples/stream/githubEvents/pullRequestMergedEvents_schema.json");
    Preconditions.checkNotNull(resource);
    FileUtils.copyURLToFile(resource, schemaFile);
    resource = classLoader.getResource("examples/stream/githubEvents/pullRequestMergedEvents_realtime_table_config.json");
    Preconditions.checkNotNull(resource);
    FileUtils.copyURLToFile(resource, tableConfigFile);

    File tempDir = new File(FileUtils.getTempDirectory(), String.valueOf(System.currentTimeMillis()));
    Preconditions.checkState(tempDir.mkdirs());
    QuickstartTableRequest request = new QuickstartTableRequest("pullRequestMergedEvents", schemaFile, tableConfigFile);
    final QuickstartRunner runner = new QuickstartRunner(Lists.newArrayList(request), 1, 1, 1, tempDir);

    printStatus(Color.CYAN, "***** Starting Kafka *****");
    startKafka();
    printStatus(Color.CYAN, "***** Starting zookeeper, controller, server and broker *****");
    runner.startAll();
    printStatus(Color.CYAN, "***** Adding pullRequestMergedEvents table *****");
    runner.addTable();
    printStatus(Color.CYAN, "***** Starting pullRequestMergedEvents data stream and publishing to Kafka *****");
    final PullRequestMergedEventsStream pullRequestMergedEventsStream =
        new PullRequestMergedEventsStream(schemaFile.getAbsolutePath(), "pullRequestMergedEvents",
            KafkaStarterUtils.DEFAULT_KAFKA_BROKER, personalAccessToken);
    pullRequestMergedEventsStream.execute();
    printStatus(Color.CYAN, "***** Waiting for 10 seconds for a few events to get populated *****");
    Thread.sleep(10000);

    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        try {
            printStatus(Color.GREEN, "***** Shutting down GitHubEventsQuickStart *****");
            runner.stop();
            _kafkaStarter.stop();
            ZkStarter.stopLocalZkServer(_zookeeperInstance);
            FileUtils.deleteDirectory(quickStartDataDir);
        } catch (Exception e) {
            LOGGER.error("Caught exception in shutting down GitHubEvents QuickStart", e);
        }
    }));

    printStatus(Color.YELLOW, "***** Realtime github demo quickstart setup complete *****");

    String q1 = "select count(*) from pullRequestMergedEvents limit 0";
    printStatus(Color.YELLOW, "Total number of documents in the table");
    printStatus(Color.CYAN, "Query : " + q1);
    printStatus(Color.YELLOW, prettyPrintResponse(runner.runQuery(q1)));
    printStatus(Color.GREEN, "***************************************************");

    String q2 = "select sum(numLinesAdded) from pullRequestMergedEvents group by repo top 10 limit 0";
    printStatus(Color.YELLOW, "Top 10 repo with the most lines added");
    printStatus(Color.CYAN, "Query : " + q2);
    printStatus(Color.YELLOW, prettyPrintResponse(runner.runQuery(q2)));
    printStatus(Color.GREEN, "***************************************************");

    String q3 = "select * from pullRequestMergedEvents where authorAssociation = 'COLLABORATOR' limit 10";
    printStatus(Color.YELLOW, "Show data for COLLABORATORS");
    printStatus(Color.CYAN, "Query : " + q3);
    printStatus(Color.YELLOW, prettyPrintResponse(runner.runQuery(q3)));
    printStatus(Color.GREEN, "***************************************************");

    String q4 = "select max(elapsedTimeMillis) from pullRequestMergedEvents group by repo top 10 limit 0";
    printStatus(Color.YELLOW, "Show repos with longest alive pull requests");
    printStatus(Color.CYAN, "Query : " + q4);
    printStatus(Color.YELLOW, prettyPrintResponse(runner.runQuery(q4)));
    printStatus(Color.GREEN, "***************************************************");

    String q5 = "select count(*) from pullRequestMergedEvents";
    printStatus(Color.YELLOW, "Total number of documents in the table");
    printStatus(Color.CYAN, "Query : " + q5);
    printStatus(Color.YELLOW, prettyPrintResponse(runner.runQuery(q5)));
    printStatus(Color.GREEN, "***************************************************");

    printStatus(Color.GREEN, "You can always go to http://localhost:9000/query/ to play around in the query console");
}