Java Code Examples for org.apache.lucene.util.TestUtil#nextLong()
The following examples show how to use org.apache.lucene.util.TestUtil#nextLong().
Each example lists the project and source file it was taken from.
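TestUtil lives in Lucene's test framework (the lucene-test-framework artifact), and nextLong(Random, long, long) draws a random long where both bounds are inclusive, which is why several examples below can pass Long.MIN_VALUE and Long.MAX_VALUE directly. The minimal sketch below shows the bare call pattern outside a Lucene test; the class name and fixed seed are illustrative only (inside a LuceneTestCase subclass you would use random(), as the examples do).

import java.util.Random;

import org.apache.lucene.util.TestUtil;

public class NextLongSketch {
  public static void main(String[] args) {
    // fixed seed only to keep this sketch reproducible; Lucene tests use LuceneTestCase.random()
    Random random = new Random(42L);

    // both bounds are inclusive, so the full long range is legal
    long anyLong = TestUtil.nextLong(random, Long.MIN_VALUE, Long.MAX_VALUE);

    // a bounded draw, e.g. a random value that fits in 20 bits: [0, 2^20 - 1]
    long bounded = TestUtil.nextLong(random, 0, (1L << 20) - 1);

    System.out.println(anyLong + " " + bounded);
  }
}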
Example 1
Source File: BaseNormsFormatTestCase.java (from lucene-solr, Apache License 2.0, 6 votes)

public void testSparseNCommon() throws Exception {
  assumeTrue("Requires sparse norms support", codecSupportsSparsity());
  final Random r = random();
  final int N = TestUtil.nextInt(r, 2, 15);
  final long[] commonValues = new long[N];
  for (int j = 0; j < N; ++j) {
    commonValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
  }
  final int numOtherValues = TestUtil.nextInt(r, 2, 256 - N);
  final long[] otherValues = new long[numOtherValues];
  for (int j = 0; j < numOtherValues; ++j) {
    otherValues[j] = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
  }
  doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
    @Override
    public long getAsLong() {
      return r.nextInt(100) == 0 ? otherValues[r.nextInt(numOtherValues - 1)] : commonValues[r.nextInt(N - 1)];
    }
  });
}
Example 2
Source File: TestTimeLimitingCollector.java (from lucene-solr, Apache License 2.0, 6 votes)

private void doTestSearch() {
  int totalResults = 0;
  int totalTLCResults = 0;
  try {
    MyHitCollector myHc = new MyHitCollector();
    search(myHc);
    totalResults = myHc.hitCount();

    myHc = new MyHitCollector();
    long oneHour = 3600000;
    long duration = TestUtil.nextLong(random(), oneHour, Long.MAX_VALUE);
    Collector tlCollector = createTimedCollector(myHc, duration, false);
    search(tlCollector);
    totalTLCResults = myHc.hitCount();
  } catch (Exception e) {
    e.printStackTrace();
    assertTrue("Unexpected exception: " + e, false); //==fail
  }
  assertEquals("Wrong number of results!", totalResults, totalTLCResults);
}
Example 3
Source File: TestLongRangeFieldQueries.java (from lucene-solr, Apache License 2.0, 6 votes)

private long nextLongInternal() {
  switch (random().nextInt(5)) {
    case 0:
      return Long.MIN_VALUE;
    case 1:
      return Long.MAX_VALUE;
    default:
      int bpv = random().nextInt(64);
      switch (bpv) {
        case 64:
          return random().nextLong();
        default:
          long v = TestUtil.nextLong(random(), 0, (1L << bpv) - 1);
          if (bpv > 0) {
            // negative values sometimes
            v -= 1L << (bpv - 1);
          }
          return v;
      }
  }
}
Example 4
Source File: TestDirectMonotonic.java (from lucene-solr, Apache License 2.0, 6 votes)

public void testMonotonicBinarySearchRandom() throws IOException {
  try (Directory dir = newDirectory()) {
    final int iters = atLeast(100);
    for (int iter = 0; iter < iters; ++iter) {
      final int arrayLength = random().nextInt(1 << random().nextInt(14));
      final long[] array = new long[arrayLength];
      final long base = random().nextLong();
      final int bpv = TestUtil.nextInt(random(), 4, 61);
      for (int i = 0; i < array.length; ++i) {
        array[i] = base + TestUtil.nextLong(random(), 0, (1L << bpv) - 1);
      }
      Arrays.sort(array);
      doTestMonotonicBinarySearchAgainstLongArray(dir, array, TestUtil.nextInt(random(), 2, 10));
    }
  }
}
Example 5
Source File: TestPackedInts.java (from lucene-solr, Apache License 2.0, 6 votes)

// exercises a PagedGrowableWriter whose size and written index both exceed Integer.MAX_VALUE
@Ignore
public void testPagedGrowableWriterOverflow() {
  final long size = TestUtil.nextLong(random(), 2 * (long) Integer.MAX_VALUE, 3 * (long) Integer.MAX_VALUE);
  final int pageSize = 1 << (TestUtil.nextInt(random(), 16, 30));
  final PagedGrowableWriter writer = new PagedGrowableWriter(size, pageSize, 1, random().nextFloat());
  final long index = TestUtil.nextLong(random(), (long) Integer.MAX_VALUE, size - 1);
  writer.set(index, 2);
  assertEquals(2, writer.get(index));
  for (int i = 0; i < 1000000; ++i) {
    final long idx = TestUtil.nextLong(random(), 0, size);
    if (idx == index) {
      assertEquals(2, writer.get(idx));
    } else {
      assertEquals(0, writer.get(idx));
    }
  }
}
Example 6
Source File: FileDictionaryTest.java (from lucene-solr, Apache License 2.0, 6 votes)

private Map.Entry<List<String>, String> generateFileEntry(String fieldDelimiter, boolean hasWeight, boolean hasPayload) {
  List<String> entryValues = new ArrayList<>();
  StringBuilder sb = new StringBuilder();
  String term = TestUtil.randomSimpleString(random(), 1, 300);
  sb.append(term);
  entryValues.add(term);
  if (hasWeight) {
    sb.append(fieldDelimiter);
    long weight = TestUtil.nextLong(random(), Long.MIN_VALUE, Long.MAX_VALUE);
    sb.append(weight);
    entryValues.add(String.valueOf(weight));
  }
  if (hasPayload) {
    sb.append(fieldDelimiter);
    String payload = TestUtil.randomSimpleString(random(), 1, 300);
    sb.append(payload);
    entryValues.add(payload);
  }
  sb.append("\n");
  return new SimpleEntry<>(entryValues, sb.toString());
}
Example 7
Source File: TestSolrXml.java (from lucene-solr, Apache License 2.0, 5 votes)

public void testFailAtConfigParseTimeWhenIntTypeIsExpectedAndLongTypeIsGiven() {
  long val = TestUtil.nextLong(random(), Integer.MAX_VALUE, Long.MAX_VALUE);
  String solrXml = String.format(Locale.ROOT, "<solr><solrcloud><long name=\"maxUpdateConnections\">%d</long></solrcloud></solr>", val);

  expectedException.expect(SolrException.class);
  expectedException.expectMessage("Error parsing 'maxUpdateConnections'");

  SolrXmlConfig.fromString(solrHome, solrXml); // return not used, only for validation
}
Example 8
Source File: BaseNormsFormatTestCase.java (from lucene-solr, Apache License 2.0, 5 votes)

public void testOutliers2() throws Exception {
  int iterations = atLeast(1);
  final Random r = random();
  for (int i = 0; i < iterations; i++) {
    final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
    final long uncommonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
    doTestNormsVersusDocValues(1, new LongSupplier() {
      @Override
      public long getAsLong() {
        return r.nextInt(100) == 0 ? uncommonValue : commonValue;
      }
    });
  }
}
Example 9
Source File: BaseNormsFormatTestCase.java (from lucene-solr, Apache License 2.0, 5 votes)

public void testSparseOutliers2() throws Exception {
  assumeTrue("Requires sparse norms support", codecSupportsSparsity());
  int iterations = atLeast(1);
  final Random r = random();
  for (int i = 0; i < iterations; i++) {
    final long commonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
    final long uncommonValue = TestUtil.nextLong(r, Byte.MIN_VALUE, Byte.MAX_VALUE);
    doTestNormsVersusDocValues(random().nextDouble(), new LongSupplier() {
      @Override
      public long getAsLong() {
        return r.nextInt(100) == 0 ? uncommonValue : commonValue;
      }
    });
  }
}
Example 10
Source File: TestPackedInts.java (from lucene-solr, Apache License 2.0, 5 votes)

private static void fill(PackedInts.Mutable packedInt, int bitsPerValue, long randomSeed) {
  Random rnd2 = new Random(randomSeed);
  final long maxValue = bitsPerValue == 64 ? Long.MAX_VALUE : (1L << bitsPerValue) - 1;
  for (int i = 0; i < packedInt.size(); i++) {
    long value = bitsPerValue == 64 ? random().nextLong() : TestUtil.nextLong(rnd2, 0, maxValue);
    packedInt.set(i, value);
    assertEquals(String.format(Locale.ROOT, "The set/get of the value at index %d should match for %s",
        i, packedInt.getClass().getSimpleName()), value, packedInt.get(i));
  }
}
Example 11
Source File: TestSleepingLockWrapper.java (from lucene-solr, Apache License 2.0, 5 votes)

@Override
protected Directory getDirectory(Path path) throws IOException {
  long lockWaitTimeout = TestUtil.nextLong(random(), 20, 100);
  long pollInterval = TestUtil.nextLong(random(), 2, 10);

  int which = random().nextInt(3);
  switch (which) {
    case 0:
      return new SleepingLockWrapper(newDirectory(random(), new SingleInstanceLockFactory()), lockWaitTimeout, pollInterval);
    case 1:
      return new SleepingLockWrapper(newFSDirectory(path), lockWaitTimeout, pollInterval);
    default:
      return new SleepingLockWrapper(newFSDirectory(path), lockWaitTimeout, pollInterval);
  }
}
Example 12
Source File: TestDistributedStatsComponentCardinality.java (from lucene-solr, Apache License 2.0, 5 votes)

public TestDistributedStatsComponentCardinality() {
  // we need DVs on point fields to compute stats
  if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true");

  // we want some randomness in the shard number, but we don't want multiple iterations
  fixShardCount(TEST_NIGHTLY ? 7 : random().nextInt(3) + 1);

  handle.put("maxScore", SKIPVAL);

  NUM_DOCS = TestUtil.nextInt(random(), 10000, 15000);
  MAX_LONG = TestUtil.nextLong(random(), 0, NUM_DOCS * BIG_PRIME);
  MIN_LONG = MAX_LONG - (((long) NUM_DOCS - 1) * BIG_PRIME);
}
Example 13
Source File: TestLucene86PointsFormat.java (from lucene-solr, Apache License 2.0, 5 votes)

// sanity-checks PointValues.estimateDocCount() against randomly chosen size, docCount and estimated point count
public void testRandomDocCount() {
  for (int i = 0; i < 100; i++) {
    long size = TestUtil.nextLong(random(), 1, Long.MAX_VALUE);
    int maxDoc = (size > Integer.MAX_VALUE) ? Integer.MAX_VALUE : Math.toIntExact(size);
    int docCount = TestUtil.nextInt(random(), 1, maxDoc);
    long estimatedPointCount = TestUtil.nextLong(random(), 0, size);
    PointValues values = getPointValues(size, docCount, estimatedPointCount);
    long docs = values.estimateDocCount(null);
    assertTrue(docs <= estimatedPointCount);
    assertTrue(docs <= maxDoc);
    assertTrue(docs >= estimatedPointCount / (size / docCount));
  }
}
Example 14
Source File: TestLucene60PointsFormat.java (from lucene-solr, Apache License 2.0, 5 votes)

public void testRandomDocCount() {
  for (int i = 0; i < 100; i++) {
    long size = TestUtil.nextLong(random(), 1, Long.MAX_VALUE);
    int maxDoc = (size > Integer.MAX_VALUE) ? Integer.MAX_VALUE : Math.toIntExact(size);
    int docCount = TestUtil.nextInt(random(), 1, maxDoc);
    long estimatedPointCount = TestUtil.nextLong(random(), 0, size);
    PointValues values = getPointValues(size, docCount, estimatedPointCount);
    long docs = values.estimateDocCount(null);
    assertTrue(docs <= estimatedPointCount);
    assertTrue(docs <= maxDoc);
    assertTrue(docs >= estimatedPointCount / (size / docCount));
  }
}
Example 15
Source File: TestSegmentInfos.java (from lucene-solr, Apache License 2.0, 4 votes)

public void testBitFlippedTriggersCorruptIndexException() throws IOException {
  BaseDirectoryWrapper dir = newDirectory();
  dir.setCheckIndexOnClose(false);
  byte id[] = StringHelper.randomId();
  Codec codec = Codec.getDefault();

  SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
  SegmentInfo info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_0", 1, false, Codec.getDefault(),
      Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
  info.setFiles(Collections.<String>emptySet());
  codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
  SegmentCommitInfo commitInfo = new SegmentCommitInfo(info, 0, 0, -1, -1, -1, StringHelper.randomId());
  sis.add(commitInfo);

  info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_1", 1, false, Codec.getDefault(),
      Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
  info.setFiles(Collections.<String>emptySet());
  codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
  commitInfo = new SegmentCommitInfo(info, 0, 0, -1, -1, -1, StringHelper.randomId());
  sis.add(commitInfo);

  sis.commit(dir);

  BaseDirectoryWrapper corruptDir = newDirectory();
  corruptDir.setCheckIndexOnClose(false);
  boolean corrupt = false;
  for (String file : dir.listAll()) {
    if (file.startsWith(IndexFileNames.SEGMENTS)) {
      try (IndexInput in = dir.openInput(file, IOContext.DEFAULT);
           IndexOutput out = corruptDir.createOutput(file, IOContext.DEFAULT)) {
        final long corruptIndex = TestUtil.nextLong(random(), 0, in.length() - 1);
        out.copyBytes(in, corruptIndex);
        final int b = Byte.toUnsignedInt(in.readByte()) + TestUtil.nextInt(random(), 0x01, 0xff);
        out.writeByte((byte) b);
        out.copyBytes(in, in.length() - in.getFilePointer());
      }
      try (IndexInput in = corruptDir.openInput(file, IOContext.DEFAULT)) {
        CodecUtil.checksumEntireFile(in);
        if (VERBOSE) {
          System.out.println("TEST: Altering the file did not update the checksum, aborting...");
        }
        return;
      } catch (CorruptIndexException e) {
        // ok
      }
      corrupt = true;
    } else if (ExtrasFS.isExtra(file) == false) {
      corruptDir.copyFrom(dir, file, file, IOContext.DEFAULT);
    }
  }
  assertTrue("No segments file found", corrupt);

  expectThrowsAnyOf(
      Arrays.asList(CorruptIndexException.class, IndexFormatTooOldException.class, IndexFormatTooNewException.class),
      () -> SegmentInfos.readLatestCommit(corruptDir));
  dir.close();
  corruptDir.close();
}
Example 16
Source File: TestPackedInts.java (from lucene-solr, Apache License 2.0, 4 votes)

@Nightly
public void testBlockReaderOverflow() throws IOException {
  final long valueCount = TestUtil.nextLong(random(), 1L + Integer.MAX_VALUE, (long) Integer.MAX_VALUE * 2);
  final int blockSize = 1 << TestUtil.nextInt(random(), 20, 22);
  final Directory dir = newDirectory();
  final IndexOutput out = dir.createOutput("out.bin", IOContext.DEFAULT);
  final BlockPackedWriter writer = new BlockPackedWriter(out, blockSize);
  long value = random().nextInt() & 0xFFFFFFFFL;
  long valueOffset = TestUtil.nextLong(random(), 0, valueCount - 1);
  for (long i = 0; i < valueCount; ) {
    assertEquals(i, writer.ord());
    if ((i & (blockSize - 1)) == 0 && (i + blockSize < valueOffset || i > valueOffset && i + blockSize < valueCount)) {
      writer.addBlockOfZeros();
      i += blockSize;
    } else if (i == valueOffset) {
      writer.add(value);
      ++i;
    } else {
      writer.add(0);
      ++i;
    }
  }
  writer.finish();
  out.close();

  final IndexInput in = dir.openInput("out.bin", IOContext.DEFAULT);
  final BlockPackedReaderIterator it = new BlockPackedReaderIterator(in, PackedInts.VERSION_CURRENT, blockSize, valueCount);
  it.skip(valueOffset);
  assertEquals(value, it.next());
  in.seek(0L);
  final BlockPackedReader reader = new BlockPackedReader(in, PackedInts.VERSION_CURRENT, blockSize, valueCount, random().nextBoolean());
  assertEquals(value, reader.get(valueOffset));
  for (int i = 0; i < 5; ++i) {
    final long offset = TestUtil.nextLong(random(), 0, valueCount - 1);
    if (offset == valueOffset) {
      assertEquals(value, reader.get(offset));
    } else {
      assertEquals(0, reader.get(offset));
    }
  }
  in.close();
  dir.close();
}
Example 17
Source File: TestDocValuesQueries.java (from lucene-solr, Apache License 2.0, 4 votes)

private void doTestDuelPointRangeNumericRangeQuery(boolean sortedNumeric, int maxValuesPerDoc) throws IOException {
  final int iters = atLeast(10);
  for (int iter = 0; iter < iters; ++iter) {
    Directory dir = newDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
    final int numDocs = atLeast(100);
    for (int i = 0; i < numDocs; ++i) {
      Document doc = new Document();
      final int numValues = TestUtil.nextInt(random(), 0, maxValuesPerDoc);
      for (int j = 0; j < numValues; ++j) {
        final long value = TestUtil.nextLong(random(), -100, 10000);
        if (sortedNumeric) {
          doc.add(new SortedNumericDocValuesField("dv", value));
        } else {
          doc.add(new NumericDocValuesField("dv", value));
        }
        doc.add(new LongPoint("idx", value));
      }
      iw.addDocument(doc);
    }
    if (random().nextBoolean()) {
      iw.deleteDocuments(LongPoint.newRangeQuery("idx", 0L, 10L));
    }
    final IndexReader reader = iw.getReader();
    final IndexSearcher searcher = newSearcher(reader, false);
    iw.close();
    for (int i = 0; i < 100; ++i) {
      final long min = random().nextBoolean() ? Long.MIN_VALUE : TestUtil.nextLong(random(), -100, 10000);
      final long max = random().nextBoolean() ? Long.MAX_VALUE : TestUtil.nextLong(random(), -100, 10000);
      final Query q1 = LongPoint.newRangeQuery("idx", min, max);
      final Query q2;
      if (sortedNumeric) {
        q2 = SortedNumericDocValuesField.newSlowRangeQuery("dv", min, max);
      } else {
        q2 = NumericDocValuesField.newSlowRangeQuery("dv", min, max);
      }
      assertSameMatches(searcher, q1, q2, false);
    }
    reader.close();
    dir.close();
  }
}
Example 18
Source File: TestLegacyFieldCache.java (from lucene-solr, Apache License 2.0, 4 votes)

public void testLongFieldCache() throws IOException {
  Directory dir = newDirectory();
  IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
  cfg.setMergePolicy(newLogMergePolicy());
  RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);
  Document doc = new Document();
  LegacyLongField field = new LegacyLongField("f", 0L, Store.YES);
  doc.add(field);
  final long[] values = new long[TestUtil.nextInt(random(), 1, 10)];
  Set<Integer> missing = new HashSet<>();
  for (int i = 0; i < values.length; ++i) {
    final long v;
    switch (random().nextInt(10)) {
      case 0:
        v = Long.MIN_VALUE;
        break;
      case 1:
        v = 0;
        break;
      case 2:
        v = Long.MAX_VALUE;
        break;
      default:
        v = TestUtil.nextLong(random(), -10, 10);
        break;
    }
    values[i] = v;
    if (v == 0 && random().nextBoolean()) {
      // missing
      iw.addDocument(new Document());
      missing.add(i);
    } else {
      field.setLongValue(v);
      iw.addDocument(doc);
    }
  }
  iw.forceMerge(1);
  final DirectoryReader reader = iw.getReader();
  final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.LEGACY_LONG_PARSER);
  for (int i = 0; i < values.length; ++i) {
    if (missing.contains(i) == false) {
      assertEquals(i, longs.nextDoc());
      assertEquals(values[i], longs.longValue());
    }
  }
  assertEquals(NO_MORE_DOCS, longs.nextDoc());
  reader.close();
  iw.close();
  dir.close();
}
Example 19
Source File: BaseSimilarityTestCase.java (from lucene-solr, Apache License 2.0, 4 votes)

/**
 * returns new random term, that fits within the bounds of the corpus
 */
static TermStatistics newTerm(Random random, CollectionStatistics corpus) {
  final long docFreq;
  switch (random.nextInt(3)) {
    case 0:
      // rare term
      docFreq = 1;
      break;
    case 1:
      // common term
      docFreq = corpus.docCount();
      break;
    default:
      // random specificity
      docFreq = TestUtil.nextLong(random, 1, corpus.docCount());
      break;
  }
  final long totalTermFreq;
  // can't require docs to have > 2B tokens
  long upperBound;
  try {
    upperBound = Math.min(corpus.sumTotalTermFreq(), Math.multiplyExact(docFreq, Integer.MAX_VALUE));
  } catch (ArithmeticException overflow) {
    upperBound = corpus.sumTotalTermFreq();
  }
  if (corpus.sumTotalTermFreq() == corpus.sumDocFreq()) {
    // omitTF
    totalTermFreq = docFreq;
  } else {
    switch (random.nextInt(3)) {
      case 0:
        // no repetition
        totalTermFreq = docFreq;
        break;
      case 1:
        // maximum repetition
        totalTermFreq = upperBound;
        break;
      default:
        // random repetition
        totalTermFreq = TestUtil.nextLong(random, docFreq, upperBound);
        break;
    }
  }
  return new TermStatistics(TERM, docFreq, totalTermFreq);
}
Example 20
Source File: TestAllFilesDetectBitFlips.java (from lucene-solr, Apache License 2.0, 4 votes)

private void corruptFile(Directory dir, String victim) throws IOException {
  try (BaseDirectoryWrapper dirCopy = newDirectory()) {
    dirCopy.setCheckIndexOnClose(false);

    long victimLength = dir.fileLength(victim);
    long flipOffset = TestUtil.nextLong(random(), 0, victimLength - 1);

    if (VERBOSE) {
      System.out.println("TEST: now corrupt file " + victim + " by changing byte at offset " + flipOffset + " (length= " + victimLength + ")");
    }

    for (String name : dir.listAll()) {
      if (name.equals(victim) == false) {
        dirCopy.copyFrom(dir, name, name, IOContext.DEFAULT);
      } else {
        try (IndexOutput out = dirCopy.createOutput(name, IOContext.DEFAULT);
             IndexInput in = dir.openInput(name, IOContext.DEFAULT)) {
          out.copyBytes(in, flipOffset);
          out.writeByte((byte) (in.readByte() + TestUtil.nextInt(random(), 0x01, 0xFF)));
          out.copyBytes(in, victimLength - flipOffset - 1);
        }
        try (IndexInput in = dirCopy.openInput(name, IOContext.DEFAULT)) {
          try {
            CodecUtil.checksumEntireFile(in);
            System.out.println("TEST: changing a byte in " + victim + " did not update the checksum)");
            return;
          } catch (CorruptIndexException e) {
            // ok
          }
        }
      }
      dirCopy.sync(Collections.singleton(name));
    }

    // corruption must be detected
    expectThrowsAnyOf(Arrays.asList(CorruptIndexException.class, IndexFormatTooOldException.class, IndexFormatTooNewException.class),
        () -> {
          try (IndexReader reader = DirectoryReader.open(dirCopy)) {
            for (LeafReaderContext context : reader.leaves()) {
              context.reader().checkIntegrity();
            }
          }
        }
    );
  }
}