net.openhft.chronicle.map.ChronicleMapBuilder Java Examples

The following examples show how to use net.openhft.chronicle.map.ChronicleMapBuilder. They are taken from open-source projects; the originating project, source file, and license are noted above each example.
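Before the project-specific examples, here is a minimal self-contained sketch of the typical builder flow. The key and value types, sizes, map name, and sample data are illustrative assumptions and are not taken from any of the projects below.

import net.openhft.chronicle.map.ChronicleMap;

public class ChronicleMapBuilderSketch {
    public static void main(String[] args) {
        // Minimal sketch with assumed types, sizes and name.
        // For variable-size keys/values such as String, average sizes must be supplied
        // so ChronicleMap can pre-size its off-heap storage.
        try (ChronicleMap<String, String> postalCodes = ChronicleMap
                .of(String.class, String.class)
                .name("postal-codes")       // optional name, shown in errors and logs
                .averageKeySize(16)         // assumed average key length in bytes
                .averageValueSize(8)        // assumed average value length in bytes
                .entries(50_000)            // expected maximum number of entries
                .create()) {
            postalCodes.put("Amsterdam", "1011 AB");
            System.out.println(postalCodes.get("Amsterdam"));
        }
        // Swapping create() for createPersistedTo(new File(path)) memory-maps the map
        // onto a file, so it survives restarts and can be shared between processes.
    }
}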
Example #1
Source File: OffHeapVarBitMetricStore.java    From yuvi with Apache License 2.0
public OffHeapVarBitMetricStore(long size, int valueSize, String chunkInfo, String dir) {
  this.chunkInfo = chunkInfo;

  ChronicleMapBuilder<LongValue, ByteBuffer> mapBuilder = ChronicleMap
      .of(LongValue.class, ByteBuffer.class)
      .entries(size)
      .averageValueSize(valueSize);

  if (chunkInfo != null && !chunkInfo.isEmpty() && !dir.isEmpty()) {
    File offHeapFile = new File(dir + "/" + offHeapNamePrefix + "_" + chunkInfo);
    try {
      timeSeries = mapBuilder.name(offHeapNamePrefix + "_" + chunkInfo)
          .createPersistedTo(offHeapFile);
    } catch (IOException e) {
      LOG.error("Failed to create an offheap store {} with error {}", offHeapFile, e.getMessage());
      throw new IllegalArgumentException("Failed to create an off heap store.", e);
    }
  } else {
    timeSeries = mapBuilder.name(offHeapNamePrefix).create();
  }
  LOG.info("Created an off heap metric store of size={} valueSize={} chunkInfo={} in dir={}",
      size, valueSize, chunkInfo, dir);
}
 
Example #2
Source File: DoubleArrayTest.java    From Chronicle-Map with Apache License 2.0
@Test
@Ignore("TODO What is HACK???")
public void addToAMap2() {
    DoubleArray.HACK = false;
    DoubleArray a = new DoubleArray(10);
    a.setData(new double[]{1, 2, 3, 4, 5});

    DoubleArray b = new DoubleArray(10);
    b.setData(new double[]{5, 6, 7, 8, 9});

    ChronicleMap<Integer, DoubleArray> proxyMap = ChronicleMapBuilder
            .of(Integer.class, DoubleArray.class)
            .averageValueSize(6 * 8)
            .create();
    proxyMap.put(1, a);
    proxyMap.put(2, b);

    System.out.println(proxyMap.get(1));
    System.out.println(proxyMap.get(2));
    proxyMap.close();
    DoubleArray.HACK = true;
}
 
Example #3
Source File: ChronicleMapStateRepository.java    From synapse with Apache License 2.0
public ChronicleMapStateRepository<V> build() {
    if (chronicleMapBuilder == null) {
        chronicleMapBuilder = ChronicleMapBuilder.of(String.class, clazz)
                .averageKeySize(DEFAULT_KEY_SIZE_BYTES)
                .averageValueSize(DEFAULT_VALUE_SIZE_BYTES)
                .entries(DEFAULT_ENTRY_COUNT);
    }

    boolean doesClassNeedToBeSerialized = clazz != String.class;
    if (!customValueMarshaller && doesClassNeedToBeSerialized) {
        chronicleMapBuilder.valueMarshaller(new ChronicleMapBytesMarshaller<>(objectMapper, clazz));
    }

    return new ChronicleMapStateRepository<>(name, chronicleMapBuilder.create());
}
 
Example #4
Source File: ExternalizableTest.java    From Chronicle-Map with Apache License 2.0
@Test
public void externalizable() throws IOException {
    String path = OS.TARGET + "/test-" + System.nanoTime() + ".map";
    new File(path).deleteOnExit();
    try (ChronicleMap<Long, SomeClass> storage = ChronicleMapBuilder
            .of(Long.class, SomeClass.class)
            .averageValueSize(128)
            .entries(128)
            .createPersistedTo(new File(path))) {
        SomeClass value = new SomeClass();
        value.hits.add("one");
        value.hits.add("two");
        storage.put(1L, value);

        SomeClass value2 = storage.get(1L);
        assertEquals(value.hits, value2.hits);
    }
}
 
Example #5
Source File: StringArrayExample.java    From Chronicle-Map with Apache License 2.0
@Test
public void examplePutAndGet() {
    ChronicleMap<Integer, CharSequenceArray> map = ChronicleMapBuilder
            .of(Integer.class, CharSequenceArray.class)
            .entries(100)
            .create();

    // declared here so it can also be reused as the getUsing() buffer below
    CharSequenceArray charSequenceArray = Values.newHeapInstance(CharSequenceArray.class);
    map.put(1, charSequenceArray);

    // compute - change the value in the array
    map.compute(1, this::setToHello);

    // get - read the value
    CharSequence charSequence = map.getUsing(1, charSequenceArray).getCharSequenceWrapperAt(1).getCharSequence();
    System.out.println(charSequence);

    // to string all the values
    System.out.println(map.getUsing(1, charSequenceArray).toString());
}
 
Example #6
Source File: OffHeapByteArrayExampleTest.java    From Chronicle-Map with Apache License 2.0
@BeforeClass
public static void beforeClass() {
    chm = ChronicleMapBuilder
            .of(LongValue.class, ByteArray.class)
            .entries(1000)
            .create();
}
 
Example #7
Source File: ArrayTest.java    From Chronicle-Map with Apache License 2.0
@Test
public void test0() throws IOException, JSONException {
    ClassAliasPool.CLASS_ALIASES.addAlias(MovingAverageArray.class);

    File file = new File(OS.getTarget() + "/pf-PosistionsAndClose-" + System.nanoTime());

    ChronicleMap<Long, MovingAverageArray> mapWrite = ChronicleMap
            .of(Long.class, MovingAverageArray.class)
            .entries(100)
            .averageValue(createSampleWithSize(6, 0))
            .createPersistedTo(file);
    mapWrite.put(1L, createSampleWithSize(6, 1));
    mapWrite.put(2L, createSampleWithSize(4, 2));
    mapWrite.close();

    ChronicleMap<Long, MovingAverageArray> mapRead = ChronicleMapBuilder
            .of(Long.class, MovingAverageArray.class)
            .createPersistedTo(file);
    MovingAverageArray m = mapRead.get(1L);
    assertJSONEqualsAfterPrefix("!MovingAverageArray {\n" +
            "  values: [\n" +
            "    { movingAverage: 0.1, high: 0.1, low: 0.1, stdDev: 0.1 },\n" +
            "    { movingAverage: 1.1, high: 1.1, low: 1.1, stdDev: 1.1 },\n" +
            "    { movingAverage: 2.1, high: 2.1, low: 2.1, stdDev: 2.1 },\n" +
            "    { movingAverage: 3.1, high: 3.1, low: 3.1, stdDev: 3.1 },\n" +
            "    { movingAverage: 4.1, high: 4.1, low: 4.1, stdDev: 4.1 },\n" +
            "    { movingAverage: 5.1, high: 5.1, low: 5.1, stdDev: 5.1 }\n" +
            "  ]\n" +
            "}\n", m.toString());
    MovingAverageArray m2 = mapRead.getUsing(2L, m);
    assertSame(m, m2); // object is recycled, so no objects are created.
    assertJSONEqualsAfterPrefix("!MovingAverageArray {\n" +
            "  values: [\n" +
            "    { movingAverage: 0.2, high: 0.2, low: 0.2, stdDev: 0.2 },\n" +
            "    { movingAverage: 1.2, high: 1.2, low: 1.2, stdDev: 1.2 },\n" +
            "    { movingAverage: 2.2, high: 2.2, low: 2.2, stdDev: 2.2 },\n" +
            "    { movingAverage: 3.2, high: 3.2, low: 3.2, stdDev: 3.2 }\n" +
            "  ]\n" +
            "}\n", m.toString());
}
 
Example #8
Source File: MapCheck.java    From Chronicle-Map with Apache License 2.0
static Map newMap() {
    try {
        return ChronicleMapBuilder.of(Object.class, Object.class).create();
    } catch (Exception e) {
        throw new RuntimeException("Can't instantiate CHM : " + e);
    }
}
 
Example #9
Source File: PingPongCASLeft.java    From Chronicle-Map with Apache License 2.0
static ChronicleMap<String, BondVOInterface> acquireCHM() throws IOException {
    // ensure thread ids are globally unique.
    AffinitySupport.setThreadId();
    return ChronicleMapBuilder.of(String.class, BondVOInterface.class)
            .entries(16)
            .averageKeySize("369604101".length()).create();
}
 
Example #10
Source File: ProcessInstanceLimiter.java    From Chronicle-Map with Apache License 2.0
/**
 * Create a ProcessInstanceLimiter instance backed by the shared file at the
 * given path.
 *
 * @param sharedMapPath - The path to a file which will be used to store the shared
 *                      map (the file need not pre-exist)
 * @param callback      - An instance of the Callback interface, which will receive
 *                      callbacks
 * @throws IOException - if the shared file cannot be created
 */
public ProcessInstanceLimiter(String sharedMapPath, Callback callback) throws IOException {
    this.sharedMapPath = sharedMapPath;
    this.callback = callback;
    ChronicleMapBuilder<String, Data> builder =
            ChronicleMapBuilder.of(String.class, Data.class);
    builder.entries(1000);
    builder.averageKeySize((DEFAULT_PROCESS_NAME + "#").length());
    this.theSharedMap = builder.createPersistedTo(new File(sharedMapPath));
    Thread t = new Thread(this, "ProcessInstanceLimiter updater");
    t.setDaemon(true);
    t.start();
}
 
Example #11
Source File: PointListSerializationTest.java    From Chronicle-Map with Apache License 2.0
@Test
public void testComplexSerialization() {
    try (ChronicleMap<String, A> map = ChronicleMapBuilder
            .of(String.class, A.class)
            .valueMarshaller(AMarshaller.INSTANCE)
            .entries(5)
            .averageKeySize(4)
            .averageValueSize(1000)
            .create()) {
        A obj_A = new A();
        obj_A.str_ = "a";
        obj_A.list_ = new ArrayList<>();
        B b = new B();
        b.str_ = "b";
        obj_A.list_.add(b);
        map.put("KEY1", obj_A);
        map.get("KEY1");
    }
}
 
Example #12
Source File: PortfolioValueTest.java    From Chronicle-Map with Apache License 2.0
@Test
public void test() throws ExecutionException, InterruptedException {
    ChronicleMapBuilder<LongValue, PortfolioAssetInterface> mapBuilder = ChronicleMapBuilder.of(LongValue.class, PortfolioAssetInterface.class).entries(nAssets);

    try (ChronicleMap<LongValue, PortfolioAssetInterface> cache = mapBuilder.create()) {
        createData(cache);

        // Compute multiple times to get a reasonable average compute time
        for (int i = 0; i < nRepetitions; i++) {
            computeValue(cache);
        }
    }
}
 
Example #13
Source File: ChronicleMapTest.java    From Chronicle-Map with Apache License 2.0
static ChronicleMap<Integer, CharSequence> newShmIntString(int size) throws IOException {
    return ChronicleMapBuilder.of(Integer.class, CharSequence.class)
            .averageValueSize(1)
            .entries(size).create();
}
 
Example #14
Source File: ChronicleMapTest.java    From Chronicle-Map with Apache License 2.0
static ChronicleMap<CharSequence, CharSequence> newStrStrMap(int size) throws IOException {
    return ChronicleMapBuilder.of(CharSequence.class, CharSequence.class)
            .averageKeySize(20).averageValueSize(20)
            .entries(size).create();
}
 
Example #15
Source File: VanillaChronicleHash.java    From Chronicle-Map with Apache License 2.0
public VanillaChronicleHash(ChronicleMapBuilder<K, ?> builder) {
    // Version
    dataFileVersion = BuildVersion.version();

    createdOrInMemory = true;

    @SuppressWarnings({"deprecation", "unchecked"})
    ChronicleHashBuilderPrivateAPI<K, ?> privateAPI =
            (ChronicleHashBuilderPrivateAPI<K, ?>) builder.privateAPI();

    // Data model
    SerializationBuilder<K> keyBuilder = privateAPI.keyBuilder();
    keyClass = keyBuilder.tClass;
    keySizeMarshaller = keyBuilder.sizeMarshaller();
    keyReader = keyBuilder.reader();
    keyDataAccess = keyBuilder.dataAccess();

    actualSegments = privateAPI.actualSegments();
    hashSplitting = HashSplitting.forSegments(actualSegments);

    chunkSize = privateAPI.chunkSize();
    maxChunksPerEntry = privateAPI.maxChunksPerEntry();
    actualChunksPerSegmentTier = privateAPI.actualChunksPerSegmentTier();

    // Precomputed offsets and sizes for fast Context init
    segmentHeaderSize = privateAPI.segmentHeaderSize();

    tierHashLookupValueBits = valueBits(actualChunksPerSegmentTier);
    tierHashLookupKeyBits = keyBits(privateAPI.entriesPerSegment(), tierHashLookupValueBits);
    tierHashLookupSlotSize =
            entrySize(tierHashLookupKeyBits, tierHashLookupValueBits);
    if (!privateAPI.aligned64BitMemoryOperationsAtomic() && tierHashLookupSlotSize > 4) {
        throw new IllegalStateException("aligned64BitMemoryOperationsAtomic() == false, " +
                "but hash lookup slot is " + tierHashLookupSlotSize);
    }
    tierHashLookupCapacity = privateAPI.tierHashLookupCapacity();
    maxEntriesPerHashLookup = (long) (tierHashLookupCapacity * MAX_LOAD_FACTOR);
    tierHashLookupInnerSize = tierHashLookupCapacity * tierHashLookupSlotSize;
    tierHashLookupOuterSize = CACHE_LINES.align(tierHashLookupInnerSize, BYTES);

    tierFreeListInnerSize = LONGS.align(
            BYTES.alignAndConvert(actualChunksPerSegmentTier, BITS), BYTES);
    tierFreeListOuterSize = CACHE_LINES.align(tierFreeListInnerSize, BYTES);

    tierEntrySpaceInnerSize = chunkSize * actualChunksPerSegmentTier;
    tierEntrySpaceInnerOffset = privateAPI.segmentEntrySpaceInnerOffset();
    tierEntrySpaceOuterSize = CACHE_LINES.align(
            tierEntrySpaceInnerOffset + tierEntrySpaceInnerSize, BYTES);

    tierSize = tierSize();

    maxExtraTiers = privateAPI.maxExtraTiers();
    tiersInBulk = computeNumberOfTiersInBulk();
    log2TiersInBulk = Maths.intLog2(tiersInBulk);
    tierBulkInnerOffsetToTiers = computeTierBulkInnerOffsetToTiers(tiersInBulk);
    tierBulkSizeInBytes = computeTierBulkBytesSize(tiersInBulk);

    checksumEntries = privateAPI.checksumEntries();

    preShutdownAction = privateAPI.getPreShutdownAction();
    skipCloseOnExitHook = privateAPI.skipCloseOnExitHook();
}
 
Example #16
Source File: StateMachineTutorial.java    From Chronicle-Map with Apache License 2.0
public static void main(String[] args) {
    ChronicleMap<Integer, StateMachineData> map = null;

    try {
        File dataFile = new File(System.getProperty("java.io.tmpdir"), "hft-state-machine");
        map = ChronicleMapBuilder.of(Integer.class, StateMachineData.class)
                .entries(8).create();

        if (args.length > 0) {
            if ("0".equalsIgnoreCase(args[0])) {
                StateMachineData smd =
                        map.acquireUsing(0, new StateMachineData());

                StateMachineState st = smd.getState();
                if (st == StateMachineState.STATE_0) {
                    long start = System.nanoTime();

                    //fire the first state change
                    smd.setStateData(0);
                    smd.setState(StateMachineState.STATE_0, StateMachineState.STATE_1);

                    while (!smd.done()) {
                        // busy wait
                    }

                    long end = System.nanoTime();

                    LOGGER.info("Took {} us for 100 transiction", (end - start) / 1E3);
                }
            } else if ("1".equalsIgnoreCase(args[0])) {
                StateMachineProcessor.runProcessor(
                        map.acquireUsing(0, new StateMachineData()),
                        StateMachineState.STATE_1,
                        StateMachineState.STATE_1_WORKING,
                        StateMachineState.STATE_2);
            } else if ("2".equalsIgnoreCase(args[0])) {
                StateMachineProcessor.runProcessor(
                        map.acquireUsing(0, new StateMachineData()),
                        StateMachineState.STATE_2,
                        StateMachineState.STATE_2_WORKING,
                        StateMachineState.STATE_3);
            } else if ("3".equalsIgnoreCase(args[0])) {
                StateMachineProcessor.runProcessor(
                        map.acquireUsing(0, new StateMachineData()),
                        StateMachineState.STATE_3,
                        StateMachineState.STATE_3_WORKING,
                        StateMachineState.STATE_1);
            } else if ("clean".equalsIgnoreCase(args[0])) {
                LOGGER.info("deleting {}", dataFile.getAbsolutePath());
                dataFile.delete();
            }
        }
    } finally {
        if (map != null) {
            map.close();
        }
    }
}
 
Example #17
Source File: ChronicleMapRetryFailoverPolicy.java    From log4j2-elasticsearch with Apache License 2.0
final ChronicleMapBuilder<CharSequence, ItemSource> defaultChronicleMapBuilder() {
    return ChronicleMap
            .of(CharSequence.class, ItemSource.class)
            .name(getClass().getName());
}
 
Example #18
Source File: ChronicleMapStateRepository.java    From synapse with Apache License 2.0
public Builder<V> withMapBuilder(ChronicleMapBuilder<String, V> val) {
    chronicleMapBuilder = val;
    return this;
}
 
Example #19
Source File: DatasetTrackerChronicle.java    From tds with BSD 3-Clause "New" or "Revised" License
private void open() throws IOException {
  ChronicleMapBuilder<String, DatasetExt> builder = ChronicleMapBuilder.of(String.class, DatasetExt.class)
      .averageValueSize(200).entries(maxDatasets).averageKeySize(averagePathLength);
  datasetMap = builder.createPersistedTo(dbFile);
  changed = false;
}
 
Example #20
Source File: BuildVersion.java    From Chronicle-Map with Apache License 2.0
/**
 * This should be used by everyone who has installed Chronicle Map as a JAR.
 *
 * @return the version read from the manifest, or null if it cannot be read
 */
private static String getVersionFromManifest() {
    return ChronicleMapBuilder.class.getPackage().getImplementationVersion();
}