Java Code Examples for org.infinispan.Cache#get()
The following examples show how to use org.infinispan.Cache#get(). Each example lists its source file, the project it comes from, and that project's license.
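As a quick orientation before the project examples, here is a minimal, self-contained sketch of Cache#get() against an embedded cache. It is not taken from any of the projects below; the cache name "example" and the keys and values are illustrative.

import org.infinispan.Cache;
import org.infinispan.configuration.cache.ConfigurationBuilder;
import org.infinispan.manager.DefaultCacheManager;

public class CacheGetSketch {

    public static void main(String[] args) {
        // Local, in-memory cache manager; cache name and entries are illustrative.
        DefaultCacheManager cacheManager = new DefaultCacheManager();
        try {
            cacheManager.defineConfiguration("example", new ConfigurationBuilder().build());
            Cache<String, String> cache = cacheManager.getCache("example");

            cache.put("key", "value");
            String value = cache.get("key");      // "value"
            String missing = cache.get("other");  // get() returns null for absent keys

            System.out.println(value + " / " + missing);
        } finally {
            cacheManager.stop();
        }
    }
}

As with java.util.Map, get() returns the stored value or null when the key is absent.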
Example 1
Source File: ConcurrencyVersioningTest.java From keycloak with Apache License 2.0 | 6 votes |
/**
 * Test that if a put of an existing key is removed after the put and before tx commit, it is evicted
 *
 * @throws Exception
 */
@Test
public void testGetRemovePutEternalOnExisting() throws Exception {
    final DefaultCacheManager cacheManager = getVersionedCacheManager();
    ExecutorService executor = Executors.newSingleThreadExecutor();
    RemoveThread removeThread = new RemoveThread(cacheManager);

    Cache<String, String> cache = cacheManager.getCache(InfinispanConnectionProvider.REALM_CACHE_NAME);
    cache.put("key", "value0");

    startBatch(cache);
    cache.get("key");
    executor.execute(removeThread);
    cache.putForExternalRead("key", "value1");
    removeThread.getLatch().await();
    try {
        endBatch(cache);
        // Assert.fail("Write skew should be detected");
    } catch (Exception e) {
    }

    Assert.assertNull(cache.get("key"));
    Assert.assertTrue(removeThread.isSuccess());
}
Example 2
Source File: ConcurrencyVersioningTest.java From keycloak with Apache License 2.0 | 6 votes |
/**
 * Test that if a put of an existing key is removed after the put and before tx commit, it is evicted
 *
 * @throws Exception
 */
@Test
public void testGetRemovePutOnExisting() throws Exception {
    final DefaultCacheManager cacheManager = getVersionedCacheManager();
    ExecutorService executor = Executors.newSingleThreadExecutor();
    RemoveThread removeThread = new RemoveThread(cacheManager);

    Cache<String, String> cache = cacheManager.getCache(InfinispanConnectionProvider.REALM_CACHE_NAME);
    cache.put("key", "value0");

    startBatch(cache);
    cache.get("key");
    executor.execute(removeThread);
    removeThread.getLatch().await();
    cache.put("key", "value1");
    try {
        endBatch(cache);
        Assert.fail("Write skew should be detected");
    } catch (Exception e) {
    }

    Assert.assertNull(cache.get("key"));
    Assert.assertTrue(removeThread.isSuccess());
}
Example 3
Source File: ConcurrencyVersioningTest.java From keycloak with Apache License 2.0 | 6 votes |
/**
 * Tests that if remove executes before put, then put still succeeds.
 *
 * @throws Exception
 */
@Test
public void testGetRemovePutOnNonExisting() throws Exception {
    final DefaultCacheManager cacheManager = getVersionedCacheManager();
    ExecutorService executor = Executors.newSingleThreadExecutor();
    RemoveThread removeThread = new RemoveThread(cacheManager);

    Cache<String, String> cache = cacheManager.getCache(InfinispanConnectionProvider.REALM_CACHE_NAME);
    cache.remove("key");

    startBatch(cache);
    cache.get("key");
    executor.execute(removeThread);
    removeThread.getLatch().await();
    cache.putForExternalRead("key", "value1");
    endBatch(cache);

    Assert.assertEquals(cache.get("key"), "value1");
    Assert.assertTrue(removeThread.isSuccess());
}
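The startBatch(cache) and endBatch(cache) calls in Examples 1, 2, and 3 are helper methods of the test class and are not shown on this page. A minimal sketch of what such helpers could look like, assuming the underlying cache is configured with invocation batching enabled, is:

// Hypothetical helpers; the actual Keycloak test code may differ.
// They delegate to Infinispan's batching API, which requires
// invocationBatching().enable() in the cache configuration.
private static void startBatch(Cache<?, ?> cache) {
    cache.startBatch();
}

private static void endBatch(Cache<?, ?> cache) {
    // Passing 'true' attempts to commit the batch; conflicts such as
    // write skew would be expected to surface here.
    cache.endBatch(true);
}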
Example 4
Source File: TestPersistence.java From hacep with Apache License 2.0 | 5 votes |
private void checkKey(String key, Cache<String, Object> cacheDeserialized, Cache<String, Object> cacheDeserialized2, Cache<String, Object> cacheDeserialized3) {
    logger.info("Checking key: " + key);
    long keyLong = Long.parseLong(key);

    Object o = cacheDeserialized.get(key);
    //XXX: needed waiting for https://issues.jboss.org/browse/ISPN-9200
    if (o == null) {
        o = cacheDeserialized2.get(key);
    }
    if (o == null) {
        o = cacheDeserialized3.get(key);
    }
    ///////////////////////////////////////////////////////////////////

    Assert.assertTrue(o instanceof HAKieSerializedSession);
    HAKieSerializedSession haKieSerializedSession = (HAKieSerializedSession) o;
    HAKieSession sessionRebuilt = haKieSerializedSession.rebuild();

    sessionRebuilt.insert(generateFactTenSecondsAfter(keyLong, 0L));
    cacheDeserialized.put(key, sessionRebuilt);

    sessionRebuilt.insert(generateFactTenSecondsAfter(keyLong, 0L));
    cacheDeserialized.put(key, sessionRebuilt);

    // 30 sec after - lock should be expired
    sessionRebuilt.insert(generateFactTenSecondsAfter(keyLong, 0L));
    cacheDeserialized.put(key, sessionRebuilt);

    // And inserted again by this fact (expires in 25 sec)
    sessionRebuilt.insert(generateFactTenSecondsAfter(keyLong, 10L));
    cacheDeserialized.put(key, sessionRebuilt);
}
Example 5
Source File: L1SerializationIssueTest.java From keycloak with Apache License 2.0 | 5 votes |
private void readItems(Cache<String, Object> cache) {
    long start = System.currentTimeMillis();

    for (int i = 0; i < ITEMS_COUNT; i++) {
        String key = "key-" + i;
        cache.get(key);
    }

    logger.infof("Read %d items in %d ms", ITEMS_COUNT, System.currentTimeMillis() - start);
}
Example 6
Source File: OfflinePersistentUserSessionLoader.java From keycloak with Apache License 2.0 | 5 votes |
@Override
public boolean isFinished(BaseCacheInitializer initializer) {
    Cache<String, Serializable> workCache = initializer.getWorkCache();
    Boolean sessionsLoaded = (Boolean) workCache.get(PERSISTENT_SESSIONS_LOADED);

    if (sessionsLoaded != null && sessionsLoaded) {
        log.debugf("Persistent sessions loaded already.");
        return true;
    } else {
        log.debugf("Persistent sessions not yet loaded.");
        return false;
    }
}
Example 7
Source File: InfinispanKeycloakTransaction.java From keycloak with Apache License 2.0 | 5 votes |
public <K, V> V get(Cache<K, V> cache, K key) {
    Object taskKey = getTaskKey(cache, key);
    CacheTask current = tasks.get(taskKey);
    if (current != null) {
        if (current instanceof CacheTaskWithValue) {
            return ((CacheTaskWithValue<V>) current).getValue();
        }
        return null;
    }

    // Should we have per-transaction cache for lookups?
    return cache.get(key);
}
Example 8
Source File: InfinispanSingletonCacheManagerDirectoryProvider.java From wallride with Apache License 2.0 | 5 votes |
public Address getLockOwner(String indexName, int affinityId, String lockName) {
    FileCacheKey fileCacheKey = new FileCacheKey(indexName, lockName, affinityId);
    Cache<?, Address> lockCache = cacheManager.getCache(lockingCacheName);
    Address address = lockCache.get(fileCacheKey);
    log.debugf("Lock owner for %s: %s", fileCacheKey, address);
    return address;
}
Example 9
Source File: MyResource.java From thorntail with Apache License 2.0 | 5 votes |
@GET
@Produces("text/plain")
public String get() throws Exception {
    EmbeddedCacheManager cacheContainer =
            (EmbeddedCacheManager) new InitialContext().lookup("java:jboss/infinispan/container/server");
    Cache<String, String> cache = cacheContainer.getCache("default");
    if (cache.keySet().contains(key)) {
        return (String) cache.get(key);
    }
    String result = UUID.randomUUID().toString();
    cache.put(key, result);
    return result;
}
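The keySet().contains(key) check above is not atomic with the following get(). A hypothetical rewrite of the same read-or-populate logic, not part of the Thorntail example, checks the get() result for null and uses putIfAbsent(), which Infinispan's Cache inherits from ConcurrentMap:

// Hypothetical alternative to the keySet().contains(key) pattern above.
String cached = cache.get(key);
if (cached != null) {
    return cached;
}
String generated = UUID.randomUUID().toString();
// putIfAbsent() keeps the first value if a concurrent request stored one already.
String previous = cache.putIfAbsent(key, generated);
return previous != null ? previous : generated;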
Example 10
Source File: ClusterTest.java From hacep with Apache License 2.0 | 5 votes |
@Test
public void testEmptyHASession() {
    LOGGER.info("Start test empty HASessionID");
    System.setProperty("grid.buffer", "10");

    RulesConfigurationTestImpl rulesConfigurationTest = RulesTestBuilder.buildV1();
    rulesConfigurationTest.registerChannel("additions", additionsChannel, replayChannel);

    RulesManager rulesManager = new RulesManager(rulesConfigurationTest);
    rulesManager.start(null, null, null);

    Cache<String, Object> cache1 = startNodes(2, rulesManager).getCache();
    Cache<String, Object> cache2 = startNodes(2, rulesManager).getCache();

    reset(replayChannel);

    String key = "1";
    HAKieSession session1 = new HAKieSession(rulesManager, executorService);

    cache1.put(key, session1);

    HAKieSession serializedSessionCopy = (HAKieSession) cache2.get(key);

    Assert.assertNotNull(serializedSessionCopy);
    Assert.assertTrue(serializedSessionCopy.isSerialized());

    reset(replayChannel, additionsChannel);

    HAKieSession session2 = ((HAKieSerializedSession) serializedSessionCopy).rebuild();
    Assert.assertNotNull(session2);

    LOGGER.info("End test empty HASessionID");
    rulesManager.stop();
}
Example 11
Source File: TestServlet.java From quarkus with Apache License 2.0 | 5 votes |
@Path("GET/{cacheName}/{id}") @GET @Produces(MediaType.TEXT_PLAIN) public String get(@PathParam("cacheName") String cacheName, @PathParam("id") String id) { log.info("Retrieving " + id + " from " + cacheName); Cache<byte[], byte[]> cache = emc.getCache(cacheName); byte[] result = cache.get(id.getBytes(StandardCharsets.UTF_8)); return result == null ? "null" : new String(result, StandardCharsets.UTF_8); }
Example 12
Source File: InvalidationMode.java From infinispan-simple-tutorials with Apache License 2.0 | 4 votes |
private static void getKey(Scanner scanner, Cache<String, String> cache) {
    System.out.println("# g -> Get key \n");
    String key = readUserInput("Enter a key: ", scanner);
    String value = cache.get(key);
    System.out.println(String.format("%s key value is %s", key, value));
}
Example 13
Source File: TranscodingHelper.java From hibernate-demos with Apache License 2.0 | 4 votes |
public static <K, V> V getWithTranscoding(K key, Cache<K, V> cache) {
    Object protoValue = cache.get( keyToProto( key, cache ) );
    return valueToJava( cache, protoValue );
}
Example 14
Source File: ClusterTest.java From hacep with Apache License 2.0 | 4 votes |
@Test
public void testHASessionWithMaxBuffer() {
    System.setProperty("grid.buffer", "2");
    LOGGER.info("Start test HASessionID with max buffer 2");

    RulesConfigurationTestImpl rulesConfigurationTest = RulesTestBuilder.buildV1();
    rulesConfigurationTest.registerChannel("additions", additionsChannel, replayChannel);

    RulesManager rulesManager = new RulesManager(rulesConfigurationTest);
    rulesManager.start(null, null, null);

    Cache<String, HAKieSession> cache1 = startNodes(2, rulesManager).getCache();
    Cache<String, HAKieSession> cache2 = startNodes(2, rulesManager).getCache();

    reset(replayChannel, additionsChannel);

    String key = "3";
    HAKieSession session1 = new HAKieSession(rulesManager, executorService);

    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 10L));
    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 20L));
    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 30L));
    cache1.put(key, session1);

    InOrder inOrder = inOrder(additionsChannel);
    inOrder.verify(additionsChannel, times(1)).send(eq(10L));
    inOrder.verify(additionsChannel, times(1)).send(eq(30L));
    inOrder.verify(additionsChannel, times(1)).send(eq(60L));
    inOrder.verifyNoMoreInteractions();
    // Double check on total number of calls to the method send
    verify(additionsChannel, times(3)).send(any());

    Object serializedSessionCopy = cache2.get(key);

    Assert.assertNotNull(serializedSessionCopy);
    Assert.assertTrue(HAKieSerializedSession.class.isAssignableFrom(serializedSessionCopy.getClass()));

    reset(replayChannel, additionsChannel);

    HAKieSession session2 = ((HAKieSerializedSession) serializedSessionCopy).rebuild();
    session2.insert(generateFactTenSecondsAfter(1L, 40L));

    verify(additionsChannel, times(1)).send(eq(100L));
    // Double check on total number of calls to the method send
    verify(additionsChannel, times(1)).send(any());

    LOGGER.info("End test HASessionID with max buffer 2");
    rulesManager.stop();
}
Example 15
Source File: ClusterTest.java From hacep with Apache License 2.0 | 4 votes |
@Test
public void testNonEmptyHASession() {
    System.setProperty("grid.buffer", "10");
    LOGGER.info("Start test non empty HASessionID");

    RulesConfigurationTestImpl rulesConfigurationTest = RulesTestBuilder.buildV1();
    rulesConfigurationTest.registerChannel("additions", additionsChannel, replayChannel);

    RulesManager rulesManager = new RulesManager(rulesConfigurationTest);
    rulesManager.start(null, null, null);

    Cache<String, Object> cache1 = startNodes(2, rulesManager).getCache();
    Cache<String, Object> cache2 = startNodes(2, rulesManager).getCache();

    String key = "2";
    HAKieSession session1 = new HAKieSession(rulesManager, executorService);

    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 10L));
    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 20L));
    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 30L));
    cache1.put(key, session1);

    verify(replayChannel, never()).send(any());

    InOrder inOrder = inOrder(additionsChannel);
    inOrder.verify(additionsChannel, times(1)).send(eq(10L));
    inOrder.verify(additionsChannel, times(1)).send(eq(30L));
    inOrder.verify(additionsChannel, times(1)).send(eq(60L));
    inOrder.verifyNoMoreInteractions();
    // Double check on total number of calls to the method send
    verify(additionsChannel, times(3)).send(any());

    Object serializedSessionCopy = cache2.get(key);

    Assert.assertNotNull(serializedSessionCopy);
    Assert.assertTrue(HAKieSerializedSession.class.isAssignableFrom(serializedSessionCopy.getClass()));

    reset(replayChannel, additionsChannel);

    HAKieSession session2 = ((HAKieSerializedSession) serializedSessionCopy).rebuild();
    session2.insert(generateFactTenSecondsAfter(1L, 40L));

    inOrder = inOrder(replayChannel);
    inOrder.verify(replayChannel, times(1)).send(eq(60L));
    inOrder.verifyNoMoreInteractions();
    // Double check on total number of calls to the method send
    verify(replayChannel, times(1)).send(any());

    verify(additionsChannel, atMost(1)).send(any());
    verify(additionsChannel, times(1)).send(eq(100L));

    LOGGER.info("End test non empty HASessionID");
    rulesManager.stop();
}
Example 16
Source File: TestModifiedRules.java From hacep with Apache License 2.0 | 4 votes |
@Test
public void testNonEmptyHASession() throws IOException, URISyntaxException {
    System.setProperty("grid.buffer", "10");
    logger.info("Start test modified rules");

    RulesConfigurationTestImpl rulesConfigurationTest = RulesTestBuilder.buildV1();
    rulesConfigurationTest.registerChannel("additions", additionsChannel, replayChannel);

    RulesManager rulesManager = new RulesManager(rulesConfigurationTest);
    rulesManager.start(null, null, null);

    Cache<String, Object> cache1 = startNodes(2, rulesManager).getCache();
    Cache<String, Object> cache2 = startNodes(2, rulesManager).getCache();

    String key = "2";
    HAKieSession session1 = new HAKieSession(rulesManager, executorService);

    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 10L));
    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 20L));
    cache1.put(key, session1);

    verify(replayChannel, never()).send(any());

    InOrder inOrder = inOrder(additionsChannel);
    inOrder.verify(additionsChannel, times(1)).send(eq(10L));
    inOrder.verify(additionsChannel, times(1)).send(eq(30L));
    inOrder.verifyNoMoreInteractions();
    // Double check on total number of calls to the method send
    verify(additionsChannel, times(2)).send(any());

    HAKieSession serializedSessionCopy = (HAKieSession) cache2.get(key);

    Assert.assertNotNull(serializedSessionCopy);
    Assert.assertTrue(serializedSessionCopy.isSerialized());

    reset(replayChannel, additionsChannel);

    ((HAKieSerializedSession) serializedSessionCopy).createSnapshot();
    ((HAKieSerializedSession) serializedSessionCopy).waitForSnapshotToComplete();

    inOrder = inOrder(replayChannel);
    inOrder.verify(replayChannel, times(1)).send(eq(30L));
    inOrder.verifyNoMoreInteractions();

    reset(replayChannel, additionsChannel);

    rulesConfigurationTest = RulesTestBuilder.buildV1();
    rulesConfigurationTest.registerChannel("additions", otherAdditionsChannel, otherReplayChannel);

    rulesManager = new RulesManager(rulesConfigurationTest);
    rulesManager.start(null, null, null);

    byte[] serializedSession = ((HAKieSerializedSession) serializedSessionCopy).getSerializedSession();
    HAKieSession session2 = new HAKieSerializedSession(rulesManager, executorService,
            rulesConfigurationTest.getVersion(), serializedSession).rebuild();

    String version = RulesTestBuilder.buildV2().getVersion();
    rulesManager.updateToVersion(version);

    session2.insert(generateFactTenSecondsAfter(1L, 30L));

    verify(replayChannel, never()).send(any());
    verify(additionsChannel, never()).send(any());

    verify(otherReplayChannel, never()).send(any());
    verify(otherAdditionsChannel, atMost(1)).send(any());
    verify(otherAdditionsChannel, times(1)).send(eq(120L));

    logger.info("End test modified rules");
    rulesManager.stop();
}
Example 17
Source File: DistributedCacheWriteSkewTest.java From keycloak with Apache License 2.0 | 4 votes |
public static void main(String[] args) throws Exception {
    Cache<String, UserSessionEntity> cache1 = createManager("node1").getCache(InfinispanConnectionProvider.USER_SESSION_CACHE_NAME);
    Cache<String, UserSessionEntity> cache2 = createManager("node2").getCache(InfinispanConnectionProvider.USER_SESSION_CACHE_NAME);

    // Create initial item
    UserSessionEntity session = new UserSessionEntity();
    session.setId("123");
    session.setRealmId("foo");
    session.setBrokerSessionId("!23123123");
    session.setBrokerUserId(null);
    session.setUser("foo");
    session.setLoginUsername("foo");
    session.setIpAddress("123.44.143.178");
    session.setStarted(Time.currentTime());
    session.setLastSessionRefresh(Time.currentTime());

    AuthenticatedClientSessionEntity clientSession = new AuthenticatedClientSessionEntity(UUID.randomUUID());
    clientSession.setAuthMethod("saml");
    clientSession.setAction("something");
    clientSession.setTimestamp(1234);
    session.getAuthenticatedClientSessions().put(CLIENT_1_UUID.toString(), clientSession.getId());

    cache1.put("123", session);
    //cache1.replace("123", session);

    // Create 2 workers for concurrent write and start them
    Worker worker1 = new Worker(1, cache1);
    Worker worker2 = new Worker(2, cache2);

    long start = System.currentTimeMillis();

    System.out.println("Started clustering test");

    worker1.start();
    //worker1.join();
    worker2.start();

    worker1.join();
    worker2.join();

    long took = System.currentTimeMillis() - start;
    session = cache1.get("123");
    System.out.println("Took: " + took + " ms. Notes count: " + session.getNotes().size()
            + ", failedReplaceCounter: " + failedReplaceCounter.get());

    // JGroups statistics
    JChannel channel = (JChannel) ((JGroupsTransport) cache1.getAdvancedCache().getRpcManager().getTransport()).getChannel();
    System.out.println("Sent MB: " + channel.getSentBytes() / 1000000
            + ", sent messages: " + channel.getSentMessages()
            + ", received MB: " + channel.getReceivedBytes() / 1000000
            + ", received messages: " + channel.getReceivedMessages());

    // Kill JVM
    cache1.stop();
    cache2.stop();
    cache1.getCacheManager().stop();
    cache2.getCacheManager().stop();

    System.out.println("Managers killed");
}
Example 18
Source File: ClusterTest.java From hacep with Apache License 2.0 | 2 votes |
@Test
public void testHASessionAddNode() {
    System.setProperty("grid.buffer", "10");
    LOGGER.info("Start test HASessionID add node");

    RulesConfigurationTestImpl rulesConfigurationTest = RulesTestBuilder.buildV1();
    rulesConfigurationTest.registerChannel("additions", additionsChannel, replayChannel);

    RulesManager rulesManager = new RulesManager(rulesConfigurationTest);
    rulesManager.start(null, null, null);

    Cache<String, HAKieSession> cache1 = startNodes(2, rulesManager).getCache();

    reset(replayChannel, additionsChannel);

    String key = "3";
    HAKieSession session1 = new HAKieSession(rulesManager, executorService);

    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 10L));
    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 20L));
    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 30L));
    cache1.put(key, session1);

    verify(replayChannel, never()).send(any());

    InOrder inOrder = inOrder(additionsChannel);
    inOrder.verify(additionsChannel, times(1)).send(eq(10L));
    inOrder.verify(additionsChannel, times(1)).send(eq(30L));
    inOrder.verify(additionsChannel, times(1)).send(eq(60L));
    inOrder.verifyNoMoreInteractions();
    // Double check on total number of calls to the method send
    verify(additionsChannel, times(3)).send(any());

    Cache<Key, HAKieSession> cache2 = startNodes(2, rulesManager).getCache();

    Object serializedSessionCopy = cache2.get(key);

    Assert.assertNotNull(serializedSessionCopy);
    Assert.assertTrue(HAKieSession.class.isAssignableFrom(serializedSessionCopy.getClass()));

    reset(replayChannel, additionsChannel);

    HAKieSession session2 = ((HAKieSerializedSession) serializedSessionCopy).rebuild();
    session2.insert(generateFactTenSecondsAfter(1L, 40L));

    verify(replayChannel, never()).send(any());
    verify(additionsChannel, times(1)).send(eq(100L));
    // Double check on total number of calls to the method send
    verify(additionsChannel, times(1)).send(any());

    LOGGER.info("End test HASessionID add node");
    rulesManager.stop();
}
Example 19
Source File: ClusterTest.java From hacep with Apache License 2.0 | 2 votes |
@Test
public void testNonEmptyHASessionRemoveKey() throws InterruptedException {
    System.setProperty("grid.buffer", "10");
    LOGGER.info("Start test non empty HASessionID Remove Key");

    RulesConfigurationTestImpl rulesConfigurationTest = RulesTestBuilder.buildV1();
    rulesConfigurationTest.registerChannel("additions", additionsChannel, replayChannel);

    RulesManager rulesManager = new RulesManager(rulesConfigurationTest);
    rulesManager.start(null, null, null);

    Cache<String, Object> cache1 = startNodes(2, rulesManager).getCache();
    Cache<String, Object> cache2 = startNodes(2, rulesManager).getCache();

    String key = "2";
    HAKieSession session1 = new HAKieSession(rulesManager, executorService);

    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 10L));
    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 20L));
    cache1.put(key, session1);
    session1.insert(generateFactTenSecondsAfter(1L, 30L));
    cache1.put(key, session1);

    Object serializedSessionCopy = cache2.get(key);

    Assert.assertNotNull(serializedSessionCopy);
    Assert.assertTrue(HAKieSerializedSession.class.isAssignableFrom(serializedSessionCopy.getClass()));

    reset(replayChannel, additionsChannel);

    cache1.remove(key);
    cache1.put(key, session1);

    LOGGER.info("End test non empty HASessionID Remove Key");
    rulesManager.stop();
}