Method: put(Object, Object, Descriptors)
/*
 * JOPA
 * Copyright (C) 2024 Czech Technical University in Prague
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 3.0 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library.
 */
package cz.cvut.kbss.jopa.sessions.cache;

import cz.cvut.kbss.jopa.model.JOPAPersistenceProperties;
import cz.cvut.kbss.jopa.model.descriptors.Descriptor;
import cz.cvut.kbss.jopa.sessions.descriptor.LoadStateDescriptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.URI;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Consumer;

/**
 * This is a fixed-size second-level cache implementation with an LRU eviction policy.
 * <p>
 * When the capacity is reached, the least recently used entry is removed from the cache.
 */
public class LruCacheManager implements CacheManager {

    private static final Logger LOG = LoggerFactory.getLogger(LruCacheManager.class);

    /**
     * Default cache size limit in number of entries.
     */
    public static final int DEFAULT_CAPACITY = 512;

    private final int capacity;

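    // All access to the entity cache is guarded by a read-write lock: lookups (get, contains, getLoadStateDescriptor)
    // acquire the read lock, while modifications (add, evict, evictAll) acquire the write lock.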
    private final Lock readLock;
    private final Lock writeLock;

    private LruEntityCache entityCache;

    private Set<Class<?>> inferredClasses;

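    /**
     * Creates a cache manager with the default capacity ({@link #DEFAULT_CAPACITY}).
     */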
    LruCacheManager() {
        this(Collections.emptyMap());
    }

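    /**
     * Creates a cache manager with capacity resolved from the specified configuration.
     * <p>
     * If {@link JOPAPersistenceProperties#LRU_CACHE_CAPACITY} is absent or its value is invalid,
     * {@link #DEFAULT_CAPACITY} is used instead. Illustrative sketch of a direct (package-local) instantiation
     * with a custom capacity:
     * <pre>{@code
     * final Map<String, String> props = Map.of(JOPAPersistenceProperties.LRU_CACHE_CAPACITY, "1024");
     * final LruCacheManager cacheManager = new LruCacheManager(props);
     * }</pre>
     *
     * @param properties Configuration properties, not {@code null}
     */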
    LruCacheManager(Map<String, String> properties) {
        Objects.requireNonNull(properties);
        this.capacity = properties.containsKey(JOPAPersistenceProperties.LRU_CACHE_CAPACITY) ?
                resolveCapacitySetting(properties) : DEFAULT_CAPACITY;
        final ReadWriteLock rwLock = new ReentrantReadWriteLock();
        this.readLock = rwLock.readLock();
        this.writeLock = rwLock.writeLock();
        this.entityCache = new LruEntityCache(capacity);
    }

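    /**
     * Parses the configured LRU cache capacity, falling back to {@link #DEFAULT_CAPACITY} when the value is
     * not a number or is not positive.
     */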
    private static int resolveCapacitySetting(Map<String, String> properties) {
        int capacitySetting = DEFAULT_CAPACITY;
        try {
            capacitySetting = Integer.parseInt(properties.get(JOPAPersistenceProperties.LRU_CACHE_CAPACITY));
            if (capacitySetting <= 0) {
                LOG.warn("Invalid LRU cache capacity value {}. Using default value.", capacitySetting);
                capacitySetting = DEFAULT_CAPACITY;
            }
        } catch (NumberFormatException e) {
            LOG.error("Unable to parse LRU cache capacity setting. Using default capacity {}.", DEFAULT_CAPACITY);
        }
        return capacitySetting;
    }

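    /**
     * Gets the configured capacity of this cache (the maximum number of entries).
     *
     * @return Cache capacity
     */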
    int getCapacity() {
        return capacity;
    }

    @Override
    public void add(Object identifier, Object entity, Descriptors descriptors) {
        Objects.requireNonNull(identifier);
        Objects.requireNonNull(entity);
        Objects.requireNonNull(descriptors);

        writeLock.lock();
        try {
            entityCache.put(identifier, entity, descriptors);
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public <T> T get(Class<T> cls, Object identifier, Descriptor descriptor) {
        if (cls == null || identifier == null || descriptor == null) {
            return null;
        }
        readLock.lock();
        try {
            return entityCache.get(cls, identifier, descriptor);
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public LoadStateDescriptor<?> getLoadStateDescriptor(Object instance) {
        if (instance == null) {
            return null;
        }
        readLock.lock();
        try {
            return entityCache.getLoadStateDescriptor(instance);
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public void evictInferredObjects() {
        writeLock.lock();
        try {
            getInferredClasses().forEach(this::evict);
        } finally {
            writeLock.unlock();
        }
    }

    private Set<Class<?>> getInferredClasses() {
        if (inferredClasses == null) {
            return Collections.emptySet();
        }
        return inferredClasses;
    }

    @Override
    public void setInferredClasses(Set<Class<?>> inferredClasses) {
        this.inferredClasses = inferredClasses;
    }

    @Override
    public void close() {
        evictAll();
    }

    @Override
    public boolean contains(Class<?> cls, Object identifier, Descriptor descriptor) {
        if (cls == null || identifier == null || descriptor == null) {
            return false;
        }
        readLock.lock();
        try {
            return entityCache.contains(cls, identifier, descriptor);
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public void evict(Class<?> cls, Object identifier, URI context) {
        Objects.requireNonNull(cls);
        Objects.requireNonNull(identifier);

        writeLock.lock();
        try {
            entityCache.evict(cls, identifier, context);
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public void evict(Class<?> cls) {
        Objects.requireNonNull(cls);

        writeLock.lock();
        try {
            entityCache.evict(cls);
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public void evict(URI context) {
        writeLock.lock();
        try {
            entityCache.evict(context);
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public void evictAll() {
        writeLock.lock();
        try {
            this.entityCache = new LruEntityCache(capacity);
        } finally {
            writeLock.unlock();
        }
    }

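    /**
     * Entity cache that tracks access order of its entries in a backing {@link LruCache}.
     * <p>
     * The {@code LruCache} is given this instance as a removal callback, so when it drops an entry (per the LRU
     * policy described on the outer class), {@link #accept(LruCache.CacheNode)} evicts the corresponding entity
     * from the cache structures inherited from {@code EntityCache}.
     */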
    static final class LruEntityCache extends EntityCache implements Consumer<LruCache.CacheNode> {

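        // The LruCache serves only to maintain LRU ordering of cache keys; the associated value is irrelevant,
        // so a null placeholder is stored with every key.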
        private static final Object NULL_VALUE = null;

        private final LruCache cache;

        LruEntityCache(int capacity) {
            this.cache = new LruCache(capacity, this);
        }

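        // Removal callback invoked by the backing LruCache; propagates the removal to the inherited cache structures.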
        @Override
        public void accept(LruCache.CacheNode cacheNode) {
            super.evict(cacheNode.getCls(), cacheNode.getIdentifier(), cacheNode.getContext());
        }

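        // Entities whose repository descriptor is not cacheable are skipped. Cached entries are keyed by
        // (context, entity class, identifier) in the backing LruCache.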
        @Override
        void put(Object identifier, Object entity, Descriptors descriptors) {
            if (!isCacheable(descriptors.repositoryDescriptor())) {
                return;
            }
            final URI ctx = descriptors.repositoryDescriptor().getSingleContext().orElse(defaultContext);
            super.put(identifier, entity, descriptors);
            cache.put(new LruCache.CacheNode(ctx, entity.getClass(), identifier), NULL_VALUE);
        }

        @Override
        <T> T get(Class<T> cls, Object identifier, Descriptor descriptor) {
            return getInternal(cls, identifier, descriptor,
                    ctx -> cache.get(new LruCache.CacheNode(ctx, cls, identifier)));
        }

        @Override
        void evict(Class<?> cls, Object identifier, URI context) {
            final URI ctx = context != null ? context : defaultContext;
            super.evict(cls, identifier, ctx);
            cache.remove(new LruCache.CacheNode(ctx, cls, identifier));
        }

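        // Evicts all entries cached in the specified context (the default context when the argument is null),
        // removing them from both the inherited repository structure and the LRU record.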
        @Override
        void evict(URI context) {
            final URI ctx = context != null ? context : defaultContext;
            if (!repoCache.containsKey(ctx)) {
                return;
            }
            final Map<Object, Map<Class<?>, Object>> ctxContent = repoCache.get(ctx);
            for (Map.Entry<Object, Map<Class<?>, Object>> e : ctxContent.entrySet()) {
                e.getValue().forEach((cls, instance) -> {
                    descriptors.remove(instance);
                    cache.remove(new LruCache.CacheNode(ctx, cls, e.getKey()));
                });
            }
            ctxContent.clear();
            // Remove the whole context map
            repoCache.remove(ctx);
        }

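        // Evicts all instances of the specified class across all contexts, dropping identifier and context maps
        // that become empty in the process.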
        @Override
        void evict(Class<?> cls) {
            final Iterator<Map.Entry<URI, Map<Object, Map<Class<?>, Object>>>> repoIt = repoCache.entrySet().iterator();
            while (repoIt.hasNext()) {
                final Map.Entry<URI, Map<Object, Map<Class<?>, Object>>> e = repoIt.next();
                final URI ctx = e.getKey();
                final Iterator<Map.Entry<Object, Map<Class<?>, Object>>> it = e.getValue().entrySet().iterator();
                while (it.hasNext()) {
                    final Map.Entry<Object, Map<Class<?>, Object>> idEntry = it.next();
                    final Object instance = idEntry.getValue().remove(cls);
                    if (instance != null) {
                        descriptors.remove(instance);
                    }
                    cache.remove(new LruCache.CacheNode(ctx, cls, idEntry.getKey()));
                    // Remove the whole identifier-based map if the removed node was the last one
                    if (idEntry.getValue().isEmpty()) {
                        it.remove();
                    }
                }
                // Remove the whole context map if the removed node was the last one
                if (e.getValue().isEmpty()) {
                    repoIt.remove();
                }
            }
        }
    }
}