Add FileCacheManager

2025-12-24 23:47:14 +05:00
parent 0230cae852
commit 47fe15177f
2 changed files with 318 additions and 0 deletions

FileCacheManager.java

@@ -0,0 +1,44 @@
package com.backend.hls.proxy.service.cache;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.springframework.cache.CacheManager;
public class FileCacheManager implements CacheManager {
private final ConcurrentMap<String, TempFileCache> caches = new ConcurrentHashMap<>();
private final String baseDirectory;
public FileCacheManager(String baseDirectory) {
this.baseDirectory = baseDirectory;
try {
Files.createDirectories(Paths.get(baseDirectory));
} catch (IOException e) {
throw new RuntimeException("Failed to create cache directory", e);
}
}
@Override
public org.springframework.cache.Cache getCache(String name) {
return caches.computeIfAbsent(name, cacheName -> {
try {
return new TempFileCache(cacheName,
Paths.get(baseDirectory, cacheName));
} catch (IOException e) {
throw new RuntimeException("Failed to create cache: " + cacheName, e);
}
});
}
@Override
public Collection<String> getCacheNames() {
return caches.keySet();
}
}
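
For context, a minimal wiring sketch, assuming a Spring-style configuration class: it registers FileCacheManager as the application's CacheManager so that each named cache maps to a subdirectory under the configured base directory. The configuration class name, the property key hls.cache.base-dir, and the default path are illustrative assumptions, not part of this commit.

import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Hypothetical configuration class; property key and default path are assumptions
@Configuration
@EnableCaching
public class FileCacheConfig {

    @Bean
    public CacheManager cacheManager(
            @Value("${hls.cache.base-dir:/tmp/hls-proxy-cache}") String baseDirectory) {
        // Each cache name requested via getCache(name) becomes a subdirectory of baseDirectory
        return new FileCacheManager(baseDirectory);
    }
}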

TempFileCache.java

@@ -0,0 +1,274 @@
package com.backend.hls.proxy.service.cache;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.RemovalCause;
import org.springframework.cache.support.SimpleValueWrapper;
import org.springframework.lang.Nullable;
import java.io.IOException;
import java.nio.file.*;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Stream;
public class TempFileCache implements org.springframework.cache.Cache {
private final String name;
private final Path cacheDirectory;
private final Cache<Object, CacheMetadata> metadataCache;
private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
private static class CacheMetadata {
String filename;
long size;
long lastAccessed;
long createdTime;
Path filePath;
CacheMetadata(String filename, long size, Path filePath) {
this.filename = filename;
this.size = size;
this.filePath = filePath;
this.lastAccessed = System.currentTimeMillis();
this.createdTime = System.currentTimeMillis();
}
void updateAccess() {
this.lastAccessed = System.currentTimeMillis();
}
boolean isFileValid() {
return Files.exists(filePath);
}
}
public TempFileCache(String name, Path cacheDirectory) throws IOException {
this.name = name;
this.cacheDirectory = cacheDirectory;
this.metadataCache = Caffeine.newBuilder()
.maximumSize(1000)
.expireAfterAccess(1, TimeUnit.HOURS)
.removalListener((Object key, CacheMetadata metadata, RemovalCause cause) -> {
if (metadata != null && metadata.filePath != null) {
try {
Files.deleteIfExists(metadata.filePath);
} catch (IOException e) {
// Best-effort cleanup on eviction; ignore deletion failures
}
}
})
.build();
Files.createDirectories(cacheDirectory);
cleanUpOrphanedFiles();
}
private void cleanUpOrphanedFiles() {
// Files.list returns a stream backed by an open directory handle, so close it
try (Stream<Path> files = Files.list(cacheDirectory)) {
files.filter(Files::isRegularFile)
.forEach(file -> {
boolean hasMetadata = metadataCache.asMap().values().stream()
.anyMatch(meta -> meta.filePath.equals(file));
if (!hasMetadata) {
try {
Files.delete(file);
} catch (IOException e) {
// Best-effort cleanup: skip files that cannot be deleted
}
}
});
} catch (IOException e) {
// Listing failed; leave any orphaned files in place
}
}
@Override
public String getName() {
return name;
}
@Override
public Object getNativeCache() {
return metadataCache;
}
@Override
@Nullable
public ValueWrapper get(Object key) {
lock.readLock().lock();
try {
CacheMetadata meta = metadataCache.getIfPresent(key);
if (meta == null || !meta.isFileValid()) {
return null;
}
try {
byte[] data = Files.readAllBytes(meta.filePath);
meta.updateAccess();
metadataCache.put(key, meta);
return new SimpleValueWrapper(data);
} catch (IOException e) {
metadataCache.invalidate(key);
return null;
}
} finally {
lock.readLock().unlock();
}
}
@Override
@Nullable
@SuppressWarnings("unchecked")
public <T> T get(Object key, @Nullable Class<T> type) {
ValueWrapper wrapper = get(key);
if (wrapper == null) {
return null;
}
Object value = wrapper.get();
if (type != null && !type.isInstance(value)) {
throw new IllegalStateException("Cached value is not of required type [" + type.getName() + "]: " + value);
}
return (T) value;
}
@Override
@Nullable
@SuppressWarnings("unchecked")
public <T> T get(Object key, Callable<T> valueLoader) {
ValueWrapper wrapper = get(key);
if (wrapper != null) {
return (T) wrapper.get();
}
lock.writeLock().lock();
try {
// Double-check after acquiring the write lock
wrapper = get(key);
if (wrapper != null) {
return (T) wrapper.get();
}
T value = valueLoader.call();
if (value instanceof byte[]) {
// Only byte[] payloads are persisted; other values are returned uncached
put(key, value);
}
return value;
} catch (Exception e) {
throw new ValueRetrievalException(key, valueLoader, e);
} finally {
lock.writeLock().unlock();
}
}
@Override
public void put(Object key, @Nullable Object value) {
if (value == null) {
evict(key);
return;
}
if (!(value instanceof byte[])) {
throw new IllegalArgumentException("TempFileCache only supports byte arrays");
}
lock.writeLock().lock();
try {
byte[] data = (byte[]) value;
String filename = generateFilename(key, data);
Path filePath = cacheDirectory.resolve(filename);
Files.write(filePath, data, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
CacheMetadata newMeta = new CacheMetadata(filename, data.length, filePath);
CacheMetadata oldMeta = metadataCache.asMap().put(key, newMeta);
// If this key was already cached, drop its previous backing file
if (oldMeta != null && oldMeta.filePath != null && !oldMeta.filePath.equals(filePath)) {
Files.deleteIfExists(oldMeta.filePath);
}
} catch (IOException e) {
throw new RuntimeException("Failed to cache file", e);
} finally {
lock.writeLock().unlock();
}
}
private String generateFilename(Object key, byte[] data) {
String hash = Integer.toHexString(key.hashCode());
String sizeSuffix = "_" + data.length;
String timestamp = "_" + System.currentTimeMillis();
return hash + sizeSuffix + timestamp + ".cache";
}
@Override
public void evict(Object key) {
lock.writeLock().lock();
try {
CacheMetadata meta = metadataCache.asMap().remove(key);
if (meta != null && meta.filePath != null) {
try {
Files.deleteIfExists(meta.filePath);
} catch (IOException e) {
// Best-effort delete; the metadata entry has already been removed
}
}
} finally {
lock.writeLock().unlock();
}
}
@Override
public void clear() {
lock.writeLock().lock();
try {
metadataCache.invalidateAll();
// Delete every backing file; Files.list must be closed to release the directory handle
try (Stream<Path> files = Files.list(cacheDirectory)) {
files.filter(Files::isRegularFile)
.forEach(path -> {
try {
Files.delete(path);
} catch (IOException e) {
// Best-effort cleanup: skip files that cannot be deleted
}
});
} catch (IOException e) {
// Listing failed; remaining files are handled by the orphan cleanup on restart
}
} finally {
lock.writeLock().unlock();
}
}
@Override
@Nullable
public ValueWrapper putIfAbsent(Object key, @Nullable Object value) {
lock.writeLock().lock();
try {
ValueWrapper existing = get(key);
if (existing == null) {
put(key, value);
return null;
}
return existing;
} finally {
lock.writeLock().unlock();
}
}
public com.github.benmanes.caffeine.cache.stats.CacheStats getMetadataStats() {
return metadataCache.stats();
}
public long getCacheSize() {
return metadataCache.asMap().values().stream()
.mapToLong(meta -> meta.size)
.sum();
}
public int getCacheCount() {
return (int) metadataCache.estimatedSize();
}
public void cleanupExpiredEntries() {
metadataCache.cleanUp();
}
}
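
A rough usage sketch, assuming both classes above are on the classpath (package declaration omitted): it exercises the byte[]-only contract of put and the file-backed round trip through get and evict. The cache name, key, and base directory below are illustrative.

import org.springframework.cache.Cache;
import org.springframework.cache.CacheManager;

public class TempFileCacheDemo {
    public static void main(String[] args) {
        // Illustrative base directory; FileCacheManager creates it if missing
        CacheManager manager = new FileCacheManager("/tmp/hls-proxy-cache");
        Cache segments = manager.getCache("segments");

        // Only byte[] values are accepted; any other type makes put() throw
        byte[] payload = "#EXTM3U".getBytes(java.nio.charset.StandardCharsets.UTF_8);
        segments.put("master.m3u8", payload);

        // get() reads the bytes back from the temp file and refreshes the last-access time
        Cache.ValueWrapper hit = segments.get("master.m3u8");
        if (hit != null) {
            byte[] restored = (byte[]) hit.get();
            System.out.println("restored " + restored.length + " bytes");
        }

        // evict() removes the metadata entry and deletes the backing file
        segments.evict("master.m3u8");
    }
}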