use TsFileID as cache key
shuwenwei committed Dec 26, 2024
1 parent 6b53f75 commit 2f27468
Showing 5 changed files with 32 additions and 113 deletions.
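The commit collapses the four identity fields that each cache key used to carry (regionId, timePartitionId, tsFileVersion, compactionVersion) into the single TsFileID value object, so equals/hashCode delegate to that object and call sites pass one argument instead of four. For orientation, here is a minimal, self-contained sketch of the resulting key shape; TsFileIDSketch and BloomFilterCacheKeySketch are illustrative stand-ins for this summary, not the actual IoTDB classes in the diff.

import java.util.Objects;

// Illustrative stand-in for org.apache.iotdb.db.storageengine.dataregion.tsfile.TsFileID;
// the real class carries the same four identity fields shown in the diff below.
final class TsFileIDSketch {
  final int regionId;
  final long timePartitionId;
  final long fileVersion;
  // high 32 bits hold the compaction level, low 32 bits the merge count
  final long compactionVersion;

  TsFileIDSketch(int regionId, long timePartitionId, long fileVersion, long compactionVersion) {
    this.regionId = regionId;
    this.timePartitionId = timePartitionId;
    this.fileVersion = fileVersion;
    this.compactionVersion = compactionVersion;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof TsFileIDSketch)) {
      return false;
    }
    TsFileIDSketch that = (TsFileIDSketch) o;
    return regionId == that.regionId
        && timePartitionId == that.timePartitionId
        && fileVersion == that.fileVersion
        && compactionVersion == that.compactionVersion;
  }

  @Override
  public int hashCode() {
    return Objects.hash(regionId, timePartitionId, fileVersion, compactionVersion);
  }
}

// After the refactor, a cache key simply composes the ID and delegates equality to it;
// filePath is kept for loading on a cache miss but stays out of equals/hashCode, as before.
final class BloomFilterCacheKeySketch {
  final String filePath;
  final TsFileIDSketch tsFileID;

  BloomFilterCacheKeySketch(String filePath, TsFileIDSketch tsFileID) {
    this.filePath = filePath;
    this.tsFileID = tsFileID;
  }

  @Override
  public boolean equals(Object o) {
    return this == o
        || (o instanceof BloomFilterCacheKeySketch
            && Objects.equals(tsFileID, ((BloomFilterCacheKeySketch) o).tsFileID));
  }

  @Override
  public int hashCode() {
    return Objects.hash(tsFileID);
  }
}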
@@ -24,6 +24,7 @@
import org.apache.iotdb.db.conf.IoTDBConfig;
import org.apache.iotdb.db.conf.IoTDBDescriptor;
import org.apache.iotdb.db.storageengine.dataregion.read.control.FileReaderManager;
import org.apache.iotdb.db.storageengine.dataregion.tsfile.TsFileID;

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
@@ -155,23 +156,11 @@ public static class BloomFilterCacheKey {
// because filePath is obtained from TsFileResource, different BloomFilterCacheKeys of the same file
// share this String.
private final String filePath;
private final int regionId;
private final long timePartitionId;
private final long tsFileVersion;
// high 32 bit is compaction level, low 32 bit is merge count
private final long compactionVersion;

public BloomFilterCacheKey(
String filePath,
int regionId,
long timePartitionId,
long tsFileVersion,
long compactionVersion) {
private final TsFileID tsFileID;

public BloomFilterCacheKey(String filePath, TsFileID tsFileID) {
this.filePath = filePath;
this.regionId = regionId;
this.timePartitionId = timePartitionId;
this.tsFileVersion = tsFileVersion;
this.compactionVersion = compactionVersion;
this.tsFileID = tsFileID;
}

@Override
@@ -183,15 +172,12 @@ public boolean equals(Object o) {
return false;
}
BloomFilterCacheKey that = (BloomFilterCacheKey) o;
return regionId == that.regionId
&& timePartitionId == that.timePartitionId
&& tsFileVersion == that.tsFileVersion
&& compactionVersion == that.compactionVersion;
return Objects.equals(tsFileID, that.tsFileID);
}

@Override
public int hashCode() {
return Objects.hash(regionId, timePartitionId, tsFileVersion, compactionVersion);
return Objects.hash(tsFileID);
}

public long getRetainedSizeInBytes() {
@@ -218,11 +218,7 @@ public static class ChunkCacheKey {
// because filePath is obtained from TsFileResource, different ChunkCacheKeys of the same file
// share this String.
private final String filePath;
private final int regionId;
private final long timePartitionId;
private final long tsFileVersion;
// high 32 bit is compaction level, low 32 bit is merge count
private final long compactionVersion;
private final TsFileID tsFileID;

private final long offsetOfChunkHeader;

@@ -233,10 +229,7 @@ public static class ChunkCacheKey {
public ChunkCacheKey(
String filePath, TsFileID tsfileId, long offsetOfChunkHeader, boolean closed) {
this.filePath = filePath;
this.regionId = tsfileId.regionId;
this.timePartitionId = tsfileId.timePartitionId;
this.tsFileVersion = tsfileId.fileVersion;
this.compactionVersion = tsfileId.compactionVersion;
this.tsFileID = tsfileId;
this.offsetOfChunkHeader = offsetOfChunkHeader;
this.closed = closed;
}
@@ -258,17 +251,13 @@ public boolean equals(Object o) {
return false;
}
ChunkCacheKey that = (ChunkCacheKey) o;
return regionId == that.regionId
&& timePartitionId == that.timePartitionId
&& tsFileVersion == that.tsFileVersion
&& compactionVersion == that.compactionVersion
return Objects.equals(tsFileID, that.tsFileID)
&& offsetOfChunkHeader == that.offsetOfChunkHeader;
}

@Override
public int hashCode() {
return Objects.hash(
regionId, timePartitionId, tsFileVersion, compactionVersion, offsetOfChunkHeader);
return Objects.hash(tsFileID, offsetOfChunkHeader);
}

@Override
@@ -278,13 +267,13 @@ public String toString() {
+ filePath
+ '\''
+ ", regionId="
+ regionId
+ tsFileID.regionId
+ ", timePartitionId="
+ timePartitionId
+ tsFileID.timePartitionId
+ ", tsFileVersion="
+ tsFileVersion
+ tsFileID.fileVersion
+ ", compactionVersion="
+ compactionVersion
+ tsFileID.compactionVersion
+ ", offsetOfChunkHeader="
+ offsetOfChunkHeader
+ '}';
@@ -167,12 +167,7 @@ public TimeseriesMetadata get(
BloomFilter bloomFilter =
BloomFilterCache.getInstance()
.get(
new BloomFilterCache.BloomFilterCacheKey(
filePath,
key.regionId,
key.timePartitionId,
key.tsFileVersion,
key.compactionVersion),
new BloomFilterCache.BloomFilterCacheKey(filePath, key.tsFileID),
debug,
bloomFilterIoSizeRecorder,
queryContext.getQueryStatistics().getLoadBloomFilterFromCacheCount()
@@ -204,12 +199,7 @@ public TimeseriesMetadata get(
for (TimeseriesMetadata metadata : timeSeriesMetadataList) {
TimeSeriesMetadataCacheKey k =
new TimeSeriesMetadataCacheKey(
key.regionId,
key.timePartitionId,
key.tsFileVersion,
key.compactionVersion,
key.device,
metadata.getMeasurementId());
key.tsFileID, key.device, metadata.getMeasurementId());
if (metadata.getStatistics().getCount() != 0) {
lruCache.put(k, metadata);
}
@@ -303,34 +293,12 @@ public static class TimeSeriesMetadataCacheKey {
RamUsageEstimator.shallowSizeOfInstance(TimeSeriesMetadataCacheKey.class)
+ RamUsageEstimator.shallowSizeOfInstance(String.class);

private final int regionId;
private final long timePartitionId;
private final long tsFileVersion;
// high 32 bit is compaction level, low 32 bit is merge count
private final long compactionVersion;
private final TsFileID tsFileID;
private final IDeviceID device;
private final String measurement;

public TimeSeriesMetadataCacheKey(TsFileID tsFileID, IDeviceID device, String measurement) {
this.regionId = tsFileID.regionId;
this.timePartitionId = tsFileID.timePartitionId;
this.tsFileVersion = tsFileID.fileVersion;
this.compactionVersion = tsFileID.compactionVersion;
this.device = device;
this.measurement = measurement;
}

public TimeSeriesMetadataCacheKey(
int regionId,
long timePartitionId,
long tsFileVersion,
long compactionVersion,
IDeviceID device,
String measurement) {
this.regionId = regionId;
this.timePartitionId = timePartitionId;
this.tsFileVersion = tsFileVersion;
this.compactionVersion = compactionVersion;
this.tsFileID = tsFileID;
this.device = device;
this.measurement = measurement;
}
@@ -348,31 +316,27 @@ public boolean equals(Object o) {
return false;
}
TimeSeriesMetadataCacheKey that = (TimeSeriesMetadataCacheKey) o;
return regionId == that.regionId
&& timePartitionId == that.timePartitionId
&& tsFileVersion == that.tsFileVersion
&& compactionVersion == that.compactionVersion
return Objects.equals(tsFileID, that.tsFileID)
&& Objects.equals(device, that.device)
&& Objects.equals(measurement, that.measurement);
}

@Override
public int hashCode() {
return Objects.hash(
regionId, timePartitionId, tsFileVersion, compactionVersion, device, measurement);
return Objects.hash(tsFileID, device, measurement);
}

@Override
public String toString() {
return "TimeSeriesMetadataCacheKey{"
+ "regionId="
+ regionId
+ tsFileID.regionId
+ ", timePartitionId="
+ timePartitionId
+ tsFileID.timePartitionId
+ ", tsFileVersion="
+ tsFileVersion
+ tsFileID.fileVersion
+ ", compactionVersion="
+ compactionVersion
+ tsFileID.compactionVersion
+ ", device='"
+ device
+ '\''
@@ -239,8 +239,9 @@ protected boolean doCompaction() {
if (!tsFileManager.isAllowCompaction()) {
return true;
}
if ((filesView.sequence
&& !IoTDBDescriptor.getInstance().getConfig().isEnableSeqSpaceCompaction())
if ((this.getCompactionTaskType() != CompactionTaskType.REPAIR)
&& (filesView.sequence
&& !IoTDBDescriptor.getInstance().getConfig().isEnableSeqSpaceCompaction())
|| (!filesView.sequence
&& !IoTDBDescriptor.getInstance().getConfig().isEnableUnseqSpaceCompaction())) {
return true;
@@ -96,13 +96,7 @@ public void testGet() {
for (String filePath : pathList) {
TsFileID tsFileID = new TsFileID(filePath);
BloomFilter bloomFilter =
bloomFilterCache.get(
new BloomFilterCache.BloomFilterCacheKey(
filePath,
tsFileID.regionId,
tsFileID.timePartitionId,
tsFileID.fileVersion,
tsFileID.compactionVersion));
bloomFilterCache.get(new BloomFilterCache.BloomFilterCacheKey(filePath, tsFileID));
TsFileSequenceReader reader = FileReaderManager.getInstance().get(filePath, true);
BloomFilter bloomFilter1 = reader.readBloomFilter();
Assert.assertEquals(bloomFilter1, bloomFilter);
@@ -120,12 +114,7 @@ public void testRemove() {
String path = pathList.get(0);
TsFileID tsFileID = new TsFileID(path);
BloomFilterCache.BloomFilterCacheKey key =
new BloomFilterCache.BloomFilterCacheKey(
path,
tsFileID.regionId,
tsFileID.timePartitionId,
tsFileID.fileVersion,
tsFileID.compactionVersion);
new BloomFilterCache.BloomFilterCacheKey(path, tsFileID);
BloomFilter bloomFilter = bloomFilterCache.get(key);
TsFileSequenceReader reader = FileReaderManager.getInstance().get(path, true);
BloomFilter bloomFilter1 = reader.readBloomFilter();
@@ -146,12 +135,7 @@ public void testClear() {
for (String path : pathList) {
TsFileID tsFileID = new TsFileID(path);
BloomFilterCache.BloomFilterCacheKey key =
new BloomFilterCache.BloomFilterCacheKey(
path,
tsFileID.regionId,
tsFileID.timePartitionId,
tsFileID.fileVersion,
tsFileID.compactionVersion);
new BloomFilterCache.BloomFilterCacheKey(path, tsFileID);
BloomFilter bloomFilter = bloomFilterCache.get(key);
TsFileSequenceReader reader = FileReaderManager.getInstance().get(path, true);
BloomFilter bloomFilter1 = reader.readBloomFilter();
@@ -162,12 +146,7 @@ public void testClear() {
for (String path : pathList) {
TsFileID tsFileID = new TsFileID(path);
BloomFilterCache.BloomFilterCacheKey key =
new BloomFilterCache.BloomFilterCacheKey(
path,
tsFileID.regionId,
tsFileID.timePartitionId,
tsFileID.fileVersion,
tsFileID.compactionVersion);
new BloomFilterCache.BloomFilterCacheKey(path, tsFileID);
BloomFilter bloomFilter = bloomFilterCache.getIfPresent(key);
Assert.assertNull(bloomFilter);
}

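Taken together, a call site now only needs the file path plus its TsFileID, as the updated tests show. The snippet below is a hypothetical excerpt mirroring that test code, assuming filePath points at an existing, sealed TsFile.

// Hypothetical excerpt mirroring the updated BloomFilterCacheTest above.
TsFileID tsFileID = new TsFileID(filePath);  // identity derived from the TsFile path
BloomFilterCache.BloomFilterCacheKey key =
    new BloomFilterCache.BloomFilterCacheKey(filePath, tsFileID);
BloomFilter bloomFilter = BloomFilterCache.getInstance().get(key);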