Fix incorrect raw read on RegionFile
Because the limit was not set on the compressed buffer, the raw read
would read past the end of the compressed data.

Additionally, make the type header allocation lazy to avoid
allocating disk space for headers that are not in use.
Spottedleaf committed Jun 22, 2024
1 parent 0be4187 commit 2ebdb32
Showing 5 changed files with 69 additions and 49 deletions.
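
To illustrate the first fix, here is a minimal, self-contained Java sketch (illustrative only; the 16-byte buffer and 4-byte length prefix are assumptions, not the project's exact layout) of why a sector-aligned read buffer needs its limit set in addition to its position:

import java.nio.ByteBuffer;

public final class LimitSketch {
    public static void main(final String[] args) {
        // Pretend two 8-byte "sectors" were read from disk, but only a 4-byte
        // length prefix plus 5 bytes of data were actually written.
        final ByteBuffer raw = ByteBuffer.allocate(16);
        raw.putInt(0, 5);                       // prefix: 5 bytes of data follow
        for (int i = 0; i < 5; ++i) {
            raw.put(4 + i, (byte) (i + 1));     // the data itself
        }
        // bytes 9..15 are stale padding from the sector-aligned read

        raw.position(4);                        // skipping the prefix alone is not enough...
        System.out.println(raw.remaining());    // 12: the padding is still visible

        raw.limit(4 + raw.getInt(0));           // ...the limit must bound the view to the data
        System.out.println(raw.remaining());    // 5: exactly the bytes that were written
    }
}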
dependency-reduced-pom.xml: 2 changes (1 addition, 1 deletion)
@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.spottedleaf</groupId>
<artifactId>sectortool</artifactId>
<version>1.2-SNAPSHOT</version>
<version>1.3-SNAPSHOT</version>
<build>
<defaultGoal>clean package</defaultGoal>
<plugins>
pom.xml: 2 changes (1 addition, 1 deletion)
@@ -6,7 +6,7 @@

<groupId>ca.spottedleaf</groupId>
<artifactId>sectortool</artifactId>
<version>1.2-SNAPSHOT</version>
<version>1.3-SNAPSHOT</version>

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
src/main/java/ca/spottedleaf/io/region/SectorFile.java: 96 changes (53 additions, 43 deletions)
@@ -236,8 +236,8 @@ public static boolean validate(final XXHash64 xxHash64, final ByteBuffer header,

private static final int MAX_INTERNAL_ALLOCATION_BYTES = SECTOR_SIZE * (1 << SECTOR_LENGTH_BITS);

private static final int TYPE_HEADER_OFFSET_COUNT = SECTION_SIZE * SECTION_SIZE; // total number of offsets per type header
private static final int TYPE_HEADER_SECTORS = (TYPE_HEADER_OFFSET_COUNT * INT_SIZE) / SECTOR_SIZE; // total number of sectors used per type header
public static final int TYPE_HEADER_OFFSET_COUNT = SECTION_SIZE * SECTION_SIZE; // total number of offsets per type header
public static final int TYPE_HEADER_SECTORS = (TYPE_HEADER_OFFSET_COUNT * INT_SIZE) / SECTOR_SIZE; // total number of sectors used per type header

// header location is just raw sector number
// so, we point to the header itself to indicate absence
@@ -329,9 +329,8 @@ public static long computeHash(final ByteBuffer buffer, final int offset) {
private final Int2ObjectMap<String> typeTranslationTable;
private final FileHeader fileHeader = new FileHeader();

private void checkReadOnlyHeader(final int type) {
// we want to error when a type is used which is not mapped, but we can only store into typeHeaders in write mode
// as sometimes we may need to create absent type headers
private void checkHeaderExists(final int type) {
// we want to error when a type is used which is not mapped
if (this.typeTranslationTable.get(type) == null) {
throw new IllegalArgumentException("Unknown type " + type);
}
@@ -381,44 +380,33 @@ public SectorFile(final File file, final int sectionX, final int sectionZ,
this.readFileHeader(unscopedBufferChoices);
}

boolean modifiedFileHeader = false;
// validate types
for (final IntIterator iterator = typeTranslationTable.keySet().iterator(); iterator.hasNext(); ) {
final int type = iterator.nextInt();

if (type < 0 || type >= MAX_TYPES) {
throw new IllegalStateException("Type translation table contains illegal type: " + type);
}
}
}

private TypeHeader createTypeHeader(final int type, final BufferChoices unscopedBufferChoices) throws IOException {
try (final BufferChoices scopedBufferChoices = unscopedBufferChoices.scope()) {
final ByteBuffer ioBuffer = scopedBufferChoices.t16k().acquireDirectBuffer();
final int offset = this.sectorAllocator.allocate(TYPE_HEADER_SECTORS, false); // in sectors
if (offset <= 0) {
throw new IllegalStateException("Cannot allocate space for header " + this.debugType(type) + ":" + offset);
}

// make sure we have the type headers required allocated
for (final IntIterator iterator = typeTranslationTable.keySet().iterator(); iterator.hasNext(); ) {
final int type = iterator.nextInt();

if (type < 0 || type >= MAX_TYPES) {
throw new IllegalStateException("Type translation table contains illegal type: " + type);
}

final TypeHeader headerData = this.typeHeaders.get(type);
if (headerData != null || readOnly) {
// allocated or unable to allocate
continue;
}

modifiedFileHeader = true;
final TypeHeader ret = new TypeHeader();

// need to allocate space for new type header
final int offset = this.sectorAllocator.allocate(TYPE_HEADER_SECTORS, false); // in sectors
if (offset <= 0) {
throw new IllegalStateException("Cannot allocate space for header " + this.debugType(type) + ":" + offset);
}
this.fileHeader.typeHeaderOffsets[type] = offset;
// hash will be computed by writeTypeHeader
this.typeHeaders.put(type, ret);

this.fileHeader.typeHeaderOffsets[type] = offset;
// hash will be computed by writeTypeHeader
this.typeHeaders.put(type, new TypeHeader());
this.writeTypeHeader(ioBuffer, type, true, true);

this.writeTypeHeader(ioBuffer, type, true, false);
}

// modified the file header, so write it back
if (modifiedFileHeader) {
this.writeFileHeader(ioBuffer);
}
return ret;
}
}

@@ -700,6 +688,18 @@ class TentativeTypeHeader {
final int type = entry.getIntKey();
final TentativeTypeHeader tentativeTypeHeader = entry.getValue();

boolean hasData = false;
for (final int location : tentativeTypeHeader.typeHeader.locations) {
if (location != ABSENT_LOCATION) {
hasData = true;
break;
}
}

if (!hasData) {
continue;
}

final int sectorOffset = newSectorAllocation.allocate(TYPE_HEADER_SECTORS, false);
if (sectorOffset < 0) {
throw new IllegalStateException("Failed to allocate type header");
@@ -717,10 +717,11 @@ class TentativeTypeHeader {

boolean changes = false;

for (final Iterator<Int2ObjectMap.Entry<TypeHeader>> iterator = newHeaders.int2ObjectEntrySet().fastIterator(); iterator.hasNext();) {
final Int2ObjectMap.Entry<TypeHeader> entry = iterator.next();
// make sure to use the tentative type headers, in case the tentative header was not allocated due to being empty
for (final Iterator<Int2ObjectMap.Entry<TentativeTypeHeader>> iterator = newTypeHeaders.int2ObjectEntrySet().fastIterator(); iterator.hasNext();) {
final Int2ObjectMap.Entry<TentativeTypeHeader> entry = iterator.next();
final int type = entry.getIntKey();
final TypeHeader newTypeHeader = entry.getValue();
final TypeHeader newTypeHeader = entry.getValue().typeHeader;
final TypeHeader oldTypeHeader = this.typeHeaders.get(type);

boolean hasChanges;
@@ -1075,7 +1076,7 @@ public boolean hasData(final int localX, final int localZ, final int type) {
final TypeHeader typeHeader = this.typeHeaders.get(type);

if (typeHeader == null) {
this.checkReadOnlyHeader(type);
this.checkHeaderExists(type);
return false;
}

@@ -1123,7 +1124,7 @@ private SectorFileInput read(final BufferChoices scopedBufferChoices, final Byte
final TypeHeader typeHeader = this.typeHeaders.get(type);

if (typeHeader == null) {
this.checkReadOnlyHeader(type);
this.checkHeaderExists(type);
return NULL_DATA;
}

@@ -1246,7 +1247,7 @@ public boolean delete(final BufferChoices unscopedBufferChoices, final int local
final TypeHeader typeHeader = this.typeHeaders.get(type);

if (typeHeader == null) {
this.checkReadOnlyHeader(type);
this.checkHeaderExists(type);
return false;
}

@@ -1299,7 +1300,7 @@ public SectorFileOutput write(final BufferChoices scopedBufferChoices, final int
throw new UnsupportedOperationException("Sectorfile is read-only");
}

if (this.typeHeaders.get(type) == null) {
if (!this.typeHeaders.containsKey(type) && !this.typeTranslationTable.containsKey(type)) {
throw new IllegalArgumentException("Unknown type " + type);
}

@@ -1553,8 +1554,17 @@ protected ByteBuffer flush(final ByteBuffer current) throws IOException {
return current;
}

private void checkAndCreateTypeHeader() throws IOException {
if (SectorFile.this.typeHeaders.get(this.type) == null) {
SectorFile.this.createTypeHeader(this.type, this.scopedBufferChoices);
}
}

// assume flush() is called before this
private void save() throws IOException {
// lazily create type header
this.checkAndCreateTypeHeader();

if (this.externalFile == null) {
// avoid clobbering buffer positions/limits
final ByteBuffer buffer = this.buffer.duplicate();
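
The second half of the change, lazy type-header creation (the createTypeHeader and checkAndCreateTypeHeader additions above), boils down to the shape below; this is a simplified, hypothetical sketch rather than the real SectorFile API, with sector allocation and on-disk header writes elided:

final class LazyHeaderSketch {
    // one location slot per coordinate; 32 * 32 is assumed here for illustration
    private final java.util.Map<Integer, int[]> typeHeaders = new java.util.HashMap<>();

    // read/hasData/delete paths: a missing header just means "no data of this type yet"
    int[] headerForRead(final int type) {
        return this.typeHeaders.get(type);
    }

    // write path: the header (and, in the real code, its sectors on disk) is
    // created only when data of that type is first saved
    int[] headerForWrite(final int type) {
        return this.typeHeaders.computeIfAbsent(type, t -> new int[32 * 32]);
    }
}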
src/main/java/ca/spottedleaf/sectortool/analyse/Analyse.java: 7 changes (6 additions, 1 deletion)
@@ -60,13 +60,15 @@ private static class StatsAccumulator {
public long fileSectors = 0L;
public long allocatedSectors = 0L;
public long alternateAllocatedSectors = 0L;
public long alternateAllocatedSectorsPadded = 0L;
public long dataSizeBytes = 0L;
public long errors = 0L;

public synchronized void accumulate(final RegionFile.AllocationStats stats) {
this.fileSectors += stats.fileSectors();
this.allocatedSectors += stats.allocatedSectors();
this.alternateAllocatedSectors += stats.alternateAllocatedSectors();
this.alternateAllocatedSectorsPadded += stats.alternateAllocatedSectorsPadded();
this.dataSizeBytes += stats.dataSizeBytes();
this.errors += (long)stats.errors();
}
@@ -75,6 +77,7 @@ public void print() {
System.out.println("File sectors: " + this.fileSectors);
System.out.println("Allocated sectors: " + this.allocatedSectors);
System.out.println("Alternate allocated sectors: " + this.alternateAllocatedSectors);
System.out.println("Alternate allocated sectors padded: " + this.alternateAllocatedSectorsPadded);
System.out.println("Total data size: " + this.dataSizeBytes);
System.out.println("Errors: " + this.errors);
}
@@ -133,7 +136,9 @@ public void run() {

final RegionFile.AllocationStats stats;
try {
stats = regionFile.computeStats(unscopedBufferChoices, SectorFile.SECTOR_SIZE, SectorFile.DataHeader.DATA_HEADER_LENGTH);
stats = regionFile.computeStats(unscopedBufferChoices, SectorFile.SECTOR_SIZE,
SectorFile.TYPE_HEADER_SECTORS + SectorFile.FileHeader.FILE_HEADER_TOTAL_SECTORS,
SectorFile.DataHeader.DATA_HEADER_LENGTH);
} catch (final IOException ex) {
synchronized (System.err) {
System.err.println("Failed to read stats from regionfile '" + regionFile.file.getAbsolutePath() + "': ");
src/main/java/ca/spottedleaf/sectortool/storage/RegionFile.java: 11 changes (8 additions, 3 deletions)
@@ -204,7 +204,7 @@ private File getExternalFile(final int x, final int z) {
return new File(this.file.getParentFile(), "c." + cx + "." + cz + ".mcc");
}

public static record AllocationStats(long fileSectors, long allocatedSectors, long alternateAllocatedSectors, long dataSizeBytes, int errors) {}
public static record AllocationStats(long fileSectors, long allocatedSectors, long alternateAllocatedSectors, long alternateAllocatedSectorsPadded, long dataSizeBytes, int errors) {}

private int[] getHeaderSorted() {
final IntArrayList list = new IntArrayList(this.header.length);
@@ -218,11 +218,12 @@ private int[] getHeaderSorted() {
}

public AllocationStats computeStats(final BufferChoices unscopedBufferChoices, final int alternateSectorSize,
final int alternateHeaderSectors,
final int alternateOverhead) throws IOException {
final long fileSectors = (this.file.length() + (SECTOR_SIZE - 1)) >> SECTOR_SHIFT;

long allocatedSectors = Math.min(fileSectors, 2L);
long alternateAllocatedSectors = 0L;
long alternateAllocatedSectors = (long)alternateHeaderSectors;
int errors = 0;
long dataSize = 0L;

@@ -272,7 +273,10 @@ public AllocationStats computeStats(final BufferChoices unscopedBufferChoices, f
}
}

return new AllocationStats(fileSectors, allocatedSectors, alternateAllocatedSectors, dataSize, errors);
final long diff = SECTOR_SIZE / alternateSectorSize;
final long alternateAllocatedSectorsPadded = diff <= 1L ? alternateAllocatedSectors : ((alternateAllocatedSectors + (diff - 1L)) / diff) * diff;

return new AllocationStats(fileSectors, allocatedSectors, alternateAllocatedSectors, alternateAllocatedSectorsPadded, dataSize, errors);
}

public boolean read(final int x, final int z, final BufferChoices unscopedBufferChoices, final RegionFile.CustomByteArrayOutputStream decompressed) throws IOException {
@@ -324,6 +328,7 @@ public int read(final int x, final int z, final BufferChoices unscopedBufferChoi
final int length = compressedData.getInt(0) - BYTE_SIZE;
byte type = compressedData.get(0 + INT_SIZE);
compressedData.position(0 + INT_SIZE + BYTE_SIZE);
compressedData.limit(compressedData.getInt(0) + INT_SIZE);

if (compressedData.remaining() < length) {
throw new EOFException("Truncated data");
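
As a worked example of the padded figure added above (sector sizes assumed for illustration): with SECTOR_SIZE = 4096 and alternateSectorSize = 512, diff is 8, so an alternateAllocatedSectors value of 21 is padded up to ((21 + 7) / 8) * 8 = 24. In other words, the alternate-sector count is rounded up to the equivalent of whole 4096-byte sectors.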
