62 changes: 41 additions & 21 deletions PolarWorlds/PolarLoader.cs
@@ -19,9 +19,11 @@ public class PolarLoader : ITerrainProvider {
private const int MaxHeightmaps = 32;
private const int BlockPaletteSize = 4096;
private const int DataVersion = 4325;
private const int InitialPaletteCapacity = 256; // Typical palette size estimate

public Dictionary<Vec2<int>, ChunkData> Chunks = null!;
private MinecraftRegistry _registry;
private Dictionary<string, IBlock> _blockStringCache = new(); // Cache parsed block strings

public PolarLoader(string path, MinecraftRegistry registry) : this(File.ReadAllBytes(path), registry) {

@@ -58,15 +60,13 @@ public static byte[] CreateWorld(ChunkData[] chunks, MinecraftRegistry? registry

data.WriteVarInt(chunks.Length); // Number of chunks
foreach (ChunkData chunk in chunks) {
Console.WriteLine("Writing chunk: " + chunk.ChunkX + ", " + chunk.ChunkZ);
WriteChunk(data, chunk, registry);
}

// Cool, now we compress the data
writer.WriteByte((sbyte)CompressionType.Zstd); // Compression type
byte[] compressedData = CompressZstd(data.ToArray());
writer.WriteVarInt((int)data.Length); // Original length of the data before compression
Console.WriteLine("Compressed data length: " + data.Length);
writer.Write(compressedData); // Compressed data
return writer.ToArray();
}
@@ -127,7 +127,9 @@ private static void DecompressZstd(DataReader reader, int length) {
}

private static byte[] CompressZstd(ReadOnlySpan<byte> data) {
using Compressor compressor = new();
// Use level 1 for faster compression (default is 3)
// This trades compression ratio for speed
using Compressor compressor = new(1);
return compressor.Wrap(data).ToArray();
}

@@ -242,22 +244,26 @@ private ChunkData ReadChunk(DataReader reader, short version, int dataVersion, i
}

private static void WriteSection(DataWriter writer, ChunkSection section, MinecraftRegistry registry) {
Dictionary<uint, int> stateIdToPaletteIndex = new(InitialPaletteCapacity); // Map state ID directly to palette index
List<string> blockPalette = [];
ushort[] blockData = new ushort[ChunkSection.Size * ChunkSection.Size * ChunkSection.Size];
int cBlockDataIndex = 0;

// Access blocks array directly - much faster than LookupBlock
uint[,,] blocks = section.Blocks;

for (int y = 0; y < ChunkSection.Size; y++) {
for (int z = 0; z < ChunkSection.Size; z++) {
for (int x = 0; x < ChunkSection.Size; x++) {
IBlock block = section.LookupBlock(x, y, z, registry);
string blockStr = GetStateStringFromBlock(block);
uint stateId = blocks[x, y, z];

int paletteIndex;
if (blockPalette.Contains(blockStr)) {
paletteIndex = blockPalette.IndexOf(blockStr);
}
else {
// Use state ID as key - only convert to string once per unique state
if (!stateIdToPaletteIndex.TryGetValue(stateId, out int paletteIndex)) {
paletteIndex = blockPalette.Count;
blockPalette.Add(blockStr);
stateIdToPaletteIndex[stateId] = paletteIndex;
// Only lookup block when we need to convert to string for the first time
IBlock block = registry.Blocks.GetByStateId(stateId);
blockPalette.Add(GetStateStringFromBlock(block));
}
blockData[cBlockDataIndex++] = (ushort)paletteIndex;
}
@@ -305,16 +311,21 @@ private ChunkSection ReadSection(DataReader reader, short version, int dataVersi

string[] blockPalette = reader.ReadPrefixedArray(r => r.ReadString());
if (blockPalette.Length > 1) {
// Convert palette strings to state IDs once instead of repeatedly in the loop
uint[] stateIdPalette = new uint[blockPalette.Length];
for (int i = 0; i < blockPalette.Length; i++) {
stateIdPalette[i] = GetBlockFromString(blockPalette[i]).StateId;
}

int bitsPerEntry = (int) Math.Ceiling(Math.Log(blockPalette.Length) / Math.Log(2)); // e.g. 20 palette entries -> ceil(log2 20) = 5 bits per index
ushort[] blockData = reader.ReadPrefixedPacketDataArray(bitsPerEntry); // palette indices for blocks

// Optimized loop with direct indexing
int dataIndex = 0;
for (int y = 0; y < ChunkSection.Size; y++) {
for (int z = 0; z < ChunkSection.Size; z++) {
for (int x = 0; x < ChunkSection.Size; x++) {
int index = y * ChunkSection.Size * ChunkSection.Size + z * ChunkSection.Size + x;
string key = blockPalette[blockData[index]];

uint stateId = GetBlockFromString(key).StateId;
section.Blocks[x, y, z] = stateId;
section.Blocks[x, y, z] = stateIdPalette[blockData[dataIndex++]];
}
}
}
@@ -348,6 +359,11 @@ private ChunkSection ReadSection(DataReader reader, short version, int dataVersi
}

private IBlock GetBlockFromString(string blockStr) {
// Cache to avoid re-parsing the same block strings
if (_blockStringCache.TryGetValue(blockStr, out IBlock? cachedBlock)) {
return cachedBlock;
}

// Example: "minecraft:stone[variant=granite]"
string[] parts = blockStr.Split('[', 2);
string blockName = parts[0];
@@ -360,11 +376,14 @@ private ChunkSection ReadSection(DataReader reader, short version, int dataVersi
IBlock block = _registry.Blocks[blockName];

if (parts.Length <= 1) {
_blockStringCache[blockStr] = block;
return block;
}

CompoundTag properties = PropertiesStringToNbt(parts[1].TrimEnd(']'));
return block.WithState(properties);
IBlock result = block.WithState(properties);
_blockStringCache[blockStr] = result;
return result;
}

private static CompoundTag PropertiesStringToNbt(string propsStr) {
@@ -394,7 +413,11 @@ private static string GetStateStringFromBlock(IBlock block) {

private static string GetPropsStringFromBlock(IBlock block) {
CompoundTag properties = block.ToStateNbt();
List<string> props = [];
if (properties.Children.Length == 0) {
return string.Empty;
}

List<string> props = new(properties.Children.Length);
foreach (INbtTag? tag in properties.Children) {
if (tag is StringTag stringTag) {
props.Add($"{stringTag.Name}={stringTag.Value}");
@@ -421,7 +444,6 @@ private enum LightContent {
public void GetChunk(ref ChunkData chunk) {
Chunks.TryGetValue(new Vec2<int>(chunk.ChunkX, chunk.ChunkZ), out ChunkData? data);
if (data == null) {
Console.WriteLine("Polar chunk not found: " + chunk.ChunkX + ", " + chunk.ChunkZ);
return;
}

@@ -432,8 +454,6 @@ public void GetChunks(int start, int count, ChunkData[] chunks) {
for (int i = start; i < start + count; i++) {
if (Chunks.TryGetValue(new Vec2<int>(chunks[i].ChunkX, chunks[i].ChunkZ), out ChunkData? data)) {
chunks[i] = data;
} else {
Console.WriteLine("Polar chunk not found: " + chunks[i].ChunkX + ", " + chunks[i].ChunkZ);
}
}
}
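
For context, a minimal round trip through the API touched by this diff looks roughly like the sketch below. It is not part of the change; it only strings together calls already exercised by the benchmarks that follow (CreateWorld, the PolarLoader constructor, and GetChunk), assuming VanillaRegistry.Data as the registry.

using Minecraft.Data.Generated;
using Minecraft.Schemas.Chunks;
using PolarWorlds;

// Build two chunks, serialize them to Polar bytes, then load them back.
ChunkData[] chunks = new ChunkData[2];
for (int i = 0; i < chunks.Length; i++) {
    chunks[i] = new ChunkData(384) { ChunkX = i, ChunkZ = 0 };
    chunks[i].FillRandom();
}

byte[] polarData = PolarLoader.CreateWorld(chunks);        // write path: WriteSection + Zstd level 1
PolarLoader loader = new(polarData, VanillaRegistry.Data); // read path: ReadSection + block-string cache

ChunkData lookup = new(384) { ChunkX = 0, ChunkZ = 0 };
loader.GetChunk(ref lookup);                               // returns without modifying the chunk if it is not stored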
100 changes: 100 additions & 0 deletions Tests/PolarBenchmark.cs
@@ -0,0 +1,100 @@
using System.Diagnostics;
using Minecraft.Data.Generated;
using Minecraft.Schemas.Chunks;
using PolarWorlds;

namespace Tests;

public class PolarBenchmark {

[Test]
public void BenchmarkPolarWorldCreation() {
Console.WriteLine("=== Polar World Creation Benchmark ===");

// Create test chunks with random data
const int chunkCount = 10;
ChunkData[] chunks = new ChunkData[chunkCount];
for (int i = 0; i < chunkCount; i++) {
chunks[i] = new ChunkData(384) {
ChunkX = i * 32,
ChunkZ = i * 32
};
chunks[i].FillRandom();
}

// Warmup
Console.WriteLine("Warming up...");
byte[] warmupData = PolarLoader.CreateWorld(chunks);

// Benchmark
Stopwatch sw = Stopwatch.StartNew();
byte[] polarData = PolarLoader.CreateWorld(chunks);
sw.Stop();

Console.WriteLine($"Time to create world with {chunkCount} chunks: {sw.ElapsedMilliseconds}ms");
Console.WriteLine($"Output size: {polarData.Length} bytes");
Console.WriteLine($"Average time per chunk: {sw.ElapsedMilliseconds / (double)chunkCount:F2}ms");
}

[Test]
public void BenchmarkPolarWorldLoading() {
Console.WriteLine("=== Polar World Loading Benchmark ===");

// Create test data
const int chunkCount = 10;
ChunkData[] chunks = new ChunkData[chunkCount];
for (int i = 0; i < chunkCount; i++) {
chunks[i] = new ChunkData(384) {
ChunkX = i * 32,
ChunkZ = i * 32
};
chunks[i].FillRandom();
}

byte[] polarData = PolarLoader.CreateWorld(chunks);

// Warmup
Console.WriteLine("Warming up...");
PolarLoader warmupLoader = new(polarData, VanillaRegistry.Data);

// Benchmark
Stopwatch sw = Stopwatch.StartNew();
PolarLoader loader = new(polarData, VanillaRegistry.Data);
sw.Stop();

Console.WriteLine($"Time to load world with {chunkCount} chunks: {sw.ElapsedMilliseconds}ms");
Console.WriteLine($"Average time per chunk: {sw.ElapsedMilliseconds / (double)chunkCount:F2}ms");
}

[Test]
public void BenchmarkLargerWorld() {
Console.WriteLine("=== Large Polar World Benchmark ===");

const int chunkCount = 50;
ChunkData[] chunks = new ChunkData[chunkCount];
for (int i = 0; i < chunkCount; i++) {
chunks[i] = new ChunkData(384) {
ChunkX = i % 10,
ChunkZ = i / 10
};
chunks[i].FillRandom();
}

Console.WriteLine("Creating world...");
Stopwatch sw = Stopwatch.StartNew();
byte[] polarData = PolarLoader.CreateWorld(chunks);
sw.Stop();
long creationTime = sw.ElapsedMilliseconds;

Console.WriteLine("Loading world...");
sw.Restart();
PolarLoader loader = new(polarData, VanillaRegistry.Data);
sw.Stop();
long loadTime = sw.ElapsedMilliseconds;

Console.WriteLine($"\nResults for {chunkCount} chunks:");
Console.WriteLine($" Creation: {creationTime}ms ({creationTime / (double)chunkCount:F2}ms per chunk)");
Console.WriteLine($" Loading: {loadTime}ms ({loadTime / (double)chunkCount:F2}ms per chunk)");
Console.WriteLine($" Size: {polarData.Length} bytes");
}
}
80 changes: 80 additions & 0 deletions Tests/PolarDetailedProfiling.cs
@@ -0,0 +1,80 @@
using System.Diagnostics;
using System.Reflection;
using Minecraft.Data.Generated;
using Minecraft.Schemas.Chunks;
using PolarWorlds;

namespace Tests;

public class PolarDetailedProfiling {

[Test]
public void ProfileSectionWriting() {
Console.WriteLine("=== Profiling Section Writing ===");

ChunkData chunk = new ChunkData(384) {
ChunkX = 0,
ChunkZ = 0
};
chunk.FillRandom();

// Resolve related types via reflection; WriteSection itself is private, so the timings below go through CreateWorld
var writerType = typeof(PolarLoader);
var dataWriterType = Assembly.Load("Minecraft").GetType("Minecraft.DataWriter");
var chunkSectionType = Assembly.Load("Minecraft").GetType("Minecraft.Schemas.Chunks.ChunkSection");

// Warm up
for (int i = 0; i < 3; i++) {
_ = PolarLoader.CreateWorld([chunk]);
}

// First, just count the sections by iterating them (no section writing happens in this loop)
Stopwatch sw = Stopwatch.StartNew();
int sectionCount = 0;
foreach (var section in chunk.Sections) {
sectionCount++;
}
sw.Stop();

Console.WriteLine($"Chunk has {sectionCount} sections");
Console.WriteLine($"Time to iterate sections: {sw.ElapsedMilliseconds}ms");

// Now measure full creation
sw.Restart();
byte[] result = PolarLoader.CreateWorld([chunk]);
sw.Stop();

Console.WriteLine($"Full creation time (1 chunk): {sw.ElapsedMilliseconds}ms");
Console.WriteLine($"Output size: {result.Length / 1024.0:F2} KB");
}

[Test]
public void ProfileCompressionOnly() {
Console.WriteLine("=== Profiling Compression ===");

// Create some data to compress
byte[] testData = new byte[10 * 1024 * 1024]; // 10MB of random data
Random rand = new Random(42);
rand.NextBytes(testData);

// Test Zstd compression
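// Note: this uses ZstdSharp's default compression level (3), while PolarLoader.CompressZstd now passes
// level 1 (faster, lower ratio), so the loader's real write-path throughput is likely higher than measured here.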
Stopwatch sw = Stopwatch.StartNew();
var compressor = new ZstdSharp.Compressor();
byte[] compressed = compressor.Wrap(testData).ToArray();
sw.Stop();

Console.WriteLine($"Compressed {testData.Length / 1024.0 / 1024.0:F2} MB in {sw.ElapsedMilliseconds}ms");
Console.WriteLine($"Compression ratio: {testData.Length / (double)compressed.Length:F2}x");
Console.WriteLine($"Throughput: {testData.Length / 1024.0 / 1024.0 / (sw.ElapsedMilliseconds / 1000.0):F2} MB/s");

// Test decompression
sw.Restart();
var decompressor = new ZstdSharp.Decompressor();
byte[] decompressed = new byte[testData.Length];
decompressor.Unwrap(compressed, decompressed);
sw.Stop();

Console.WriteLine($"Decompressed in {sw.ElapsedMilliseconds}ms");
Console.WriteLine($"Throughput: {testData.Length / 1024.0 / 1024.0 / (sw.ElapsedMilliseconds / 1000.0):F2} MB/s");
}
}