From 889eba1df501e190f9019cc42bad1606477c4cce Mon Sep 17 00:00:00 2001
From: Mattes D
Date: Fri, 26 May 2023 12:59:31 +0200
Subject: Anvil: Allow loading chunks without HeightMap.

---
 src/WorldStorage/WSSAnvil.cpp | 50 ++++++++++++++++++----------------------------
 1 file changed, 22 insertions(+), 28 deletions(-)

diff --git a/src/WorldStorage/WSSAnvil.cpp b/src/WorldStorage/WSSAnvil.cpp
index 590647848..95ba1e4bf 100644
--- a/src/WorldStorage/WSSAnvil.cpp
+++ b/src/WorldStorage/WSSAnvil.cpp
@@ -1,8 +1,3 @@
-
-// WSSAnvil.cpp
-
-// Implements the cWSSAnvil class representing the Anvil world storage scheme
-
 #include "Globals.h"
 #include "WSSAnvil.h"
 #include "NBTChunkSerializer.h"
@@ -85,7 +80,7 @@ Since only the header is actually in the memory, this number can be high, but st
 ////////////////////////////////////////////////////////////////////////////////
 // cWSSAnvil:
 
-cWSSAnvil::cWSSAnvil(cWorld * a_World, int a_CompressionFactor) :
+cWSSAnvil::cWSSAnvil(cWorld * a_World, int a_CompressionFactor):
 	Super(a_World),
 	m_Compressor(a_CompressionFactor)
 {
@@ -178,7 +173,7 @@ bool cWSSAnvil::SaveChunk(const cChunkCoords & a_Chunk)
 
 
 
-void cWSSAnvil::ChunkLoadFailed(int a_ChunkX, int a_ChunkZ, const AString & a_Reason, const ContiguousByteBufferView a_ChunkDataToSave)
+void cWSSAnvil::ChunkLoadFailed(const cChunkCoords a_ChunkCoords, const AString & a_Reason, const ContiguousByteBufferView a_ChunkDataToSave)
 {
 	// Construct the filename for offloading:
 	auto OffloadFileName = fmt::format(FMT_STRING("{0}{1}region{1}badchunks"), m_World->GetDataPath(), cFile::PathSeparator());
@@ -192,16 +187,16 @@ void cWSSAnvil::ChunkLoadFailed(int a_ChunkX, int a_ChunkZ, const AString & a_Re
 	#endif
 	OffloadFileName.append(fmt::format(
 		FMT_STRING("{}ch.{}.{}.{}-{:02d}-{:02d}-{:02d}-{:02d}-{:02d}.dat"),
-		cFile::PathSeparator(), a_ChunkX, a_ChunkZ,
+		cFile::PathSeparator(), a_ChunkCoords.m_ChunkX, a_ChunkCoords.m_ChunkZ,
 		stm.tm_year + 1900, stm.tm_mon + 1, stm.tm_mday, stm.tm_hour, stm.tm_min, stm.tm_sec
 	));
 
 	// Log the warning to console:
-	const int RegionX = FAST_FLOOR_DIV(a_ChunkX, 32);
-	const int RegionZ = FAST_FLOOR_DIV(a_ChunkZ, 32);
+	const int RegionX = FAST_FLOOR_DIV(a_ChunkCoords.m_ChunkX, 32);
+	const int RegionZ = FAST_FLOOR_DIV(a_ChunkCoords.m_ChunkZ, 32);
 	auto Info = fmt::format(
-		FMT_STRING("Loading chunk [{}, {}] for world {} from file r.{}.{}.mca failed: {} Offloading old chunk data to file {} and regenerating chunk."),
-		a_ChunkX, a_ChunkZ, m_World->GetName(), RegionX, RegionZ, a_Reason, OffloadFileName
+		FMT_STRING("Loading chunk {} for world {} from file r.{}.{}.mca failed: {} Offloading old chunk data to file {} and regenerating chunk."),
+		a_ChunkCoords, m_World->GetName(), RegionX, RegionZ, a_Reason, OffloadFileName
 	);
 	LOGWARNING("%s", Info);
 
@@ -329,7 +324,7 @@ bool cWSSAnvil::LoadChunkFromData(const cChunkCoords & a_Chunk, const Contiguous
 	}
 	catch (const std::exception & Oops)
 	{
-		ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, Oops.what(), a_Data);
+		ChunkLoadFailed(a_Chunk, Oops.what(), a_Data);
 		return false;
 	}
 }
@@ -359,21 +354,21 @@ bool cWSSAnvil::LoadChunkFromNBT(const cChunkCoords & a_Chunk, const cParsedNBT
 	int Level = a_NBT.FindChildByName(0, "Level");
 	if (Level < 0)
 	{
-		ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, "Missing NBT tag: Level", a_RawChunkData);
+		ChunkLoadFailed(a_Chunk, "Missing NBT tag: Level", a_RawChunkData);
 		return false;
 	}
 
 	int Sections = a_NBT.FindChildByName(Level, "Sections");
 	if ((Sections < 0) || (a_NBT.GetType(Sections) != TAG_List))
 	{
-		ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, "Missing NBT tag: Sections", a_RawChunkData);
+		ChunkLoadFailed(a_Chunk, "Missing NBT tag: Sections", a_RawChunkData);
 		return false;
 	}
 
 	eTagType SectionsType = a_NBT.GetChildrenType(Sections);
 	if ((SectionsType != TAG_Compound) && (SectionsType != TAG_End))
 	{
-		ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, "NBT tag has wrong type: Sections", a_RawChunkData);
+		ChunkLoadFailed(a_Chunk, "NBT tag has wrong type: Sections", a_RawChunkData);
 		return false;
 	}
 	for (int Child = a_NBT.GetFirstChild(Sections); Child >= 0; Child = a_NBT.GetNextSibling(Child))
@@ -381,14 +376,14 @@ bool cWSSAnvil::LoadChunkFromNBT(const cChunkCoords & a_Chunk, const cParsedNBT
 		const int SectionYTag = a_NBT.FindChildByName(Child, "Y");
 		if ((SectionYTag < 0) || (a_NBT.GetType(SectionYTag) != TAG_Byte))
 		{
-			ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, "NBT tag missing or has wrong: Y", a_RawChunkData);
+			ChunkLoadFailed(a_Chunk, "NBT tag missing or has wrong: Y", a_RawChunkData);
 			return false;
 		}
 
 		const int Y = a_NBT.GetByte(SectionYTag);
 		if ((Y < 0) || (Y > static_cast<int>(cChunkDef::NumSections - 1)))
 		{
-			ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, "NBT tag exceeds chunk bounds: Y", a_RawChunkData);
+			ChunkLoadFailed(a_Chunk, "NBT tag exceeds chunk bounds: Y", a_RawChunkData);
 			return false;
 		}
 
@@ -404,7 +399,7 @@ bool cWSSAnvil::LoadChunkFromNBT(const cChunkCoords & a_Chunk, const cParsedNBT
 		}
 		else
 		{
-			ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, "Missing chunk block/light data", a_RawChunkData);
+			ChunkLoadFailed(a_Chunk, "Missing chunk block/light data", a_RawChunkData);
 			return false;
 		}
 	}  // for itr - LevelSections[]
@@ -412,15 +407,14 @@ bool cWSSAnvil::LoadChunkFromNBT(const cChunkCoords & a_Chunk, const cParsedNBT
 	// Load the biomes from NBT, if present and valid:
 	if (!LoadBiomeMapFromNBT(Data.BiomeMap, a_NBT, a_NBT.FindChildByName(Level, "Biomes")))
 	{
-		ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, "Missing chunk biome data", a_RawChunkData);
+		ChunkLoadFailed(a_Chunk, "Missing chunk biome data", a_RawChunkData);
 		return false;
 	}
 
-	// Height map too:
+	// Load the Height map, if it fails, recalculate it:
 	if (!LoadHeightMapFromNBT(Data.HeightMap, a_NBT, a_NBT.FindChildByName(Level, "HeightMap")))
 	{
-		ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, "Missing chunk height data", a_RawChunkData);
-		return false;
+		Data.UpdateHeightMap();
 	}
 
 	// Load the entities from NBT:
@@ -3981,21 +3975,21 @@ bool cWSSAnvil::cMCAFile::GetChunkData(const cChunkCoords & a_Chunk, ContiguousB
 	UInt32 ChunkSize = 0;
 	if (m_File.Read(&ChunkSize, 4) != 4)
 	{
-		m_ParentSchema.ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, "Cannot read chunk size", {});
+		m_ParentSchema.ChunkLoadFailed(a_Chunk, "Cannot read chunk size", {});
 		return false;
 	}
 	ChunkSize = ntohl(ChunkSize);
 	if (ChunkSize < 1)
 	{
 		// Chunk size too small
-		m_ParentSchema.ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, "Chunk size too small", {});
+		m_ParentSchema.ChunkLoadFailed(a_Chunk, "Chunk size too small", {});
 		return false;
 	}
 
 	char CompressionType = 0;
 	if (m_File.Read(&CompressionType, 1) != 1)
 	{
-		m_ParentSchema.ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, "Cannot read chunk compression", {});
+		m_ParentSchema.ChunkLoadFailed(a_Chunk, "Cannot read chunk compression", {});
 		return false;
 	}
 	ChunkSize--;
@@ -4003,14 +3997,14 @@ bool cWSSAnvil::cMCAFile::GetChunkData(const cChunkCoords & a_Chunk, ContiguousB
 	a_Data = m_File.Read(ChunkSize);
 	if (a_Data.size() != ChunkSize)
 	{
-		m_ParentSchema.ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, "Cannot read entire chunk data", a_Data);
+		m_ParentSchema.ChunkLoadFailed(a_Chunk, "Cannot read entire chunk data", a_Data);
 		return false;
 	}
 
 	if (CompressionType != 2)
 	{
 		// Chunk is in an unknown compression
-		m_ParentSchema.ChunkLoadFailed(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, fmt::format(FMT_STRING("Unknown chunk compression: {}"), CompressionType), a_Data);
+		m_ParentSchema.ChunkLoadFailed(a_Chunk, fmt::format(FMT_STRING("Unknown chunk compression: {}"), CompressionType), a_Data);
 		return false;
 	}
 	return true;
--
cgit v1.2.3
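
The behavioural core of this patch is the final LoadChunkFromNBT hunk: a chunk whose NBT lacks a usable HeightMap tag is no longer rejected and offloaded; instead, its height map is recalculated from the already-loaded block data via Data.UpdateHeightMap(). The sketch below shows the usual way such a recalculation is done (topmost non-air block per column). It is an illustration only, under assumed names: Width, Height, the block accessor and RecalculateHeightMap() are stand-ins, not the actual UpdateHeightMap() implementation, which is outside this patch.

// Minimal, self-contained sketch of the fallback this commit enables: when the
// "HeightMap" NBT tag is missing or invalid, the height map is rebuilt from block
// data instead of the chunk load failing. Width, Height and the accessor type are
// illustrative assumptions; the real code in Cuberite may differ in detail.
#include <array>
#include <cstddef>
#include <functional>

namespace HeightMapSketch
{
	constexpr int Width  = 16;   // chunk X / Z extent (cf. cChunkDef::Width)
	constexpr int Height = 256;  // chunk Y extent (cf. cChunkDef::Height)

	using HeightMap = std::array<int, static_cast<std::size_t>(Width) * Width>;

	// Hypothetical accessor standing in for the chunk's block storage; returns 0 for air:
	using BlockGetter = std::function<int(int a_RelX, int a_RelY, int a_RelZ)>;

	// For every column, record the Y coordinate of the topmost non-air block:
	inline void RecalculateHeightMap(HeightMap & a_HeightMap, const BlockGetter & a_GetBlock)
	{
		for (int z = 0; z < Width; ++z)
		{
			for (int x = 0; x < Width; ++x)
			{
				int Top = 0;
				for (int y = Height - 1; y >= 0; --y)
				{
					if (a_GetBlock(x, y, z) != 0)
					{
						Top = y;
						break;
					}
				}
				a_HeightMap[static_cast<std::size_t>(z) * Width + x] = Top;
			}
		}
	}
}  // namespace HeightMapSketch

Since the height map is fully derivable from the block data already present in the chunk, the fallback loses no information, which is why the former "Missing chunk height data" failure path could be dropped.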