From 3e741134279e02d204a8d4638f2d46dfbf814d0c Mon Sep 17 00:00:00 2001 From: Tiger Wang Date: Fri, 12 Sep 2014 23:18:02 +0100 Subject: Implemented Chest Minecarts --- src/WorldStorage/NBTChunkSerializer.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src/WorldStorage') diff --git a/src/WorldStorage/NBTChunkSerializer.cpp b/src/WorldStorage/NBTChunkSerializer.cpp index 68e541eba..963e016d6 100644 --- a/src/WorldStorage/NBTChunkSerializer.cpp +++ b/src/WorldStorage/NBTChunkSerializer.cpp @@ -738,7 +738,7 @@ void cNBTChunkSerializer::AddItemFrameEntity(cItemFrame * a_ItemFrame) void cNBTChunkSerializer::AddMinecartChestContents(cMinecartWithChest * a_Minecart) { m_Writer.BeginList("Items", TAG_Compound); - for (int i = 0; i < cMinecartWithChest::NumSlots; i++) + for (int i = 0; i < cMinecartWithChest::ContentsHeight * cMinecartWithChest::ContentsWidth; i++) { const cItem & Item = a_Minecart->GetSlot(i); if (Item.IsEmpty()) -- cgit v1.2.3 From 9e4a5f824ae88e3a19de6a14db91b43c1759e29e Mon Sep 17 00:00:00 2001 From: Tiger Wang Date: Tue, 30 Sep 2014 22:20:21 +0100 Subject: Removed WSSCompact --- src/WorldStorage/CMakeLists.txt | 2 - src/WorldStorage/WSSCompact.cpp | 1066 ------------------------------------- src/WorldStorage/WSSCompact.h | 157 ------ src/WorldStorage/WorldStorage.cpp | 2 - 4 files changed, 1227 deletions(-) delete mode 100644 src/WorldStorage/WSSCompact.cpp delete mode 100644 src/WorldStorage/WSSCompact.h (limited to 'src/WorldStorage') diff --git a/src/WorldStorage/CMakeLists.txt b/src/WorldStorage/CMakeLists.txt index a00ff3b2f..59193db2a 100644 --- a/src/WorldStorage/CMakeLists.txt +++ b/src/WorldStorage/CMakeLists.txt @@ -14,7 +14,6 @@ SET (SRCS ScoreboardSerializer.cpp StatSerializer.cpp WSSAnvil.cpp - WSSCompact.cpp WorldStorage.cpp) SET (HDRS @@ -27,7 +26,6 @@ SET (HDRS ScoreboardSerializer.h StatSerializer.h WSSAnvil.h - WSSCompact.h WorldStorage.h) if(NOT MSVC) diff --git a/src/WorldStorage/WSSCompact.cpp b/src/WorldStorage/WSSCompact.cpp deleted file mode 100644 index 6760186b2..000000000 --- a/src/WorldStorage/WSSCompact.cpp +++ /dev/null @@ -1,1066 +0,0 @@ - -// WSSCompact.cpp - -// Interfaces to the cWSSCompact class representing the "compact" storage schema (PAK-files) - -#include "Globals.h" -#include "WSSCompact.h" -#include "../World.h" -#include "zlib/zlib.h" -#include "json/json.h" -#include "../StringCompression.h" -#include "../BlockEntities/BeaconEntity.h" -#include "../BlockEntities/ChestEntity.h" -#include "../BlockEntities/CommandBlockEntity.h" -#include "../BlockEntities/DispenserEntity.h" -#include "../BlockEntities/FlowerPotEntity.h" -#include "../BlockEntities/FurnaceEntity.h" -#include "../BlockEntities/JukeboxEntity.h" -#include "../BlockEntities/MobHeadEntity.h" -#include "../BlockEntities/NoteEntity.h" -#include "../BlockEntities/SignEntity.h" -#include "../SetChunkData.h" - - - - - -#pragma pack(push, 1) -/// The chunk header, as stored in the file: -struct cWSSCompact::sChunkHeader -{ - int m_ChunkX; - int m_ChunkZ; - int m_CompressedSize; - int m_UncompressedSize; -} ; -#pragma pack(pop) - - - - - -/// The maximum number of PAK files that are cached -const size_t MAX_PAK_FILES = 16; - -/// The maximum number of unsaved chunks before the cPAKFile saves them to disk -const int MAX_DIRTY_CHUNKS = 16; - - - - - -//////////////////////////////////////////////////////////////////////////////// -// cJsonChunkSerializer: - -cJsonChunkSerializer::cJsonChunkSerializer(void) : - m_HasJsonData(false) -{ -} - - - - - -void 
cJsonChunkSerializer::Entity(cEntity * a_Entity) -{ - // TODO: a_Entity->SaveToJson(m_Root); -} - - - - - -void cJsonChunkSerializer::BlockEntity(cBlockEntity * a_BlockEntity) -{ - const char * SaveInto = NULL; - switch (a_BlockEntity->GetBlockType()) - { - case E_BLOCK_BEACON: SaveInto = "Beacons"; break; - case E_BLOCK_CHEST: SaveInto = "Chests"; break; - case E_BLOCK_DISPENSER: SaveInto = "Dispensers"; break; - case E_BLOCK_DROPPER: SaveInto = "Droppers"; break; - case E_BLOCK_FLOWER_POT: SaveInto = "FlowerPots"; break; - case E_BLOCK_FURNACE: SaveInto = "Furnaces"; break; - case E_BLOCK_SIGN_POST: SaveInto = "Signs"; break; - case E_BLOCK_WALLSIGN: SaveInto = "Signs"; break; - case E_BLOCK_NOTE_BLOCK: SaveInto = "Notes"; break; - case E_BLOCK_JUKEBOX: SaveInto = "Jukeboxes"; break; - case E_BLOCK_COMMAND_BLOCK: SaveInto = "CommandBlocks"; break; - case E_BLOCK_HEAD: SaveInto = "MobHeads"; break; - - default: - { - ASSERT(!"Unhandled blocktype in BlockEntities list while saving to JSON"); - break; - } - } // switch (BlockEntity->GetBlockType()) - if (SaveInto == NULL) - { - return; - } - - Json::Value val; - a_BlockEntity->SaveToJson(val); - m_Root[SaveInto].append(val); - m_HasJsonData = true; -} - - - - - -void cJsonChunkSerializer::LightIsValid(bool a_IsLightValid) -{ - if (a_IsLightValid) - { - m_Root["IsLightValid"] = true; - m_HasJsonData = true; - } -} - - - - - -//////////////////////////////////////////////////////////////////////////////// -// cWSSCompact: - -cWSSCompact::~cWSSCompact() -{ - for (cPAKFiles::iterator itr = m_PAKFiles.begin(); itr != m_PAKFiles.end(); ++itr) - { - delete *itr; - } -} - - - - - -bool cWSSCompact::LoadChunk(const cChunkCoords & a_Chunk) -{ - AString ChunkData; - int UncompressedSize = 0; - if (!GetChunkData(a_Chunk, UncompressedSize, ChunkData)) - { - // The reason for failure is already printed in GetChunkData() - return false; - } - - return LoadChunkFromData(a_Chunk, UncompressedSize, ChunkData, m_World); -} - - - - - -bool cWSSCompact::SaveChunk(const cChunkCoords & a_Chunk) -{ - cCSLock Lock(m_CS); - - cPAKFile * f = LoadPAKFile(a_Chunk); - if (f == NULL) - { - // For some reason we couldn't locate the file - LOG("Cannot locate a proper PAK file for chunk [%d, %d]", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ); - return false; - } - return f->SaveChunk(a_Chunk, m_World); -} - - - - - -cWSSCompact::cPAKFile * cWSSCompact::LoadPAKFile(const cChunkCoords & a_Chunk) -{ - // ASSUMES that m_CS has been locked - - // We need to retain this weird conversion code, because some edge chunks are in the wrong PAK file - const int LayerX = FAST_FLOOR_DIV(a_Chunk.m_ChunkX, 32); - const int LayerZ = FAST_FLOOR_DIV(a_Chunk.m_ChunkZ, 32); - - // Is it already cached? 
- for (cPAKFiles::iterator itr = m_PAKFiles.begin(); itr != m_PAKFiles.end(); ++itr) - { - if (((*itr) != NULL) && ((*itr)->GetLayerX() == LayerX) && ((*itr)->GetLayerZ() == LayerZ)) - { - // Move the file to front and return it: - cPAKFile * f = *itr; - if (itr != m_PAKFiles.begin()) - { - m_PAKFiles.erase(itr); - m_PAKFiles.push_front(f); - } - return f; - } - } - - // Load it anew: - AString FileName; - Printf(FileName, "%s/X%i_Z%i.pak", m_World->GetName().c_str(), LayerX, LayerZ); - cPAKFile * f = new cPAKFile(FileName, LayerX, LayerZ, m_CompressionFactor); - if (f == NULL) - { - return NULL; - } - m_PAKFiles.push_front(f); - - // If there are too many PAK files cached, delete the last one used: - if (m_PAKFiles.size() > MAX_PAK_FILES) - { - delete m_PAKFiles.back(); - m_PAKFiles.pop_back(); - } - return f; -} - - - - - -bool cWSSCompact::GetChunkData(const cChunkCoords & a_Chunk, int & a_UncompressedSize, AString & a_Data) -{ - cCSLock Lock(m_CS); - cPAKFile * f = LoadPAKFile(a_Chunk); - if (f == NULL) - { - return false; - } - return f->GetChunkData(a_Chunk, a_UncompressedSize, a_Data); -} - - - - - -/* -// TODO: Rewrite saving to use the same principles as loading -bool cWSSCompact::SetChunkData(const cChunkCoords & a_Chunk, int a_UncompressedSize, const AString & a_Data) -{ - cCSLock Lock(m_CS); - cPAKFile * f = LoadPAKFile(a_Chunk); - if (f == NULL) - { - return false; - } - return f->SetChunkData(a_Chunk, a_UncompressedSize, a_Data); -} -*/ - - - - - -bool cWSSCompact::EraseChunkData(const cChunkCoords & a_Chunk) -{ - cCSLock Lock(m_CS); - cPAKFile * f = LoadPAKFile(a_Chunk); - if (f == NULL) - { - return false; - } - return f->EraseChunkData(a_Chunk); -} - - - - - -void cWSSCompact::LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_Entities, cBlockEntityList & a_BlockEntities, cWorld * a_World) -{ - // Load beacon: - Json::Value AllBeacons = a_Value.get("Beacons", Json::nullValue); - if (!AllBeacons.empty()) - { - for (Json::Value::iterator itr = AllBeacons.begin(); itr != AllBeacons.end(); ++itr) - { - std::auto_ptr BeaconEntity(new cBeaconEntity(0, 0, 0, a_World)); - if (!BeaconEntity->LoadFromJson(*itr)) - { - LOGWARNING("ERROR READING BEACON FROM JSON!"); - } - else - { - a_BlockEntities.push_back(BeaconEntity.release()); - } - } // for itr - AllBeacons[] - } - - // Load chests: - Json::Value AllChests = a_Value.get("Chests", Json::nullValue); - if (!AllChests.empty()) - { - for (Json::Value::iterator itr = AllChests.begin(); itr != AllChests.end(); ++itr) - { - std::auto_ptr ChestEntity(new cChestEntity(0, 0, 0, a_World, E_BLOCK_CHEST)); - if (!ChestEntity->LoadFromJson(*itr)) - { - LOGWARNING("ERROR READING CHEST FROM JSON!"); - } - else - { - a_BlockEntities.push_back(ChestEntity.release()); - } - } // for itr - AllChests[] - } - - // Load dispensers: - Json::Value AllDispensers = a_Value.get("Dispensers", Json::nullValue); - for (Json::Value::iterator itr = AllDispensers.begin(); itr != AllDispensers.end(); ++itr) - { - std::auto_ptr DispenserEntity(new cDispenserEntity(0, 0, 0, a_World)); - if (!DispenserEntity->LoadFromJson(*itr)) - { - LOGWARNING("ERROR READING DISPENSER FROM JSON!"); - } - else - { - a_BlockEntities.push_back(DispenserEntity.release()); - } - } // for itr - AllDispensers[] - - // Load Flowerpots: - Json::Value AllFlowerPots = a_Value.get("FlowerPots", Json::nullValue); - for (Json::Value::iterator itr = AllFlowerPots.begin(); itr != AllFlowerPots.end(); ++itr) - { - std::auto_ptr FlowerPotEntity(new cFlowerPotEntity(0, 0, 0, a_World)); - if 
(!FlowerPotEntity->LoadFromJson(*itr)) - { - LOGWARNING("ERROR READING FLOWERPOT FROM JSON!"); - } - else - { - a_BlockEntities.push_back(FlowerPotEntity.release()); - } - } // for itr - AllFlowerPots[] - - // Load furnaces: - Json::Value AllFurnaces = a_Value.get("Furnaces", Json::nullValue); - for (Json::Value::iterator itr = AllFurnaces.begin(); itr != AllFurnaces.end(); ++itr) - { - // TODO: The block type and meta aren't correct, there's no way to get them here - std::auto_ptr FurnaceEntity(new cFurnaceEntity(0, 0, 0, E_BLOCK_FURNACE, 0, a_World)); - if (!FurnaceEntity->LoadFromJson(*itr)) - { - LOGWARNING("ERROR READING FURNACE FROM JSON!"); - } - else - { - a_BlockEntities.push_back(FurnaceEntity.release()); - } - } // for itr - AllFurnaces[] - - // Load signs: - Json::Value AllSigns = a_Value.get("Signs", Json::nullValue); - for (Json::Value::iterator itr = AllSigns.begin(); itr != AllSigns.end(); ++itr) - { - std::auto_ptr SignEntity(new cSignEntity(E_BLOCK_SIGN_POST, 0, 0, 0, a_World)); - if (!SignEntity->LoadFromJson(*itr)) - { - LOGWARNING("ERROR READING SIGN FROM JSON!"); - } - else - { - a_BlockEntities.push_back(SignEntity.release()); - } - } // for itr - AllSigns[] - - // Load note blocks: - Json::Value AllNotes = a_Value.get("Notes", Json::nullValue); - for (Json::Value::iterator itr = AllNotes.begin(); itr != AllNotes.end(); ++itr) - { - std::auto_ptr NoteEntity(new cNoteEntity(0, 0, 0, a_World)); - if (!NoteEntity->LoadFromJson(*itr)) - { - LOGWARNING("ERROR READING NOTE BLOCK FROM JSON!"); - } - else - { - a_BlockEntities.push_back(NoteEntity.release()); - } - } // for itr - AllNotes[] - - // Load jukeboxes: - Json::Value AllJukeboxes = a_Value.get("Jukeboxes", Json::nullValue); - for (Json::Value::iterator itr = AllJukeboxes.begin(); itr != AllJukeboxes.end(); ++itr) - { - std::auto_ptr JukeboxEntity(new cJukeboxEntity(0, 0, 0, a_World)); - if (!JukeboxEntity->LoadFromJson(*itr)) - { - LOGWARNING("ERROR READING JUKEBOX FROM JSON!"); - } - else - { - a_BlockEntities.push_back(JukeboxEntity.release()); - } - } // for itr - AllJukeboxes[] - - // Load command blocks: - Json::Value AllCommandBlocks = a_Value.get("CommandBlocks", Json::nullValue); - for (Json::Value::iterator itr = AllCommandBlocks.begin(); itr != AllCommandBlocks.end(); ++itr) - { - std::auto_ptr CommandBlockEntity(new cCommandBlockEntity(0, 0, 0, a_World)); - if (!CommandBlockEntity->LoadFromJson(*itr)) - { - LOGWARNING("ERROR READING COMMAND BLOCK FROM JSON!"); - } - else - { - a_BlockEntities.push_back(CommandBlockEntity.release()); - } - } // for itr - AllCommandBlocks[] - - // Load mob heads: - Json::Value AllMobHeads = a_Value.get("MobHeads", Json::nullValue); - for (Json::Value::iterator itr = AllMobHeads.begin(); itr != AllMobHeads.end(); ++itr) - { - std::auto_ptr MobHeadEntity(new cMobHeadEntity(0, 0, 0, a_World)); - if (!MobHeadEntity->LoadFromJson(*itr)) - { - LOGWARNING("ERROR READING MOB HEAD FROM JSON!"); - } - else - { - a_BlockEntities.push_back(MobHeadEntity.release()); - } - } // for itr - AllMobHeads[] -} - - - - - -//////////////////////////////////////////////////////////////////////////////// -// cWSSCompact::cPAKFile - -#define READ(Var) \ - if (f.Read(&Var, sizeof(Var)) != sizeof(Var)) \ - { \ - LOGERROR("ERROR READING %s FROM FILE %s (line %d); file offset %d", #Var, m_FileName.c_str(), __LINE__, f.Tell()); \ - return; \ - } - -cWSSCompact::cPAKFile::cPAKFile(const AString & a_FileName, int a_LayerX, int a_LayerZ, int a_CompressionFactor) : - m_FileName(a_FileName), - 
m_CompressionFactor(a_CompressionFactor), - m_LayerX(a_LayerX), - m_LayerZ(a_LayerZ), - m_NumDirty(0), - m_ChunkVersion( CHUNK_VERSION), // Init with latest version - m_PakVersion( PAK_VERSION) -{ - cFile f; - if (!f.Open(m_FileName, cFile::fmRead)) - { - return; - } - - // Read headers: - READ(m_PakVersion); - if (m_PakVersion != 1) - { - LOGERROR("File \"%s\" is in an unknown pak format (%d)", m_FileName.c_str(), m_PakVersion); - return; - } - - READ(m_ChunkVersion); - switch (m_ChunkVersion) - { - case 1: - { - m_ChunkSize.Set(16, 128, 16); - break; - } - case 2: - case 3: - { - m_ChunkSize.Set(16, 256, 16); - break; - } - default: - { - LOGERROR("File \"%s\" is in an unknown chunk format (%d)", m_FileName.c_str(), m_ChunkVersion); - return; - } - }; - - short NumChunks = 0; - READ(NumChunks); - - // Read chunk headers: - for (int i = 0; i < NumChunks; i++) - { - sChunkHeader * Header = new sChunkHeader; - - // Here we do not use the READ macro, as it does not free the resources - // allocated with new in case of error. - if (f.Read(Header, sizeof(*Header)) != sizeof(*Header)) - { - LOGERROR("ERROR READING %s FROM FILE %s (line %d); file offset %d", "Header", m_FileName.c_str(), __LINE__, f.Tell()); - delete Header; - Header = NULL; - return; - } - m_ChunkHeaders.push_back(Header); - } // for i - chunk headers - - // Read chunk data: - if (f.ReadRestOfFile(m_DataContents) == -1) - { - LOGERROR("Cannot read file \"%s\" contents", m_FileName.c_str()); - return; - } - - if (m_ChunkVersion == 1) // Convert chunks to version 2 - { - UpdateChunk1To2(); - } -#if AXIS_ORDER == AXIS_ORDER_XZY - if (m_ChunkVersion == 2) // Convert chunks to version 3 - { - UpdateChunk2To3(); - } -#endif -} - - - - - -cWSSCompact::cPAKFile::~cPAKFile() -{ - if (m_NumDirty > 0) - { - SynchronizeFile(); - } - for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr) - { - delete *itr; - } -} - - - - - -bool cWSSCompact::cPAKFile::GetChunkData(const cChunkCoords & a_Chunk, int & a_UncompressedSize, AString & a_Data) -{ - int ChunkX = a_Chunk.m_ChunkX; - int ChunkZ = a_Chunk.m_ChunkZ; - sChunkHeader * Header = NULL; - int Offset = 0; - for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr) - { - if (((*itr)->m_ChunkX == ChunkX) && ((*itr)->m_ChunkZ == ChunkZ)) - { - Header = *itr; - break; - } - Offset += (*itr)->m_CompressedSize; - } - if ((Header == NULL) || (Offset + Header->m_CompressedSize > (int)m_DataContents.size())) - { - // Chunk not found / data invalid - return false; - } - - a_UncompressedSize = Header->m_UncompressedSize; - a_Data.assign(m_DataContents, Offset, Header->m_CompressedSize); - return true; -} - - - - - -bool cWSSCompact::cPAKFile::SaveChunk(const cChunkCoords & a_Chunk, cWorld * a_World) -{ - if (!SaveChunkToData(a_Chunk, a_World)) - { - return false; - } - if (m_NumDirty > MAX_DIRTY_CHUNKS) - { - SynchronizeFile(); - } - return true; -} - - - - - -void cWSSCompact::cPAKFile::UpdateChunk1To2() -{ - int Offset = 0; - AString NewDataContents; - int ChunksConverted = 0; - for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr) - { - sChunkHeader * Header = *itr; - - if (ChunksConverted % 32 == 0) - { - LOGINFO("Updating \"%s\" version 1 to version 2: " SIZE_T_FMT " %%", m_FileName.c_str(), (ChunksConverted * 100) / m_ChunkHeaders.size()); - } - ChunksConverted++; - - AString Data; - int UncompressedSize = Header->m_UncompressedSize; - Data.assign(m_DataContents, Offset, 
Header->m_CompressedSize); - Offset += Header->m_CompressedSize; - - // Crude data integrity check: - int ExpectedSize = (16*128*16)*2 + (16*128*16)/2; // For version 1 - if (UncompressedSize < ExpectedSize) - { - LOGWARNING("Chunk [%d, %d] has too short decompressed data (%d bytes out of %d needed), erasing", - Header->m_ChunkX, Header->m_ChunkZ, - UncompressedSize, ExpectedSize - ); - Offset += Header->m_CompressedSize; - continue; - } - - // Decompress the data: - AString UncompressedData; - { - int errorcode = UncompressString(Data.data(), Data.size(), UncompressedData, (size_t)UncompressedSize); - if (errorcode != Z_OK) - { - LOGERROR("Error %d decompressing data for chunk [%d, %d]", - errorcode, - Header->m_ChunkX, Header->m_ChunkZ - ); - Offset += Header->m_CompressedSize; - continue; - } - } - - if (UncompressedSize != (int)UncompressedData.size()) - { - LOGWARNING("Uncompressed data size differs (exp %d bytes, got " SIZE_T_FMT ") for chunk [%d, %d]", - UncompressedSize, UncompressedData.size(), - Header->m_ChunkX, Header->m_ChunkZ - ); - Offset += Header->m_CompressedSize; - continue; - } - - - // Old version is 128 blocks high with YZX axis order - char ConvertedData[cChunkDef::BlockDataSize]; - int Index = 0; - unsigned int InChunkOffset = 0; - for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) - { - for (int y = 0; y < 128; ++y) - { - ConvertedData[Index++] = UncompressedData[y + z * 128 + x * 128 * 16 + InChunkOffset]; - } - // Add 128 empty blocks after an old y column - memset(ConvertedData + Index, E_BLOCK_AIR, 128); - Index += 128; - } - InChunkOffset += (16 * 128 * 16); - for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) // Metadata - { - for (int y = 0; y < 64; ++y) - { - ConvertedData[Index++] = UncompressedData[y + z * 64 + x * 64 * 16 + InChunkOffset]; - } - memset(ConvertedData + Index, 0, 64); - Index += 64; - } - InChunkOffset += (16 * 128 * 16) / 2; - for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) // Block light - { - for (int y = 0; y < 64; ++y) - { - ConvertedData[Index++] = UncompressedData[y + z * 64 + x * 64 * 16 + InChunkOffset]; - } - memset(ConvertedData + Index, 0, 64); - Index += 64; - } - InChunkOffset += (16*128*16)/2; - for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) // Sky light - { - for (int y = 0; y < 64; ++y) - { - ConvertedData[Index++] = UncompressedData[y + z * 64 + x * 64 * 16 + InChunkOffset]; - } - memset(ConvertedData + Index, 0, 64); - Index += 64; - } - InChunkOffset += (16 * 128 * 16) / 2; - - AString Converted(ConvertedData, ARRAYCOUNT(ConvertedData)); - - // Add JSON data afterwards - if (UncompressedData.size() > InChunkOffset) - { - Converted.append( UncompressedData.begin() + InChunkOffset, UncompressedData.end()); - } - - // Re-compress data - AString CompressedData; - { - int errorcode = CompressString(Converted.data(), Converted.size(), CompressedData, m_CompressionFactor); - if (errorcode != Z_OK) - { - LOGERROR("Error %d compressing data for chunk [%d, %d]", - errorcode, - Header->m_ChunkX, Header->m_ChunkZ - ); - continue; - } - } - - // Save into file's cache - Header->m_UncompressedSize = (int)Converted.size(); - Header->m_CompressedSize = (int)CompressedData.size(); - NewDataContents.append(CompressedData); - } - - // Done converting - m_DataContents = NewDataContents; - m_ChunkVersion = 2; - SynchronizeFile(); - - LOGINFO("Updated \"%s\" version 1 to version 2", m_FileName.c_str()); -} - - - - - -void cWSSCompact::cPAKFile::UpdateChunk2To3() -{ - int Offset = 0; - AString 
NewDataContents; - int ChunksConverted = 0; - for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr) - { - sChunkHeader * Header = *itr; - - if (ChunksConverted % 32 == 0) - { - LOGINFO("Updating \"%s\" version 2 to version 3: " SIZE_T_FMT " %%", m_FileName.c_str(), (ChunksConverted * 100) / m_ChunkHeaders.size()); - } - ChunksConverted++; - - AString Data; - int UncompressedSize = Header->m_UncompressedSize; - Data.assign(m_DataContents, Offset, Header->m_CompressedSize); - Offset += Header->m_CompressedSize; - - // Crude data integrity check: - const int ExpectedSize = (16 * 256 * 16) * 2 + (16 * 256 * 16) / 2; // For version 2 - if (UncompressedSize < ExpectedSize) - { - LOGWARNING("Chunk [%d, %d] has too short decompressed data (%d bytes out of %d needed), erasing", - Header->m_ChunkX, Header->m_ChunkZ, - UncompressedSize, ExpectedSize - ); - Offset += Header->m_CompressedSize; - continue; - } - - // Decompress the data: - AString UncompressedData; - { - int errorcode = UncompressString(Data.data(), Data.size(), UncompressedData, (size_t)UncompressedSize); - if (errorcode != Z_OK) - { - LOGERROR("Error %d decompressing data for chunk [%d, %d]", - errorcode, - Header->m_ChunkX, Header->m_ChunkZ - ); - Offset += Header->m_CompressedSize; - continue; - } - } - - if (UncompressedSize != (int)UncompressedData.size()) - { - LOGWARNING("Uncompressed data size differs (exp %d bytes, got " SIZE_T_FMT ") for chunk [%d, %d]", - UncompressedSize, UncompressedData.size(), - Header->m_ChunkX, Header->m_ChunkZ - ); - Offset += Header->m_CompressedSize; - continue; - } - - char ConvertedData[ExpectedSize]; - memset(ConvertedData, 0, ExpectedSize); - - // Cannot use cChunk::MakeIndex because it might change again????????? 
- // For compatibility, use what we know is current - #define MAKE_3_INDEX( x, y, z) ( x + (z * 16) + (y * 16 * 16)) - - unsigned int InChunkOffset = 0; - for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y) // YZX Loop order is important, in 1.1 Y was first then Z then X - { - ConvertedData[ MAKE_3_INDEX(x, y, z) ] = UncompressedData[InChunkOffset]; - ++InChunkOffset; - } // for y, z, x - - - unsigned int index2 = 0; - for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y) - { - ConvertedData[ InChunkOffset + MAKE_3_INDEX(x, y, z)/2 ] |= ( (UncompressedData[ InChunkOffset + index2/2 ] >> ((index2&1)*4)) & 0x0f) << ((x&1)*4); - ++index2; - } - InChunkOffset += index2 / 2; - index2 = 0; - - for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y) - { - ConvertedData[ InChunkOffset + MAKE_3_INDEX(x, y, z)/2 ] |= ( (UncompressedData[ InChunkOffset + index2/2 ] >> ((index2&1)*4)) & 0x0f) << ((x&1)*4); - ++index2; - } - InChunkOffset += index2 / 2; - index2 = 0; - - for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y) - { - ConvertedData[ InChunkOffset + MAKE_3_INDEX(x, y, z)/2 ] |= ( (UncompressedData[ InChunkOffset + index2/2 ] >> ((index2&1)*4)) & 0x0f) << ((x&1)*4); - ++index2; - } - InChunkOffset += index2 / 2; - - AString Converted(ConvertedData, ExpectedSize); - - // Add JSON data afterwards - if (UncompressedData.size() > InChunkOffset) - { - Converted.append( UncompressedData.begin() + InChunkOffset, UncompressedData.end()); - } - - // Re-compress data - AString CompressedData; - { - int errorcode = CompressString(Converted.data(), Converted.size(), CompressedData, m_CompressionFactor); - if (errorcode != Z_OK) - { - LOGERROR("Error %d compressing data for chunk [%d, %d]", - errorcode, - Header->m_ChunkX, Header->m_ChunkZ - ); - continue; - } - } - - // Save into file's cache - Header->m_UncompressedSize = (int)Converted.size(); - Header->m_CompressedSize = (int)CompressedData.size(); - NewDataContents.append(CompressedData); - } - - // Done converting - m_DataContents = NewDataContents; - m_ChunkVersion = 3; - SynchronizeFile(); - - LOGINFO("Updated \"%s\" version 2 to version 3", m_FileName.c_str()); -} - - - - - -bool cWSSCompact::LoadChunkFromData(const cChunkCoords & a_Chunk, int a_UncompressedSize, const AString & a_Data, cWorld * a_World) -{ - // Crude data integrity check: - if (a_UncompressedSize < cChunkDef::BlockDataSize) - { - LOGWARNING("Chunk [%d, %d] has too short decompressed data (%d bytes out of %d needed), erasing", - a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, - a_UncompressedSize, cChunkDef::BlockDataSize - ); - EraseChunkData(a_Chunk); - return false; - } - - // Decompress the data: - AString UncompressedData; - int errorcode = UncompressString(a_Data.data(), a_Data.size(), UncompressedData, (size_t)a_UncompressedSize); - if (errorcode != Z_OK) - { - LOGERROR("Error %d decompressing data for chunk [%d, %d]", - errorcode, - a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ - ); - return false; - } - - if (a_UncompressedSize != (int)UncompressedData.size()) - { - LOGWARNING("Uncompressed data size differs (exp %d bytes, got " SIZE_T_FMT ") for chunk [%d, %d]", - a_UncompressedSize, UncompressedData.size(), - a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ - ); - return false; - } - - cEntityList Entities; - cBlockEntityList BlockEntities; - bool IsLightValid = false; - - if (a_UncompressedSize > cChunkDef::BlockDataSize) - { - Json::Value root; // will contain the root 
value after parsing. - Json::Reader reader; - if (!reader.parse( UncompressedData.data() + cChunkDef::BlockDataSize, root, false)) - { - LOGERROR("Failed to parse trailing JSON in chunk [%d, %d]!", - a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ - ); - } - else - { - LoadEntitiesFromJson(root, Entities, BlockEntities, a_World); - IsLightValid = root.get("IsLightValid", false).asBool(); - } - } - - BLOCKTYPE * BlockData = (BLOCKTYPE *)UncompressedData.data(); - NIBBLETYPE * MetaData = (NIBBLETYPE *)(BlockData + MetaOffset); - NIBBLETYPE * BlockLight = (NIBBLETYPE *)(BlockData + LightOffset); - NIBBLETYPE * SkyLight = (NIBBLETYPE *)(BlockData + SkyLightOffset); - - a_World->QueueSetChunkData(cSetChunkDataPtr(new cSetChunkData( - a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, - BlockData, MetaData, - IsLightValid ? BlockLight : NULL, - IsLightValid ? SkyLight : NULL, - NULL, NULL, - Entities, BlockEntities, - false - ))); - - return true; -} - - - - - -bool cWSSCompact::cPAKFile::EraseChunkData(const cChunkCoords & a_Chunk) -{ - int ChunkX = a_Chunk.m_ChunkX; - int ChunkZ = a_Chunk.m_ChunkZ; - int Offset = 0; - for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr) - { - if (((*itr)->m_ChunkX == ChunkX) && ((*itr)->m_ChunkZ == ChunkZ)) - { - m_DataContents.erase(Offset, (*itr)->m_CompressedSize); - delete *itr; - itr = m_ChunkHeaders.erase(itr); - return true; - } - Offset += (*itr)->m_CompressedSize; - } - - return false; -} - - - - - -bool cWSSCompact::cPAKFile::SaveChunkToData(const cChunkCoords & a_Chunk, cWorld * a_World) -{ - // Serialize the chunk: - cJsonChunkSerializer Serializer; - if (!a_World->GetChunkData(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, Serializer)) - { - // Chunk not valid - LOG("cWSSCompact: Trying to save chunk [%d, %d] that has no data, ignoring request.", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ); - return false; - } - - AString Data; - Data.assign((const char *)Serializer.GetBlockData(), cChunkDef::BlockDataSize); - if (Serializer.HasJsonData()) - { - AString JsonData; - Json::StyledWriter writer; - JsonData = writer.write(Serializer.GetRoot()); - Data.append(JsonData); - } - - // Compress the data: - AString CompressedData; - int errorcode = CompressString(Data.data(), Data.size(), CompressedData, m_CompressionFactor); - if (errorcode != Z_OK) - { - LOGERROR("Error %i compressing data for chunk [%d, %d]", errorcode, a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ); - return false; - } - - // Erase any existing data for the chunk: - EraseChunkData(a_Chunk); - - // Save the header: - sChunkHeader * Header = new sChunkHeader; - if (Header == NULL) - { - LOGWARNING("Cannot create a new chunk header to save chunk [%d, %d]", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ); - return false; - } - Header->m_CompressedSize = (int)CompressedData.size(); - Header->m_ChunkX = a_Chunk.m_ChunkX; - Header->m_ChunkZ = a_Chunk.m_ChunkZ; - Header->m_UncompressedSize = (int)Data.size(); - m_ChunkHeaders.push_back(Header); - - m_DataContents.append(CompressedData.data(), CompressedData.size()); - - m_NumDirty++; - return true; -} - - - - - -#define WRITE(Var) \ - if (f.Write(&Var, sizeof(Var)) != sizeof(Var)) \ - { \ - LOGERROR("cWSSCompact: ERROR writing %s to file \"%s\" (line %d); file offset %d", #Var, m_FileName.c_str(), __LINE__, f.Tell()); \ - return; \ - } - -void cWSSCompact::cPAKFile::SynchronizeFile(void) -{ - cFile f; - if (!f.Open(m_FileName, cFile::fmWrite)) - { - LOGERROR("Cannot open PAK file \"%s\" for writing", m_FileName.c_str()); - return; - } - - WRITE(m_PakVersion); - 
WRITE(m_ChunkVersion); - short NumChunks = (short)m_ChunkHeaders.size(); - WRITE(NumChunks); - for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr) - { - WRITE(**itr); - } - if (f.Write(m_DataContents.data(), m_DataContents.size()) != (int)m_DataContents.size()) - { - LOGERROR("cWSSCompact: ERROR writing chunk contents to file \"%s\" (line %d); file offset %d", m_FileName.c_str(), __LINE__, f.Tell()); - return; - } - m_NumDirty = 0; -} - - - - diff --git a/src/WorldStorage/WSSCompact.h b/src/WorldStorage/WSSCompact.h deleted file mode 100644 index 83e9cb49f..000000000 --- a/src/WorldStorage/WSSCompact.h +++ /dev/null @@ -1,157 +0,0 @@ - -// WSSCompact.h - -// Interfaces to the cWSSCompact class representing the "Compact" storage schema (PAK-files) - - - - - -#pragma once -#ifndef WSSCOMPACT_H_INCLUDED -#define WSSCOMPACT_H_INCLUDED - -#include "WorldStorage.h" -#include "../Vector3.h" -#include "json/json.h" -#include "ChunkDataCallback.h" - - - - - -/// Helper class for serializing a chunk into Json -class cJsonChunkSerializer : - public cChunkDataArrayCollector -{ -public: - - cJsonChunkSerializer(void); - - Json::Value & GetRoot (void) {return m_Root; } - BLOCKTYPE * GetBlockData(void) {return (BLOCKTYPE *)m_BlockData; } - bool HasJsonData (void) const {return m_HasJsonData; } - -protected: - - // NOTE: block data is serialized into inherited cChunkDataCollector's m_BlockData[] array - - // Entities and BlockEntities are serialized to Json - Json::Value m_Root; - bool m_HasJsonData; - - // cChunkDataCollector overrides: - virtual void Entity (cEntity * a_Entity) override; - virtual void BlockEntity (cBlockEntity * a_Entity) override; - virtual void LightIsValid (bool a_IsLightValid) override; -} ; - - - - - -class cWSSCompact : - public cWSSchema -{ -public: - cWSSCompact(cWorld * a_World, int a_CompressionFactor) : cWSSchema(a_World), m_CompressionFactor(a_CompressionFactor) {} - virtual ~cWSSCompact(); - -protected: - - enum - { - // Offsets to individual components in the joined blockdata array - MetaOffset = cChunkDef::NumBlocks, - LightOffset = MetaOffset + cChunkDef::NumBlocks / 2, - SkyLightOffset = LightOffset + cChunkDef::NumBlocks / 2, - } ; - - struct sChunkHeader; - typedef std::vector sChunkHeaders; - - /// Implements a cache for a single PAK file; implements lazy-write in order to be able to write multiple chunks fast - class cPAKFile - { - public: - - cPAKFile(const AString & a_FileName, int a_LayerX, int a_LayerZ, int a_CompressionFactor); - ~cPAKFile(); - - bool GetChunkData(const cChunkCoords & a_Chunk, int & a_UncompressedSize, AString & a_Data); - bool SetChunkData(const cChunkCoords & a_Chunk, int a_UncompressedSize, const AString & a_Data); - bool EraseChunkData(const cChunkCoords & a_Chunk); - - bool SaveChunk(const cChunkCoords & a_Chunk, cWorld * a_World); - - int GetLayerX(void) const {return m_LayerX; } - int GetLayerZ(void) const {return m_LayerZ; } - - static const int PAK_VERSION = 1; -#if AXIS_ORDER == AXIS_ORDER_XZY - static const int CHUNK_VERSION = 3; -#elif AXIS_ORDER == AXIS_ORDER_YZX - static const int CHUNK_VERSION = 2; -#endif - protected: - - AString m_FileName; - int m_CompressionFactor; - int m_LayerX; - int m_LayerZ; - - sChunkHeaders m_ChunkHeaders; - AString m_DataContents; // Data contents of the file, cached - - int m_NumDirty; // Number of chunks that were written into m_DataContents but not into the file - - Vector3i m_ChunkSize; // Is related to m_ChunkVersion - char m_ChunkVersion; - char 
m_PakVersion; - - bool SaveChunkToData(const cChunkCoords & a_Chunk, cWorld * a_World); // Saves the chunk to m_DataContents, updates headers and m_NumDirty - void SynchronizeFile(void); // Writes m_DataContents along with the headers to file, resets m_NumDirty - - void UpdateChunk1To2(void); // Height from 128 to 256 - void UpdateChunk2To3(void); // Axis order from YZX to XZY - } ; - - typedef std::list cPAKFiles; - - cCriticalSection m_CS; - cPAKFiles m_PAKFiles; // A MRU cache of PAK files - - int m_CompressionFactor; - - /// Loads the correct PAK file either from cache or from disk, manages the m_PAKFiles cache - cPAKFile * LoadPAKFile(const cChunkCoords & a_Chunk); - - /// Gets chunk data from the correct file; locks CS as needed - bool GetChunkData(const cChunkCoords & a_Chunk, int & a_UncompressedSize, AString & a_Data); - - /// Sets chunk data to the correct file; locks CS as needed - bool SetChunkData(const cChunkCoords & a_Chunk, int a_UncompressedSize, const AString & a_Data); - - /// Erases chunk data from the correct file; locks CS as needed - bool EraseChunkData(const cChunkCoords & a_Chunk); - - /// Loads the chunk from the data (no locking needed) - bool LoadChunkFromData(const cChunkCoords & a_Chunk, int a_UncompressedSize, const AString & a_Data, cWorld * a_World); - - void LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_Entities, cBlockEntityList & a_BlockEntities, cWorld * a_World); - - // cWSSchema overrides: - virtual bool LoadChunk(const cChunkCoords & a_Chunk) override; - virtual bool SaveChunk(const cChunkCoords & a_Chunk) override; - virtual const AString GetName(void) const override {return "compact"; } -} ; - - - - - -#endif // WSSCOMPACT_H_INCLUDED - - - - diff --git a/src/WorldStorage/WorldStorage.cpp b/src/WorldStorage/WorldStorage.cpp index 179cf9393..c611bfd90 100644 --- a/src/WorldStorage/WorldStorage.cpp +++ b/src/WorldStorage/WorldStorage.cpp @@ -7,7 +7,6 @@ #include "Globals.h" #include "WorldStorage.h" -#include "WSSCompact.h" #include "WSSAnvil.h" #include "../World.h" #include "../Generating/ChunkGenerator.h" @@ -187,7 +186,6 @@ void cWorldStorage::InitSchemas(int a_StorageCompressionFactor) { // The first schema added is considered the default m_Schemas.push_back(new cWSSAnvil (m_World, a_StorageCompressionFactor)); - m_Schemas.push_back(new cWSSCompact (m_World, a_StorageCompressionFactor)); m_Schemas.push_back(new cWSSForgetful(m_World)); // Add new schemas here -- cgit v1.2.3 From a42fa071bc3ac1615e7c9b9d59fb4c882cc6097d Mon Sep 17 00:00:00 2001 From: Mattes D Date: Tue, 21 Oct 2014 22:02:30 +0200 Subject: Properly exported cItemFrame and cHangingEntity to Lua. 
--- src/WorldStorage/NBTChunkSerializer.cpp | 24 +++++++++--------- src/WorldStorage/WSSAnvil.cpp | 43 +++++++++++++++++++++------------ 2 files changed, 39 insertions(+), 28 deletions(-) (limited to 'src/WorldStorage') diff --git a/src/WorldStorage/NBTChunkSerializer.cpp b/src/WorldStorage/NBTChunkSerializer.cpp index 28b9dd042..80d9ab6d2 100644 --- a/src/WorldStorage/NBTChunkSerializer.cpp +++ b/src/WorldStorage/NBTChunkSerializer.cpp @@ -685,21 +685,21 @@ void cNBTChunkSerializer::AddProjectileEntity(cProjectileEntity * a_Projectile) void cNBTChunkSerializer::AddHangingEntity(cHangingEntity * a_Hanging) { - m_Writer.AddByte("Direction", (unsigned char)a_Hanging->GetDirection()); - m_Writer.AddInt("TileX", a_Hanging->GetTileX()); - m_Writer.AddInt("TileY", a_Hanging->GetTileY()); - m_Writer.AddInt("TileZ", a_Hanging->GetTileZ()); - switch (a_Hanging->GetDirection()) + m_Writer.AddInt("TileX", a_Hanging->GetBlockX()); + m_Writer.AddInt("TileY", a_Hanging->GetBlockY()); + m_Writer.AddInt("TileZ", a_Hanging->GetBlockZ()); + switch (a_Hanging->GetFacing()) { - case BLOCK_FACE_YM: m_Writer.AddByte("Dir", (unsigned char)2); break; - case BLOCK_FACE_YP: m_Writer.AddByte("Dir", (unsigned char)1); break; - case BLOCK_FACE_ZM: m_Writer.AddByte("Dir", (unsigned char)0); break; - case BLOCK_FACE_ZP: m_Writer.AddByte("Dir", (unsigned char)3); break; + case BLOCK_FACE_XM: m_Writer.AddByte("Facing", 1); break; + case BLOCK_FACE_XP: m_Writer.AddByte("Facing", 3); break; + case BLOCK_FACE_ZM: m_Writer.AddByte("Facing", 2); break; + case BLOCK_FACE_ZP: m_Writer.AddByte("Facing", 0); break; - case BLOCK_FACE_XM: - case BLOCK_FACE_XP: + case BLOCK_FACE_YM: + case BLOCK_FACE_YP: case BLOCK_FACE_NONE: { + // These directions are invalid, but they may have been previously loaded, so keep them. 
break; } } @@ -740,7 +740,7 @@ void cNBTChunkSerializer::AddItemFrameEntity(cItemFrame * a_ItemFrame) AddBasicEntity(a_ItemFrame, "ItemFrame"); AddHangingEntity(a_ItemFrame); AddItem(a_ItemFrame->GetItem(), -1, "Item"); - m_Writer.AddByte("ItemRotation", (unsigned char)a_ItemFrame->GetRotation()); + m_Writer.AddByte("ItemRotation", (unsigned char)a_ItemFrame->GetItemRotation()); m_Writer.AddFloat("ItemDropChance", 1.0F); m_Writer.EndCompound(); } diff --git a/src/WorldStorage/WSSAnvil.cpp b/src/WorldStorage/WSSAnvil.cpp index 092b9514c..eba21c659 100644 --- a/src/WorldStorage/WSSAnvil.cpp +++ b/src/WorldStorage/WSSAnvil.cpp @@ -1660,30 +1660,41 @@ void cWSSAnvil::LoadExpOrbFromNBT(cEntityList & a_Entities, const cParsedNBT & a void cWSSAnvil::LoadHangingFromNBT(cHangingEntity & a_Hanging, const cParsedNBT & a_NBT, int a_TagIdx) { - int Direction = a_NBT.FindChildByName(a_TagIdx, "Direction"); - if (Direction > 0) + // "Facing" tag is the prime source of the Facing; if not available, translate from older "Direction" or "Dir" + int Facing = a_NBT.FindChildByName(a_TagIdx, "Facing"); + if (Facing > 0) { - Direction = (int)a_NBT.GetByte(Direction); - if ((Direction < 2) || (Direction > 5)) + Facing = (int)a_NBT.GetByte(Facing); + if ((Facing >= 2) && (Facing <= 5)) { - a_Hanging.SetDirection(BLOCK_FACE_NORTH); - } - else - { - a_Hanging.SetDirection(static_cast(Direction)); + a_Hanging.SetFacing(static_cast(Facing)); } } else { - Direction = a_NBT.FindChildByName(a_TagIdx, "Dir"); - if (Direction > 0) + Facing = a_NBT.FindChildByName(a_TagIdx, "Direction"); + if (Facing > 0) + { + switch ((int)a_NBT.GetByte(Facing)) + { + case 0: a_Hanging.SetFacing(BLOCK_FACE_ZM); break; + case 1: a_Hanging.SetFacing(BLOCK_FACE_XM); break; + case 2: a_Hanging.SetFacing(BLOCK_FACE_ZP); break; + case 3: a_Hanging.SetFacing(BLOCK_FACE_XP); break; + } + } + else { - switch ((int)a_NBT.GetByte(Direction)) + Facing = a_NBT.FindChildByName(a_TagIdx, "Dir"); // Has values 0 and 2 swapped + if (Facing > 0) { - case 0: a_Hanging.SetDirection(BLOCK_FACE_NORTH); break; - case 1: a_Hanging.SetDirection(BLOCK_FACE_TOP); break; - case 2: a_Hanging.SetDirection(BLOCK_FACE_BOTTOM); break; - case 3: a_Hanging.SetDirection(BLOCK_FACE_SOUTH); break; + switch ((int)a_NBT.GetByte(Facing)) + { + case 0: a_Hanging.SetFacing(BLOCK_FACE_ZP); break; + case 1: a_Hanging.SetFacing(BLOCK_FACE_XM); break; + case 2: a_Hanging.SetFacing(BLOCK_FACE_ZM); break; + case 3: a_Hanging.SetFacing(BLOCK_FACE_XP); break; + } } } } -- cgit v1.2.3 From b979cad8931e6d107e18d424274aee4bd2e59b41 Mon Sep 17 00:00:00 2001 From: Mattes D Date: Tue, 21 Oct 2014 22:32:17 +0200 Subject: cItemFrame: Fixed a forgotten rename. 
--- src/WorldStorage/WSSAnvil.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src/WorldStorage') diff --git a/src/WorldStorage/WSSAnvil.cpp b/src/WorldStorage/WSSAnvil.cpp index eba21c659..8ebcff936 100644 --- a/src/WorldStorage/WSSAnvil.cpp +++ b/src/WorldStorage/WSSAnvil.cpp @@ -1743,7 +1743,7 @@ void cWSSAnvil::LoadItemFrameFromNBT(cEntityList & a_Entities, const cParsedNBT int Rotation = a_NBT.FindChildByName(a_TagIdx, "ItemRotation"); if (Rotation > 0) { - ItemFrame->SetRotation((Byte)a_NBT.GetByte(Rotation)); + ItemFrame->SetItemRotation((Byte)a_NBT.GetByte(Rotation)); } a_Entities.push_back(ItemFrame.release()); -- cgit v1.2.3 From a26541a7c3ced0569098edd0aae61c097c2912f4 Mon Sep 17 00:00:00 2001 From: Tiger Wang Date: Mon, 20 Oct 2014 21:55:07 +0100 Subject: En masse NULL -> nullptr replace --- src/WorldStorage/NBTChunkSerializer.cpp | 2 +- src/WorldStorage/ScoreboardSerializer.cpp | 6 +-- src/WorldStorage/WSSAnvil.cpp | 88 +++++++++++++++---------------- src/WorldStorage/WSSAnvil.h | 6 +-- src/WorldStorage/WorldStorage.cpp | 4 +- 5 files changed, 53 insertions(+), 53 deletions(-) (limited to 'src/WorldStorage') diff --git a/src/WorldStorage/NBTChunkSerializer.cpp b/src/WorldStorage/NBTChunkSerializer.cpp index 80d9ab6d2..d85a5c329 100644 --- a/src/WorldStorage/NBTChunkSerializer.cpp +++ b/src/WorldStorage/NBTChunkSerializer.cpp @@ -467,7 +467,7 @@ void cNBTChunkSerializer::AddMinecartEntity(cMinecart * a_Minecart) void cNBTChunkSerializer::AddMonsterEntity(cMonster * a_Monster) { - const char * EntityClass = NULL; + const char * EntityClass = nullptr; switch (a_Monster->GetMobType()) { case mtBat: EntityClass = "Bat"; break; diff --git a/src/WorldStorage/ScoreboardSerializer.cpp b/src/WorldStorage/ScoreboardSerializer.cpp index e30eecf67..404604382 100644 --- a/src/WorldStorage/ScoreboardSerializer.cpp +++ b/src/WorldStorage/ScoreboardSerializer.cpp @@ -174,13 +174,13 @@ void cScoreboardSerializer::SaveScoreboardToNBT(cFastNBTWriter & a_Writer) a_Writer.BeginCompound("DisplaySlots"); cObjective * Objective = m_ScoreBoard->GetObjectiveIn(cScoreboard::dsList); - a_Writer.AddString("slot_0", (Objective == NULL) ? "" : Objective->GetName()); + a_Writer.AddString("slot_0", (Objective == nullptr) ? "" : Objective->GetName()); Objective = m_ScoreBoard->GetObjectiveIn(cScoreboard::dsSidebar); - a_Writer.AddString("slot_1", (Objective == NULL) ? "" : Objective->GetName()); + a_Writer.AddString("slot_1", (Objective == nullptr) ? "" : Objective->GetName()); Objective = m_ScoreBoard->GetObjectiveIn(cScoreboard::dsName); - a_Writer.AddString("slot_2", (Objective == NULL) ? "" : Objective->GetName()); + a_Writer.AddString("slot_2", (Objective == nullptr) ? 
"" : Objective->GetName()); a_Writer.EndCompound(); // DisplaySlots diff --git a/src/WorldStorage/WSSAnvil.cpp b/src/WorldStorage/WSSAnvil.cpp index 8ebcff936..af7551ee4 100644 --- a/src/WorldStorage/WSSAnvil.cpp +++ b/src/WorldStorage/WSSAnvil.cpp @@ -126,7 +126,7 @@ cWSSAnvil::cWSSAnvil(cWorld * a_World, int a_CompressionFactor) : #endif // _DEBUG gzFile gz = gzopen((FILE_IO_PREFIX + fnam).c_str(), "wb"); - if (gz != NULL) + if (gz != nullptr) { gzwrite(gz, Writer.GetResult().data(), (unsigned)Writer.GetResult().size()); } @@ -193,7 +193,7 @@ bool cWSSAnvil::GetChunkData(const cChunkCoords & a_Chunk, AString & a_Data) { cCSLock Lock(m_CS); cMCAFile * File = LoadMCAFile(a_Chunk); - if (File == NULL) + if (File == nullptr) { return false; } @@ -208,7 +208,7 @@ bool cWSSAnvil::SetChunkData(const cChunkCoords & a_Chunk, const AString & a_Dat { cCSLock Lock(m_CS); cMCAFile * File = LoadMCAFile(a_Chunk); - if (File == NULL) + if (File == nullptr) { return false; } @@ -234,7 +234,7 @@ cWSSAnvil::cMCAFile * cWSSAnvil::LoadMCAFile(const cChunkCoords & a_Chunk) // Is it already cached? for (cMCAFiles::iterator itr = m_Files.begin(); itr != m_Files.end(); ++itr) { - if (((*itr) != NULL) && ((*itr)->GetRegionX() == RegionX) && ((*itr)->GetRegionZ() == RegionZ)) + if (((*itr) != nullptr) && ((*itr)->GetRegionX() == RegionX) && ((*itr)->GetRegionZ() == RegionZ)) { // Move the file to front and return it: cMCAFile * f = *itr; @@ -253,9 +253,9 @@ cWSSAnvil::cMCAFile * cWSSAnvil::LoadMCAFile(const cChunkCoords & a_Chunk) cFile::CreateFolder(FILE_IO_PREFIX + FileName); AppendPrintf(FileName, "/r.%d.%d.mca", RegionX, RegionZ); cMCAFile * f = new cMCAFile(FileName, RegionX, RegionZ); - if (f == NULL) + if (f == nullptr) { - return NULL; + return nullptr; } m_Files.push_front(f); @@ -373,7 +373,7 @@ bool cWSSAnvil::LoadChunkFromNBT(const cChunkCoords & a_Chunk, const cParsedNBT // Load the biomes from NBT, if present and valid. First try MCS-style, then Vanilla-style: cChunkDef::BiomeMap BiomeMap; cChunkDef::BiomeMap * Biomes = LoadBiomeMapFromNBT(&BiomeMap, a_NBT, a_NBT.FindChildByName(Level, "MCSBiomes")); - if (Biomes == NULL) + if (Biomes == nullptr) { // MCS-style biomes not available, load vanilla-style: Biomes = LoadVanillaBiomeMapFromNBT(&BiomeMap, a_NBT, a_NBT.FindChildByName(Level, "Biomes")); @@ -426,9 +426,9 @@ bool cWSSAnvil::LoadChunkFromNBT(const cChunkCoords & a_Chunk, const cParsedNBT cSetChunkDataPtr SetChunkData(new cSetChunkData( a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, BlockTypes, MetaData, - IsLightValid ? BlockLight : NULL, - IsLightValid ? SkyLight : NULL, - NULL, Biomes, + IsLightValid ? BlockLight : nullptr, + IsLightValid ? 
SkyLight : nullptr, + nullptr, Biomes, Entities, BlockEntities, false )); @@ -525,12 +525,12 @@ cChunkDef::BiomeMap * cWSSAnvil::LoadVanillaBiomeMapFromNBT(cChunkDef::BiomeMap { if ((a_TagIdx < 0) || (a_NBT.GetType(a_TagIdx) != TAG_ByteArray)) { - return NULL; + return nullptr; } if (a_NBT.GetDataLength(a_TagIdx) != 16 * 16) { // The biomes stored don't match in size - return NULL; + return nullptr; } const unsigned char * VanillaBiomeData = (const unsigned char *)(a_NBT.GetData(a_TagIdx)); for (size_t i = 0; i < ARRAYCOUNT(*a_BiomeMap); i++) @@ -538,7 +538,7 @@ cChunkDef::BiomeMap * cWSSAnvil::LoadVanillaBiomeMapFromNBT(cChunkDef::BiomeMap if ((VanillaBiomeData)[i] == 0xff) { // Unassigned biomes - return NULL; + return nullptr; } (*a_BiomeMap)[i] = (EMCSBiome)(VanillaBiomeData[i]); } @@ -553,12 +553,12 @@ cChunkDef::BiomeMap * cWSSAnvil::LoadBiomeMapFromNBT(cChunkDef::BiomeMap * a_Bio { if ((a_TagIdx < 0) || (a_NBT.GetType(a_TagIdx) != TAG_IntArray)) { - return NULL; + return nullptr; } if (a_NBT.GetDataLength(a_TagIdx) != sizeof(*a_BiomeMap)) { // The biomes stored don't match in size - return NULL; + return nullptr; } const char * BiomeData = (a_NBT.GetData(a_TagIdx)); for (size_t i = 0; i < ARRAYCOUNT(*a_BiomeMap); i++) @@ -567,7 +567,7 @@ cChunkDef::BiomeMap * cWSSAnvil::LoadBiomeMapFromNBT(cChunkDef::BiomeMap * a_Bio if ((*a_BiomeMap)[i] == 0xff) { // Unassigned biomes - return NULL; + return nullptr; } } return a_BiomeMap; @@ -631,7 +631,7 @@ void cWSSAnvil::LoadBlockEntitiesFromNBT(cBlockEntityList & a_BlockEntities, con BLOCKTYPE BlockType = cChunkDef::GetBlock(a_BlockTypes, RelX, RelY, RelZ); NIBBLETYPE BlockMeta = cChunkDef::GetNibble(a_BlockMetas, RelX, RelY, RelZ); std::auto_ptr be(LoadBlockEntityFromNBT(a_NBT, Child, x, y, z, BlockType, BlockMeta)); - if (be.get() == NULL) + if (be.get() == nullptr) { continue; } @@ -670,7 +670,7 @@ cBlockEntity * cWSSAnvil::LoadBlockEntityFromNBT(const cParsedNBT & a_NBT, int a case E_BLOCK_WALLSIGN: return LoadSignFromNBT (a_NBT, a_Tag, a_BlockX, a_BlockY, a_BlockZ, E_BLOCK_WALLSIGN); // Blocktypes that have block entities but don't load their contents from disk: - case E_BLOCK_ENDER_CHEST: return NULL; + case E_BLOCK_ENDER_CHEST: return nullptr; } // All the other blocktypes should have no entities assigned to them. 
Report an error: @@ -686,7 +686,7 @@ cBlockEntity * cWSSAnvil::LoadBlockEntityFromNBT(const cParsedNBT & a_NBT, int a ItemTypeToString(a_BlockType).c_str(), a_BlockType, TypeName.c_str(), a_BlockX, a_BlockY, a_BlockZ ); - return NULL; + return nullptr; } @@ -836,7 +836,7 @@ cBlockEntity * cWSSAnvil::LoadBeaconFromNBT(const cParsedNBT & a_NBT, int a_TagI // Check if the data has a proper type: if (!CheckBlockEntityType(a_NBT, a_TagIdx, "Beacon")) { - return NULL; + return nullptr; } std::auto_ptr Beacon(new cBeaconEntity(a_BlockX, a_BlockY, a_BlockZ, m_World)); @@ -881,13 +881,13 @@ cBlockEntity * cWSSAnvil::LoadChestFromNBT(const cParsedNBT & a_NBT, int a_TagId // https://github.com/mc-server/MCServer/blob/d0551e2e0a98a28f31a88d489d17b408e4a7d38d/src/WorldStorage/WSSAnvil.cpp#L637 if (!CheckBlockEntityType(a_NBT, a_TagIdx, "Chest") && !CheckBlockEntityType(a_NBT, a_TagIdx, "TrappedChest")) { - return NULL; + return nullptr; } int Items = a_NBT.FindChildByName(a_TagIdx, "Items"); if ((Items < 0) || (a_NBT.GetType(Items) != TAG_List)) { - return NULL; // Make it an empty chest - the chunk loader will provide an empty cChestEntity for this + return nullptr; // Make it an empty chest - the chunk loader will provide an empty cChestEntity for this } std::auto_ptr Chest(new cChestEntity(a_BlockX, a_BlockY, a_BlockZ, m_World, a_ChestBlockType)); LoadItemGridFromNBT(Chest->GetContents(), a_NBT, Items); @@ -903,7 +903,7 @@ cBlockEntity * cWSSAnvil::LoadCommandBlockFromNBT(const cParsedNBT & a_NBT, int // Check if the data has a proper type: if (!CheckBlockEntityType(a_NBT, a_TagIdx, "Control")) { - return NULL; + return nullptr; } std::auto_ptr CmdBlock(new cCommandBlockEntity(a_BlockX, a_BlockY, a_BlockZ, m_World)); @@ -940,13 +940,13 @@ cBlockEntity * cWSSAnvil::LoadDispenserFromNBT(const cParsedNBT & a_NBT, int a_T // Check if the data has a proper type: if (!CheckBlockEntityType(a_NBT, a_TagIdx, "Trap")) { - return NULL; + return nullptr; } int Items = a_NBT.FindChildByName(a_TagIdx, "Items"); if ((Items < 0) || (a_NBT.GetType(Items) != TAG_List)) { - return NULL; // Make it an empty dispenser - the chunk loader will provide an empty cDispenserEntity for this + return nullptr; // Make it an empty dispenser - the chunk loader will provide an empty cDispenserEntity for this } std::auto_ptr Dispenser(new cDispenserEntity(a_BlockX, a_BlockY, a_BlockZ, m_World)); LoadItemGridFromNBT(Dispenser->GetContents(), a_NBT, Items); @@ -962,13 +962,13 @@ cBlockEntity * cWSSAnvil::LoadDropperFromNBT(const cParsedNBT & a_NBT, int a_Tag // Check if the data has a proper type: if (!CheckBlockEntityType(a_NBT, a_TagIdx, "Dropper")) { - return NULL; + return nullptr; } int Items = a_NBT.FindChildByName(a_TagIdx, "Items"); if ((Items < 0) || (a_NBT.GetType(Items) != TAG_List)) { - return NULL; // Make it an empty dropper - the chunk loader will provide an empty cDropperEntity for this + return nullptr; // Make it an empty dropper - the chunk loader will provide an empty cDropperEntity for this } std::auto_ptr Dropper(new cDropperEntity(a_BlockX, a_BlockY, a_BlockZ, m_World)); LoadItemGridFromNBT(Dropper->GetContents(), a_NBT, Items); @@ -984,7 +984,7 @@ cBlockEntity * cWSSAnvil::LoadFlowerPotFromNBT(const cParsedNBT & a_NBT, int a_T // Check if the data has a proper type: if (!CheckBlockEntityType(a_NBT, a_TagIdx, "FlowerPot")) { - return NULL; + return nullptr; } std::auto_ptr FlowerPot(new cFlowerPotEntity(a_BlockX, a_BlockY, a_BlockZ, m_World)); @@ -1015,13 +1015,13 @@ cBlockEntity * 
cWSSAnvil::LoadFurnaceFromNBT(const cParsedNBT & a_NBT, int a_Tag // Check if the data has a proper type: if (!CheckBlockEntityType(a_NBT, a_TagIdx, "Furnace")) { - return NULL; + return nullptr; } int Items = a_NBT.FindChildByName(a_TagIdx, "Items"); if ((Items < 0) || (a_NBT.GetType(Items) != TAG_List)) { - return NULL; // Make it an empty furnace - the chunk loader will provide an empty cFurnaceEntity for this + return nullptr; // Make it an empty furnace - the chunk loader will provide an empty cFurnaceEntity for this } std::auto_ptr Furnace(new cFurnaceEntity(a_BlockX, a_BlockY, a_BlockZ, a_BlockType, a_BlockMeta, m_World)); @@ -1073,13 +1073,13 @@ cBlockEntity * cWSSAnvil::LoadHopperFromNBT(const cParsedNBT & a_NBT, int a_TagI // Check if the data has a proper type: if (!CheckBlockEntityType(a_NBT, a_TagIdx, "Hopper")) { - return NULL; + return nullptr; } int Items = a_NBT.FindChildByName(a_TagIdx, "Items"); if ((Items < 0) || (a_NBT.GetType(Items) != TAG_List)) { - return NULL; // Make it an empty hopper - the chunk loader will provide an empty cHopperEntity for this + return nullptr; // Make it an empty hopper - the chunk loader will provide an empty cHopperEntity for this } std::auto_ptr Hopper(new cHopperEntity(a_BlockX, a_BlockY, a_BlockZ, m_World)); LoadItemGridFromNBT(Hopper->GetContents(), a_NBT, Items); @@ -1095,7 +1095,7 @@ cBlockEntity * cWSSAnvil::LoadJukeboxFromNBT(const cParsedNBT & a_NBT, int a_Tag // Check if the data has a proper type: if (!CheckBlockEntityType(a_NBT, a_TagIdx, "RecordPlayer")) { - return NULL; + return nullptr; } std::auto_ptr Jukebox(new cJukeboxEntity(a_BlockX, a_BlockY, a_BlockZ, m_World)); @@ -1116,7 +1116,7 @@ cBlockEntity * cWSSAnvil::LoadMobHeadFromNBT(const cParsedNBT & a_NBT, int a_Tag // Check if the data has a proper type: if (!CheckBlockEntityType(a_NBT, a_TagIdx, "Skull")) { - return NULL; + return nullptr; } std::auto_ptr MobHead(new cMobHeadEntity(a_BlockX, a_BlockY, a_BlockZ, m_World)); @@ -1151,7 +1151,7 @@ cBlockEntity * cWSSAnvil::LoadNoteBlockFromNBT(const cParsedNBT & a_NBT, int a_T // Check if the data has a proper type: if (!CheckBlockEntityType(a_NBT, a_TagIdx, "Music")) { - return NULL; + return nullptr; } std::auto_ptr NoteBlock(new cNoteEntity(a_BlockX, a_BlockY, a_BlockZ, m_World)); @@ -1172,7 +1172,7 @@ cBlockEntity * cWSSAnvil::LoadSignFromNBT(const cParsedNBT & a_NBT, int a_TagIdx // Check if the data has a proper type: if (!CheckBlockEntityType(a_NBT, a_TagIdx, "Sign")) { - return NULL; + return nullptr; } std::auto_ptr Sign(new cSignEntity(a_BlockType, a_BlockX, a_BlockY, a_BlockZ, m_World)); @@ -1755,7 +1755,7 @@ void cWSSAnvil::LoadItemFrameFromNBT(cEntityList & a_Entities, const cParsedNBT void cWSSAnvil::LoadArrowFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx) { - std::auto_ptr Arrow(new cArrowEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0))); + std::auto_ptr Arrow(new cArrowEntity(nullptr, 0, 0, 0, Vector3d(0, 0, 0))); if (!LoadProjectileBaseFromNBT(*Arrow.get(), a_NBT, a_TagIdx)) { return; @@ -1819,7 +1819,7 @@ void cWSSAnvil::LoadArrowFromNBT(cEntityList & a_Entities, const cParsedNBT & a_ void cWSSAnvil::LoadSplashPotionFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx) { - std::auto_ptr SplashPotion(new cSplashPotionEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0), cItem())); + std::auto_ptr SplashPotion(new cSplashPotionEntity(nullptr, 0, 0, 0, Vector3d(0, 0, 0), cItem())); if (!LoadProjectileBaseFromNBT(*SplashPotion.get(), a_NBT, a_TagIdx)) { return; @@ -1843,7 +1843,7 @@ 
void cWSSAnvil::LoadSplashPotionFromNBT(cEntityList & a_Entities, const cParsedN void cWSSAnvil::LoadSnowballFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx) { - std::auto_ptr Snowball(new cThrownSnowballEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0))); + std::auto_ptr Snowball(new cThrownSnowballEntity(nullptr, 0, 0, 0, Vector3d(0, 0, 0))); if (!LoadProjectileBaseFromNBT(*Snowball.get(), a_NBT, a_TagIdx)) { return; @@ -1859,7 +1859,7 @@ void cWSSAnvil::LoadSnowballFromNBT(cEntityList & a_Entities, const cParsedNBT & void cWSSAnvil::LoadEggFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx) { - std::auto_ptr Egg(new cThrownEggEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0))); + std::auto_ptr Egg(new cThrownEggEntity(nullptr, 0, 0, 0, Vector3d(0, 0, 0))); if (!LoadProjectileBaseFromNBT(*Egg.get(), a_NBT, a_TagIdx)) { return; @@ -1875,7 +1875,7 @@ void cWSSAnvil::LoadEggFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NB void cWSSAnvil::LoadFireballFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx) { - std::auto_ptr Fireball(new cGhastFireballEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0))); + std::auto_ptr Fireball(new cGhastFireballEntity(nullptr, 0, 0, 0, Vector3d(0, 0, 0))); if (!LoadProjectileBaseFromNBT(*Fireball.get(), a_NBT, a_TagIdx)) { return; @@ -1891,7 +1891,7 @@ void cWSSAnvil::LoadFireballFromNBT(cEntityList & a_Entities, const cParsedNBT & void cWSSAnvil::LoadFireChargeFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx) { - std::auto_ptr FireCharge(new cFireChargeEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0))); + std::auto_ptr FireCharge(new cFireChargeEntity(nullptr, 0, 0, 0, Vector3d(0, 0, 0))); if (!LoadProjectileBaseFromNBT(*FireCharge.get(), a_NBT, a_TagIdx)) { return; @@ -1907,7 +1907,7 @@ void cWSSAnvil::LoadFireChargeFromNBT(cEntityList & a_Entities, const cParsedNBT void cWSSAnvil::LoadThrownEnderpearlFromNBT(cEntityList & a_Entities, const cParsedNBT & a_NBT, int a_TagIdx) { - std::auto_ptr Enderpearl(new cThrownEnderPearlEntity(NULL, 0, 0, 0, Vector3d(0, 0, 0))); + std::auto_ptr Enderpearl(new cThrownEnderPearlEntity(nullptr, 0, 0, 0, Vector3d(0, 0, 0))); if (!LoadProjectileBaseFromNBT(*Enderpearl.get(), a_NBT, a_TagIdx)) { return; @@ -2864,7 +2864,7 @@ bool cWSSAnvil::cMCAFile::OpenFile(bool a_IsForReading) if (m_File.Read(m_Header, sizeof(m_Header)) != sizeof(m_Header)) { // Cannot read the header - perhaps the file has just been created? 
- // Try writing a NULL header (both chunk offsets and timestamps): + // Try writing a nullptr header (both chunk offsets and timestamps): memset(m_Header, 0, sizeof(m_Header)); if ( (m_File.Write(m_Header, sizeof(m_Header)) != sizeof(m_Header)) || // Real header - chunk offsets diff --git a/src/WorldStorage/WSSAnvil.h b/src/WorldStorage/WSSAnvil.h index 591ec6757..9c579a617 100644 --- a/src/WorldStorage/WSSAnvil.h +++ b/src/WorldStorage/WSSAnvil.h @@ -113,10 +113,10 @@ protected: /// Saves the chunk into NBT data using a_Writer; returns true on success bool SaveChunkToNBT(const cChunkCoords & a_Chunk, cFastNBTWriter & a_Writer); - /// Loads the chunk's biome map from vanilla-format; returns a_BiomeMap if biomes present and valid, NULL otherwise + /// Loads the chunk's biome map from vanilla-format; returns a_BiomeMap if biomes present and valid, nullptr otherwise cChunkDef::BiomeMap * LoadVanillaBiomeMapFromNBT(cChunkDef::BiomeMap * a_BiomeMap, const cParsedNBT & a_NBT, int a_TagIdx); - /// Loads the chunk's biome map from MCS format; returns a_BiomeMap if biomes present and valid, NULL otherwise + /// Loads the chunk's biome map from MCS format; returns a_BiomeMap if biomes present and valid, nullptr otherwise cChunkDef::BiomeMap * LoadBiomeMapFromNBT(cChunkDef::BiomeMap * a_BiomeMap, const cParsedNBT & a_NBT, int a_TagIdx); /// Loads the chunk's entities from NBT data (a_Tag is the Level\\Entities list tag; may be -1) @@ -126,7 +126,7 @@ protected: void LoadBlockEntitiesFromNBT(cBlockEntityList & a_BlockEntitites, const cParsedNBT & a_NBT, int a_Tag, BLOCKTYPE * a_BlockTypes, NIBBLETYPE * a_BlockMetas); /** Loads the data for a block entity from the specified NBT tag. - Returns the loaded block entity, or NULL upon failure. */ + Returns the loaded block entity, or nullptr upon failure. */ cBlockEntity * LoadBlockEntityFromNBT(const cParsedNBT & a_NBT, int a_Tag, int a_BlockX, int a_BlockY, int a_BlockZ, BLOCKTYPE a_BlockType, NIBBLETYPE a_BlockMeta); /// Loads a cItem contents from the specified NBT tag; returns true if successful. Doesn't load the Slot tag diff --git a/src/WorldStorage/WorldStorage.cpp b/src/WorldStorage/WorldStorage.cpp index c611bfd90..31318ee67 100644 --- a/src/WorldStorage/WorldStorage.cpp +++ b/src/WorldStorage/WorldStorage.cpp @@ -39,8 +39,8 @@ protected: cWorldStorage::cWorldStorage(void) : super("cWorldStorage"), - m_World(NULL), - m_SaveSchema(NULL) + m_World(nullptr), + m_SaveSchema(nullptr) { } -- cgit v1.2.3 From b67eb2169eba24b767163d882dbf667a429bfbd6 Mon Sep 17 00:00:00 2001 From: Alexander Harkness Date: Mon, 27 Oct 2014 20:16:51 +0000 Subject: Another one.# --- src/WorldStorage/NBTChunkSerializer.cpp | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) (limited to 'src/WorldStorage') diff --git a/src/WorldStorage/NBTChunkSerializer.cpp b/src/WorldStorage/NBTChunkSerializer.cpp index d85a5c329..1ecdcb5fd 100644 --- a/src/WorldStorage/NBTChunkSerializer.cpp +++ b/src/WorldStorage/NBTChunkSerializer.cpp @@ -604,6 +604,28 @@ void cNBTChunkSerializer::AddMonsterEntity(cMonster * a_Monster) m_Writer.AddByte("IsConverting", (((const cZombie *)a_Monster)->IsConverting() ? 1 : 0)); break; } + case mtInvalidType: + case mtBlaze: + case mtCaveSpider: + case mtChicken: + case mtCow: + case mtEnderDragon: + case mtGhast: + case mtGiant: + case mtIronGolem + case mtMooshroom: + case mtOcelot: + case mtPig: + case mtSilverfish: + case mtSnowGolem: + case meSpider: + case mtSquid: + case mtWitch: + case mtZombiePigman: + { + // Other mobs have no special tags. 
+ break; + } } m_Writer.EndCompound(); } -- cgit v1.2.3 From 232343065f508c3ec32a612d15c6cab7a1553721 Mon Sep 17 00:00:00 2001 From: Alexander Harkness Date: Mon, 27 Oct 2014 22:03:11 +0000 Subject: Fixed compile (typos). --- src/WorldStorage/NBTChunkSerializer.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'src/WorldStorage') diff --git a/src/WorldStorage/NBTChunkSerializer.cpp b/src/WorldStorage/NBTChunkSerializer.cpp index 1ecdcb5fd..4daa8cc7b 100644 --- a/src/WorldStorage/NBTChunkSerializer.cpp +++ b/src/WorldStorage/NBTChunkSerializer.cpp @@ -612,13 +612,13 @@ void cNBTChunkSerializer::AddMonsterEntity(cMonster * a_Monster) case mtEnderDragon: case mtGhast: case mtGiant: - case mtIronGolem + case mtIronGolem: case mtMooshroom: case mtOcelot: case mtPig: case mtSilverfish: case mtSnowGolem: - case meSpider: + case mtSpider: case mtSquid: case mtWitch: case mtZombiePigman: -- cgit v1.2.3 From 165533c27e380300f5e9411e4d46aa553bf2fdeb Mon Sep 17 00:00:00 2001 From: Howaner Date: Wed, 29 Oct 2014 15:47:43 +0100 Subject: Fixed 1.8 world item format reading. --- src/WorldStorage/WSSAnvil.cpp | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) (limited to 'src/WorldStorage') diff --git a/src/WorldStorage/WSSAnvil.cpp b/src/WorldStorage/WSSAnvil.cpp index af7551ee4..0c77b4d67 100644 --- a/src/WorldStorage/WSSAnvil.cpp +++ b/src/WorldStorage/WSSAnvil.cpp @@ -696,11 +696,28 @@ cBlockEntity * cWSSAnvil::LoadBlockEntityFromNBT(const cParsedNBT & a_NBT, int a bool cWSSAnvil::LoadItemFromNBT(cItem & a_Item, const cParsedNBT & a_NBT, int a_TagIdx) { int Type = a_NBT.FindChildByName(a_TagIdx, "id"); - if ((Type < 0) || (a_NBT.GetType(Type) != TAG_Short)) + if (Type <= 0) { return false; } - a_Item.m_ItemType = a_NBT.GetShort(Type); + + if (a_NBT.GetType(Type) == TAG_String) + { + if (!StringToItem(a_NBT.GetString(Type), a_Item)) + { + // Can't resolve item type + return false; + } + } + else if (a_NBT.GetType(Type) == TAG_Short) + { + a_Item.m_ItemType = a_NBT.GetShort(Type); + } + else + { + return false; + } + if (a_Item.m_ItemType < 0) { a_Item.Empty(); -- cgit v1.2.3
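
For context on the final patch above: Minecraft 1.8 changed the item "id" tag in saved NBT from a numeric value to a namespaced string (e.g. "minecraft:stick" instead of 280), while pre-1.8 region files still carry the short form, which is why LoadItemFromNBT() now accepts either tag type. Below is a minimal standalone sketch of that dual-format dispatch; the types eTagType and ParsedIdTag are hypothetical stand-ins rather than the real cParsedNBT API, and the real code resolves string ids via StringToItem():

#include <string>

// Hypothetical stand-ins for the "id" tag as cWSSAnvil::LoadItemFromNBT() sees it.
enum class eTagType { String, Short };

struct ParsedIdTag
{
	eTagType    m_Type;
	short       m_NumericId;  // Valid when m_Type == eTagType::Short (pre-1.8 format)
	std::string m_StringId;   // Valid when m_Type == eTagType::String (1.8+ format)
};

// Mirrors the branch structure added in the patch: numeric ids are used directly,
// string ids go through a lookup, and an unresolvable string id fails the load.
bool ResolveItemId(const ParsedIdTag & a_Tag, short & a_ItemType)
{
	switch (a_Tag.m_Type)
	{
		case eTagType::Short:
		{
			a_ItemType = a_Tag.m_NumericId;
			return true;
		}
		case eTagType::String:
		{
			// Placeholder lookup; the real code delegates to StringToItem():
			if (a_Tag.m_StringId == "minecraft:stick")
			{
				a_ItemType = 280;
				return true;
			}
			return false;  // Unknown string id
		}
	}
	return false;
}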