propertyType, DATATYPE value, Vector3ic pos) {
updateBlockProperty(propertyType, value, pos.x(), pos.y(), pos.z(), 0);
}
diff --git a/api/src/main/java/org/allaymc/api/world/chunk/Chunk.java b/api/src/main/java/org/allaymc/api/world/chunk/Chunk.java
index b42f58336f..c49818ecdd 100644
--- a/api/src/main/java/org/allaymc/api/world/chunk/Chunk.java
+++ b/api/src/main/java/org/allaymc/api/world/chunk/Chunk.java
@@ -1,20 +1,12 @@
package org.allaymc.api.world.chunk;
-import org.allaymc.api.block.type.BlockState;
-import org.allaymc.api.entity.interfaces.EntityPlayer;
-import org.allaymc.api.world.biome.BiomeType;
-import org.cloudburstmc.protocol.bedrock.packet.BedrockPacket;
-import org.jetbrains.annotations.UnmodifiableView;
-
import javax.annotation.concurrent.ThreadSafe;
-import java.util.Set;
-import java.util.function.Predicate;
-import java.util.stream.Collectors;
+import java.util.function.Consumer;
/**
* Chunk represents a 16x16 area in a world.
*
- * All methods in this class are thread-safe. However, Frequent calls to methods in this class
+ * All methods in this class are thread-safe. However, frequent calls to methods in this class
* result in huge lock overhead. If you are sure that the instance won't be accessed by multiple threads,
* you can operate on unsafe chunk directly. To get the unsafe chunk, use {@link #toUnsafeChunk()}.
*
@@ -26,151 +18,28 @@
public interface Chunk extends UnsafeChunk {
/**
- * Get the chunk loaders that load this chunk
- *
- * @return the chunk loaders
- */
- @UnmodifiableView
- Set getChunkLoaders();
-
- /**
- * Get the player chunk loaders that load this chunk
- *
- * @return the player chunk loaders
- */
- @UnmodifiableView
- default Set getPlayerChunkLoaders() {
- return getChunkLoaders().stream()
- .filter(EntityPlayer.class::isInstance)
- .map(EntityPlayer.class::cast)
- .collect(Collectors.toSet());
- }
-
- /**
- * Add a chunk loader to this chunk.
- *
- * @param chunkLoader the chunk loader to add
- */
- void addChunkLoader(ChunkLoader chunkLoader);
-
- /**
- * Remove a chunk loader from this chunk.
- *
- * @param chunkLoader the chunk loader to remove
- */
- void removeChunkLoader(ChunkLoader chunkLoader);
-
- /**
- * Get the number of chunk loaders that load this chunk.
- *
- * @return the number of chunk loaders
- */
- int getChunkLoaderCount();
-
- /**
- * Add a chunk packet to the chunk.
- *
- * Chunk packet will be sent to all chunk loaders every tick.
+ * Apply the specified operation to this chunk.
*
- * @param packet the packet to add
+ * @param operation the operation to be applied to the chunk.
+ * @param block the operation type for block data.
+ * @param biome the operation type for biome data.
+ * @param height the operation type for height map data.
*/
- void addChunkPacket(BedrockPacket packet);
+ void applyOperation(Consumer<UnsafeChunk> operation, OperationType block, OperationType biome, OperationType height);
/**
- * Add a chunk packet to the chunk.
- *
- * Chunk packet will be sent to chunk loaders that match the predicate every tick.
+ * Apply the specified operation to a single section of this chunk.
*
- * @param packet the packet to add
- * @param chunkLoaderPredicate the predicate to match chunk loaders
+ * @param sectionY the y coordinate of the section.
+ * @param operation the operation to be applied to the chunk section.
+ * @param block the operation type for block data.
+ * @param biome the operation type for biome data.
*/
- void addChunkPacket(BedrockPacket packet, Predicate chunkLoaderPredicate);
+ void applyOperationInSection(int sectionY, Consumer<ChunkSection> operation, OperationType block, OperationType biome);
/**
- * Send packet to all chunk loaders.
- */
- void sendChunkPacket(BedrockPacket packet);
-
- /**
- * Send packet to chunk loaders that match the predicate.
+ * Convert this chunk to an {@link UnsafeChunk}, which is not safe in a multithreaded environment.
*
- * @param packet the packet to send
- * @param chunkLoaderPredicate the predicate to match chunk loaders
- */
- void sendChunkPacket(BedrockPacket packet, Predicate chunkLoaderPredicate);
-
- /**
- * Send all chunk packets which are added since last tick to all chunk loaders.
- */
- void sendChunkPackets();
-
- /**
- * Compare and set block at the specified position.
- *
- * @param x the x coordinate.
- * @param y the y coordinate.
- * @param z the z coordinate.
- * @param expectedValue the expected block state.
- * @param newValue the new block state.
- * @param layer the layer to compare and set.
- */
- void compareAndSetBlock(int x, int y, int z, BlockState expectedValue, BlockState newValue, int layer);
-
- /**
- * Compare and set biome at the specified position.
- *
- * @param x the x coordinate.
- * @param y the y coordinate.
- * @param z the z coordinate.
- * @param expectedValue the expected biome type.
- * @param newValue the new biome type.
- */
- void compareAndSetBiome(int x, int y, int z, BiomeType expectedValue, BiomeType newValue);
-
- /**
- * Compare and set height at the specified position.
- *
- * @param x the x coordinate.
- * @param z the z coordinate.
- * @param expectedValue the expected height.
- * @param newValue the new height.
- */
- void compareAndSetHeight(int x, int z, short expectedValue, short newValue);
-
- /**
- * Process the chunk with the specified operation.
- *
- * This method will only add a lock once, so it is more efficient than calling other methods
- * in this class frequently.
- * If you are going to get a range of blocks in the chunk, using this method will be an ideal choice
- * to avoid lock overhead.
- *
- * @param operate the operation to process the chunk
- */
- void batchProcess(UnsafeChunkOperate operate);
-
- /**
- * Get the unsafe chunk of this chunk.
- *
- * @return the unsafe chunk
+ * @return the unsafe chunk.
*/
UnsafeChunk toUnsafeChunk();
-
- /**
- * Spawn entities in this chunk to the specified player.
- *
- * @param player the player to spawn entities to
- */
- default void spawnEntitiesTo(EntityPlayer player) {
- getEntities().values().forEach(player::spawnEntity);
- }
-
- /**
- * Despawn entities in this chunk from the specified player.
- *
- * @param player the player to despawn entities from
- */
- default void despawnEntitiesFrom(EntityPlayer player) {
- getEntities().values().forEach(player::despawnEntity);
- }
}
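
With the compare-and-set helpers removed from Chunk, the same check-then-set pattern can be expressed through the new applyOperation method, which holds the relevant locks for the whole lambda. A minimal sketch (the helper class is illustrative and not part of this PR; it assumes block states are canonical instances, as the identity comparison in the removed compareAndSetBlock did):

```java
import org.allaymc.api.block.type.BlockState;
import org.allaymc.api.world.chunk.Chunk;
import org.allaymc.api.world.chunk.OperationType;

public final class CompareAndSetExample {
    /**
     * Replaces the block at the given local position only if it still matches the
     * expected state. The check and the write both run under the chunk's block
     * write locks, which is the guarantee the removed compareAndSetBlock provided.
     *
     * @return true if the block was replaced
     */
    public static boolean compareAndSetBlock(Chunk chunk, int x, int y, int z,
                                             BlockState expected, BlockState newState, int layer) {
        var replaced = new boolean[1];
        chunk.applyOperation(unsafe -> {
            if (unsafe.getBlockState(x, y, z, layer) == expected) {
                unsafe.setBlockState(x, y, z, newState, layer);
                replaced[0] = true;
            }
        }, OperationType.WRITE, OperationType.NONE, OperationType.NONE);
        return replaced[0];
    }
}
```
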
diff --git a/api/src/main/java/org/allaymc/api/world/chunk/ChunkSection.java b/api/src/main/java/org/allaymc/api/world/chunk/ChunkSection.java
new file mode 100644
index 0000000000..6083664072
--- /dev/null
+++ b/api/src/main/java/org/allaymc/api/world/chunk/ChunkSection.java
@@ -0,0 +1,60 @@
+package org.allaymc.api.world.chunk;
+
+import org.allaymc.api.block.type.BlockState;
+import org.allaymc.api.world.biome.BiomeType;
+import org.jetbrains.annotations.Range;
+
+import javax.annotation.concurrent.NotThreadSafe;
+
+/**
+ * Represents a 16x16x16 area in a {@link Chunk}. Note that the methods in this class are not thread-safe.
+ *
+ * @author daoge_cmd
+ */
+@NotThreadSafe
+public interface ChunkSection {
+
+ /**
+ * Get the block state at the given local position.
+ *
+ * @param x the local x coordinate of the block state.
+ * @param y the local y coordinate of the block state.
+ * @param z the local z coordinate of the block state.
+ * @param layer the layer of the block state.
+ *
+ * @return the block state at the given position.
+ */
+ BlockState getBlockState(@Range(from = 0, to = 15) int x, @Range(from = 0, to = 15) int y, @Range(from = 0, to = 15) int z, int layer);
+
+ /**
+ * Set the block state at the given local position.
+ *
+ * @param x the local x coordinate of the block state.
+ * @param y the local y coordinate of the block state.
+ * @param z the local z coordinate of the block state.
+ * @param blockState the block state to set.
+ * @param layer the layer of the block state.
+ */
+ void setBlockState(@Range(from = 0, to = 15) int x, @Range(from = 0, to = 15) int y, @Range(from = 0, to = 15) int z, BlockState blockState, int layer);
+
+ /**
+ * Get the biome type at the given local position.
+ *
+ * @param x the local x coordinate of the biome type.
+ * @param y the local y coordinate of the biome type.
+ * @param z the local z coordinate of the biome type.
+ *
+ * @return the biome type at the given position.
+ */
+ BiomeType getBiomeType(@Range(from = 0, to = 15) int x, @Range(from = 0, to = 15) int y, @Range(from = 0, to = 15) int z);
+
+ /**
+ * Set the biome type at the given local position.
+ *
+ * @param x the local x coordinate of the biome type.
+ * @param y the local y coordinate of the biome type.
+ * @param z the local z coordinate of the biome type.
+ * @param biomeType the biome type to set.
+ */
+ void setBiomeType(@Range(from = 0, to = 15) int x, @Range(from = 0, to = 15) int y, @Range(from = 0, to = 15) int z, BiomeType biomeType);
+}
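
The new ChunkSection interface is what applyOperationInSection hands to callers, so bulk edits can stay inside one 16x16x16 section while its locks are taken only once. A sketch of such a bulk write (class and method names are illustrative):

```java
import org.allaymc.api.block.type.BlockState;
import org.allaymc.api.world.chunk.Chunk;
import org.allaymc.api.world.chunk.OperationType;

public final class SectionFillExample {
    /**
     * Fills layer 0 of a single section with one block state. The section's block
     * data is write-locked once for the whole loop instead of once per call.
     */
    public static void fillSection(Chunk chunk, int sectionY, BlockState blockState) {
        chunk.applyOperationInSection(sectionY, section -> {
            for (int x = 0; x < 16; x++) {
                for (int y = 0; y < 16; y++) {
                    for (int z = 0; z < 16; z++) {
                        section.setBlockState(x, y, z, blockState, 0);
                    }
                }
            }
        }, OperationType.WRITE, OperationType.NONE);
    }
}
```
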
diff --git a/api/src/main/java/org/allaymc/api/world/chunk/OperationType.java b/api/src/main/java/org/allaymc/api/world/chunk/OperationType.java
new file mode 100644
index 0000000000..f4a3f83292
--- /dev/null
+++ b/api/src/main/java/org/allaymc/api/world/chunk/OperationType.java
@@ -0,0 +1,10 @@
+package org.allaymc.api.world.chunk;
+
+/**
+ * @author daoge_cmd
+ */
+public enum OperationType {
+ NONE,
+ READ,
+ WRITE
+}
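
OperationType tells the thread-safe wrapper which locks to take for each kind of chunk data: NONE skips locking, READ takes shared locks, WRITE takes exclusive locks. A read-only pass over block data would therefore look roughly like this (a sketch; it assumes block states are canonical instances, so identity comparison against the default air state is valid, as elsewhere in this diff):

```java
import org.allaymc.api.block.type.BlockTypes;
import org.allaymc.api.world.chunk.Chunk;
import org.allaymc.api.world.chunk.OperationType;

public final class ChunkScanExample {
    /**
     * Counts non-air blocks in layer 0 of the whole chunk. Only the block read locks
     * are held during the scan; biomes and the height map are not touched, so
     * OperationType.NONE is passed for them and no lock is taken on that data.
     */
    public static int countNonAirBlocks(Chunk chunk) {
        var air = BlockTypes.AIR.getDefaultState();
        var count = new int[1];
        chunk.applyOperation(unsafe -> {
            var info = unsafe.getDimensionInfo();
            for (int x = 0; x < 16; x++) {
                for (int z = 0; z < 16; z++) {
                    for (int y = info.minHeight(); y <= info.maxHeight(); y++) {
                        if (unsafe.getBlockState(x, y, z, 0) != air) {
                            count[0]++;
                        }
                    }
                }
            }
        }, OperationType.READ, OperationType.NONE, OperationType.NONE);
        return count[0];
    }
}
```
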
diff --git a/api/src/main/java/org/allaymc/api/world/chunk/UnsafeChunk.java b/api/src/main/java/org/allaymc/api/world/chunk/UnsafeChunk.java
index 71fd5ffa4e..ea076eaad4 100644
--- a/api/src/main/java/org/allaymc/api/world/chunk/UnsafeChunk.java
+++ b/api/src/main/java/org/allaymc/api/world/chunk/UnsafeChunk.java
@@ -1,28 +1,33 @@
package org.allaymc.api.world.chunk;
-import com.google.common.base.Preconditions;
import org.allaymc.api.block.type.BlockState;
import org.allaymc.api.blockentity.BlockEntity;
import org.allaymc.api.entity.Entity;
+import org.allaymc.api.entity.interfaces.EntityPlayer;
import org.allaymc.api.utils.HashUtils;
import org.allaymc.api.world.DimensionInfo;
import org.allaymc.api.world.biome.BiomeType;
+import org.cloudburstmc.protocol.bedrock.packet.BedrockPacket;
import org.jetbrains.annotations.Range;
import org.jetbrains.annotations.UnmodifiableView;
import javax.annotation.concurrent.NotThreadSafe;
import java.util.Collection;
+import java.util.List;
import java.util.Map;
+import java.util.Set;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
/**
- * The UnsafeChunk is located inside the {@link Chunk}, which is not thread-safe.
+ * UnsafeChunk is similar to {@link Chunk} but is not thread-safe.
*
- * Compared to {@link Chunk}, unsafe chunk works more like a simple data container,
- * which means that it is very fast but is not thread-safe.
- *
- * If you are sure that the instance won't be accessed by multiple threads,
- * you can operate on unsafe chunk directly. However, this may become very dangerous
- * if you do not have enough experience in multithreaded programming.
+ * Compared to {@link Chunk}, unsafe chunk is very fast but is not thread-safe.
+ * All methods in this class should be considered thread-unsafe, although
+ * some of them may be thread-safe depending on the implementation. If you are sure
+ * that the instance won't be accessed by multiple threads, you can operate on unsafe
+ * chunk directly. However, this may become very dangerous if you do not have enough
+ * experience in multithreaded programming.
*
* @author Cool_Loong | daoge_cmd
*/
@@ -30,28 +35,85 @@
public interface UnsafeChunk {
/**
- * Calculate the index of the pos in the chunk.
+ * Check if the chunk is loaded.
*
- * @param x the x coordinate of the pos.
- * @param y the y coordinate of the pos.
- * @param z the z coordinate of the pos.
+ * @return {@code true} if the chunk is loaded, {@code false} otherwise
+ */
+ boolean isLoaded();
+
+ /**
+ * Get the chunk loaders that load this chunk
*
- * @return the index of the pos in the chunk.
+ * @return the chunk loaders
*/
- static int index(int x, int y, int z) {
- Preconditions.checkArgument(x >= 0 && x <= 15);
- Preconditions.checkArgument(y >= 0 && y <= 15);
- Preconditions.checkArgument(z >= 0 && z <= 15);
- // The chunk order is x-z-y in bedrock edition, however the chunk order in java version is y-z-x
- return (x << 8) + (z << 4) + y;
+ @UnmodifiableView
+ Set<ChunkLoader> getChunkLoaders();
+
+ /**
+ * Get the player chunk loaders that load this chunk
+ *
+ * @return the player chunk loaders
+ */
+ @UnmodifiableView
+ default Set<EntityPlayer> getPlayerChunkLoaders() {
+ return getChunkLoaders().stream()
+ .filter(EntityPlayer.class::isInstance)
+ .map(EntityPlayer.class::cast)
+ .collect(Collectors.toSet());
}
/**
- * Check if the chunk is loaded.
+ * Add a chunk loader to this chunk.
*
- * @return {@code true} if the chunk is loaded, {@code false} otherwise
+ * @param chunkLoader the chunk loader to add
*/
- boolean isLoaded();
+ void addChunkLoader(ChunkLoader chunkLoader);
+
+ /**
+ * Remove a chunk loader from this chunk.
+ *
+ * @param chunkLoader the chunk loader to remove
+ */
+ void removeChunkLoader(ChunkLoader chunkLoader);
+
+ /**
+ * Get the number of chunk loaders that load this chunk.
+ *
+ * @return the number of chunk loaders
+ */
+ int getChunkLoaderCount();
+
+ /**
+ * Add a chunk packet to the chunk.
+ *
+ * Chunk packet will be sent to all chunk loaders every tick.
+ *
+ * @param packet the packet to add
+ */
+ void addChunkPacket(BedrockPacket packet);
+
+ /**
+ * Add a chunk packet to the chunk.
+ *
+ * Chunk packet will be sent to chunk loaders that match the predicate every tick.
+ *
+ * @param packet the packet to add
+ * @param chunkLoaderPredicate the predicate to match chunk loaders
+ */
+ void addChunkPacket(BedrockPacket packet, Predicate<ChunkLoader> chunkLoaderPredicate);
+
+ /**
+ * Send packet to all chunk loaders.
+ */
+ void sendChunkPacket(BedrockPacket packet);
+
+ /**
+ * Send packet to chunk loaders that match the predicate.
+ *
+ * @param packet the packet to send
+ * @param chunkLoaderPredicate the predicate to match chunk loaders
+ */
+ void sendChunkPacket(BedrockPacket packet, Predicate<ChunkLoader> chunkLoaderPredicate);
/**
* Get the state of the chunk.
@@ -98,6 +160,24 @@ static int index(int x, int y, int z) {
@UnmodifiableView
Map getEntities();
+ /**
+ * Spawn entities in this chunk to the specified player.
+ *
+ * @param player the player to spawn entities to
+ */
+ default void spawnEntitiesTo(EntityPlayer player) {
+ getEntities().values().forEach(player::spawnEntity);
+ }
+
+ /**
+ * Despawn entities in this chunk from the specified player.
+ *
+ * @param player the player to despawn entities from
+ */
+ default void despawnEntitiesFrom(EntityPlayer player) {
+ getEntities().values().forEach(player::despawnEntity);
+ }
+
/**
* Remove the block entity in this chunk.
*
@@ -254,6 +334,23 @@ default BlockState getBlockState(@Range(from = 0, to = 15) int x, int y, @Range(
*/
BiomeType getBiome(@Range(from = 0, to = 15) int x, int y, @Range(from = 0, to = 15) int z);
+ /**
+ * Get a specific chunk section in this chunk.
+ *
+ * @param sectionY the sectionY of the chunk section.
+ *
+ * @return the section, should never be {@code null}
+ */
+ ChunkSection getSection(int sectionY);
+
+ /**
+ * Get all chunk sections in this chunk.
+ *
+ * @return all chunk sections in this chunk.
+ */
+ @UnmodifiableView
+ List<ChunkSection> getSections();
+
/**
* Get the hash of the chunk.
*
@@ -262,4 +359,11 @@ default BlockState getBlockState(@Range(from = 0, to = 15) int x, int y, @Range(
default long computeChunkHash() {
return HashUtils.hashXZ(getX(), getZ());
}
+
+ /**
+ * Convert this unsafe chunk to a {@link Chunk}, which is safe in a multithreaded environment.
+ *
+ * @return the safe chunk.
+ */
+ Chunk toSafeChunk();
}
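
The chunk-loader, packet, and entity spawn/despawn methods now live on UnsafeChunk, and the two views convert into each other via toUnsafeChunk()/toSafeChunk(). When a chunk is still confined to one thread, for example while it is being generated and before it is published to the world, the unsafe view can be used directly and the safe view handed out afterwards. A sketch (the helper name is illustrative):

```java
import org.allaymc.api.block.type.BlockState;
import org.allaymc.api.world.chunk.Chunk;
import org.allaymc.api.world.chunk.UnsafeChunk;

public final class SingleThreadedAccessExample {
    /**
     * Writes a flat 16x16 layer of the given block state without any locking.
     * Only safe while no other thread can see this chunk.
     */
    public static Chunk fillLayerUnlocked(Chunk chunk, int y, BlockState blockState) {
        UnsafeChunk unsafe = chunk.toUnsafeChunk();
        for (int x = 0; x < 16; x++) {
            for (int z = 0; z < 16; z++) {
                unsafe.setBlockState(x, y, z, blockState, 0);
            }
        }
        // Hand the thread-safe view back to callers that may publish the chunk.
        return unsafe.toSafeChunk();
    }
}
```
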
diff --git a/api/src/main/java/org/allaymc/api/world/chunk/UnsafeChunkOperate.java b/api/src/main/java/org/allaymc/api/world/chunk/UnsafeChunkOperate.java
deleted file mode 100644
index 09b74ed9bb..0000000000
--- a/api/src/main/java/org/allaymc/api/world/chunk/UnsafeChunkOperate.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package org.allaymc.api.world.chunk;
-
-/**
- * UnsafeChunkOperate is used in {@link Chunk#batchProcess(UnsafeChunkOperate)} method.
- *
- * @author Cool_Loong
- */
-@FunctionalInterface
-public interface UnsafeChunkOperate {
- void run(UnsafeChunk unsafeChunk);
-}
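
Existing callers of the removed batchProcess(UnsafeChunkOperate) can migrate to applyOperation with an ordinary Consumer; the only new requirement is stating which data the batch actually touches. A sketch of the before/after (names are illustrative):

```java
import org.allaymc.api.world.chunk.Chunk;
import org.allaymc.api.world.chunk.OperationType;

public final class BatchProcessMigration {
    public static void bulkEditBlocks(Chunk chunk) {
        // Before: chunk.batchProcess(unsafe -> { /* bulk edits */ });
        // which always took both the block and the height/biome write locks.

        // After: the same body becomes a Consumer<UnsafeChunk>, and the caller states
        // which data is written. Here only blocks are modified, so the biome and
        // height locks are skipped entirely.
        chunk.applyOperation(unsafe -> {
            // ... bulk block edits via unsafe.setBlockState(...) ...
        }, OperationType.WRITE, OperationType.NONE, OperationType.NONE);
    }
}
```
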
diff --git a/server/src/main/java/org/allaymc/server/entity/component/EntityBaseComponentImpl.java b/server/src/main/java/org/allaymc/server/entity/component/EntityBaseComponentImpl.java
index 532dd9e0ad..573a461d0f 100644
--- a/server/src/main/java/org/allaymc/server/entity/component/EntityBaseComponentImpl.java
+++ b/server/src/main/java/org/allaymc/server/entity/component/EntityBaseComponentImpl.java
@@ -35,7 +35,7 @@
import org.allaymc.server.component.annotation.Manager;
import org.allaymc.server.component.annotation.OnInitFinish;
import org.allaymc.server.entity.component.event.*;
-import org.allaymc.server.world.chunk.AllayChunk;
+import org.allaymc.server.world.chunk.AllayUnsafeChunk;
import org.cloudburstmc.math.vector.Vector2f;
import org.cloudburstmc.nbt.NbtMap;
import org.cloudburstmc.nbt.NbtMapBuilder;
@@ -247,21 +247,32 @@ protected void checkDead() {
onDie();
}
if (dead) {
- if (deadTimer > 0) deadTimer--;
- if (deadTimer == 0) {
- // Spawn dead particle
- spawnDeadParticle();
+ if (hasDeadTimer()) {
+ if (deadTimer > 0) deadTimer--;
+ if (deadTimer == 0) {
+ // Spawn dead particle
+ spawnDeadParticle();
+ getDimension().getEntityService().removeEntity(thisEntity, () -> dead = false);
+ }
+ } else {
getDimension().getEntityService().removeEntity(thisEntity, () -> dead = false);
}
}
}
+ protected boolean hasDeadTimer() {
+ return true;
+ }
+
protected void onDie() {
new EntityDieEvent(thisEntity).call();
manager.callEvent(CEntityDieEvent.INSTANCE);
dead = true;
- deadTimer = DEFAULT_DEAD_TIMER;
+ if (hasDeadTimer()) {
+ deadTimer = DEFAULT_DEAD_TIMER;
+ }
+
applyEntityEvent(EntityEventType.DEATH, 0);
effects.values().forEach(effect -> effect.getType().onEntityDies(thisEntity, effect));
removeAllEffects();
@@ -395,11 +406,11 @@ protected boolean checkChunk(Location3fc oldLoc, Location3fc newLoc) {
// It is possible that the oldChunk is null
// For example, when spawning an entity, the entity's old location is meaningless
if (oldChunk != null) {
- ((AllayChunk) oldChunk).removeEntity(runtimeId);
+ ((AllayUnsafeChunk) oldChunk.toUnsafeChunk()).removeEntity(runtimeId);
}
}
- ((AllayChunk) newChunk).addEntity(thisEntity);
+ ((AllayUnsafeChunk) newChunk.toUnsafeChunk()).addEntity(thisEntity);
Set oldChunkPlayers = oldChunk != null ? oldChunk.getPlayerChunkLoaders() : Collections.emptySet();
Set samePlayers = new HashSet<>(newChunk.getPlayerChunkLoaders());
samePlayers.retainAll(oldChunkPlayers);
diff --git a/server/src/main/java/org/allaymc/server/entity/component/EntityPickableBaseComponentImpl.java b/server/src/main/java/org/allaymc/server/entity/component/EntityPickableBaseComponentImpl.java
index 19a8e5969a..773bccc0b0 100644
--- a/server/src/main/java/org/allaymc/server/entity/component/EntityPickableBaseComponentImpl.java
+++ b/server/src/main/java/org/allaymc/server/entity/component/EntityPickableBaseComponentImpl.java
@@ -44,6 +44,11 @@ public void tick(long currentTick) {
if (pickupDelay > 0) pickupDelay--;
}
+ @Override
+ protected boolean hasDeadTimer() {
+ return false;
+ }
+
@Override
public void loadNBT(NbtMap nbt) {
super.loadNBT(nbt);
diff --git a/server/src/main/java/org/allaymc/server/network/processor/impl/ingame/SubChunkRequestPacketProcessor.java b/server/src/main/java/org/allaymc/server/network/processor/impl/ingame/SubChunkRequestPacketProcessor.java
index e888c9d995..830692869b 100644
--- a/server/src/main/java/org/allaymc/server/network/processor/impl/ingame/SubChunkRequestPacketProcessor.java
+++ b/server/src/main/java/org/allaymc/server/network/processor/impl/ingame/SubChunkRequestPacketProcessor.java
@@ -10,8 +10,8 @@
import org.allaymc.api.world.DimensionInfo;
import org.allaymc.api.world.biome.BiomeType;
import org.allaymc.server.network.processor.PacketProcessor;
-import org.allaymc.server.world.chunk.AllayChunk;
-import org.allaymc.server.world.chunk.ChunkSection;
+import org.allaymc.server.world.chunk.AllayChunkSection;
+import org.allaymc.server.world.chunk.AllayUnsafeChunk;
import org.cloudburstmc.math.vector.Vector3i;
import org.cloudburstmc.nbt.NbtUtils;
import org.cloudburstmc.protocol.bedrock.data.HeightMapDataType;
@@ -46,7 +46,7 @@ private static void createSubChunkData(
Vector3i offset,
HeightMapDataType type,
ByteBuf heightMapData,
- ChunkSection subchunk,
+ AllayChunkSection subchunk,
Collection subChunkBlockEntities
) {
var subChunkData = new SubChunkData();
@@ -155,7 +155,7 @@ public void handleSync(EntityPlayer player, SubChunkRequestPacket packet, long r
heightMapData = Unpooled.wrappedBuffer(hMap);
}
- var subChunk = ((AllayChunk) chunk).getSection(sectionY);
+ var subChunk = ((AllayUnsafeChunk) chunk.toUnsafeChunk()).getSection(sectionY);
SubChunkRequestResult subChunkRequestResult;
if (subChunk.isAirSection()) subChunkRequestResult = SubChunkRequestResult.SUCCESS_ALL_AIR;
else subChunkRequestResult = SubChunkRequestResult.SUCCESS;
diff --git a/server/src/main/java/org/allaymc/server/world/chunk/AllayChunk.java b/server/src/main/java/org/allaymc/server/world/chunk/AllayChunk.java
index ec7a3c30af..ace79a85e1 100644
--- a/server/src/main/java/org/allaymc/server/world/chunk/AllayChunk.java
+++ b/server/src/main/java/org/allaymc/server/world/chunk/AllayChunk.java
@@ -1,45 +1,17 @@
package org.allaymc.server.world.chunk;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Sets;
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.ByteBufAllocator;
-import io.netty.buffer.ByteBufOutputStream;
-import io.netty.buffer.Unpooled;
-import io.netty.util.internal.PlatformDependent;
-import lombok.Setter;
+import lombok.experimental.Delegate;
import lombok.extern.slf4j.Slf4j;
-import org.allaymc.api.block.dto.BlockStateWithPos;
import org.allaymc.api.block.type.BlockState;
-import org.allaymc.api.blockentity.BlockEntity;
-import org.allaymc.api.entity.Entity;
-import org.allaymc.api.eventbus.event.block.BlockRandomUpdateEvent;
-import org.allaymc.api.eventbus.event.block.BlockScheduleUpdateEvent;
-import org.allaymc.api.math.position.Position3i;
-import org.allaymc.api.server.Server;
-import org.allaymc.api.utils.HashUtils;
-import org.allaymc.api.world.Dimension;
-import org.allaymc.api.world.DimensionInfo;
import org.allaymc.api.world.biome.BiomeType;
-import org.allaymc.api.world.chunk.*;
-import org.allaymc.api.world.gamerule.GameRule;
-import org.allaymc.api.world.storage.WorldStorage;
-import org.allaymc.server.blockentity.component.BlockEntityBaseComponentImpl;
-import org.allaymc.server.blockentity.impl.BlockEntityImpl;
-import org.allaymc.server.entity.component.EntityBaseComponentImpl;
-import org.allaymc.server.entity.impl.EntityImpl;
-import org.allaymc.server.world.service.AllayLightService;
-import org.cloudburstmc.nbt.NbtUtils;
-import org.cloudburstmc.protocol.bedrock.packet.BedrockPacket;
-import org.cloudburstmc.protocol.bedrock.packet.LevelChunkPacket;
-import org.jetbrains.annotations.Range;
-import org.jetbrains.annotations.UnmodifiableView;
+import org.allaymc.api.world.chunk.Chunk;
+import org.allaymc.api.world.chunk.ChunkSection;
+import org.allaymc.api.world.chunk.OperationType;
+import org.allaymc.api.world.chunk.UnsafeChunk;
-import java.util.*;
-import java.util.concurrent.ThreadLocalRandom;
-import java.util.concurrent.locks.StampedLock;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Consumer;
-import java.util.function.Predicate;
/**
* @author Cool_Loong | daoge_cmd
@@ -47,556 +19,157 @@
@Slf4j
public class AllayChunk implements Chunk {
- protected static final int LCG_CONSTANT = 1013904223;
-
+ @Delegate(types = UnsafeChunk.class)
protected final AllayUnsafeChunk unsafeChunk;
- protected final StampedLock blockLock;
- protected final StampedLock heightAndBiomeLock;
- protected final Set chunkLoaders;
- protected final Queue chunkPacketQueue;
-
- // The callback to be called when the chunk is loaded into the world
- // The provided boolean value indicated whether the chunk is set successfully
- @Setter
- protected Consumer chunkSetCallback;
- protected int autoSaveTimer = 0;
- protected int updateLCG = ThreadLocalRandom.current().nextInt();
-
- private static void checkXZ(int x, int z) {
- Preconditions.checkArgument(x >= 0 && x <= 15);
- Preconditions.checkArgument(z >= 0 && z <= 15);
- }
-
- private static void checkXYZ(int x, int y, int z) {
- Preconditions.checkArgument(x >= 0 && x <= 15);
- Preconditions.checkArgument(y >= -512 && y <= 511);
- Preconditions.checkArgument(z >= 0 && z <= 15);
- }
+ protected final ReadWriteLock heightLock;
+ protected final ChunkSectionLocks blockLocks;
+ protected final ChunkSectionLocks biomeLocks;
AllayChunk(AllayUnsafeChunk unsafeChunk) {
this.unsafeChunk = unsafeChunk;
- this.blockLock = new StampedLock();
- this.heightAndBiomeLock = new StampedLock();
- this.chunkLoaders = Sets.newConcurrentHashSet();
- this.chunkPacketQueue = PlatformDependent.newMpscQueue();
- }
-
- public void tick(long currentTick, Dimension dimension, WorldStorage worldStorage) {
- unsafeChunk.getBlockEntitiesUnsafe().values().forEach(blockEntity -> ((BlockEntityBaseComponentImpl) ((BlockEntityImpl) blockEntity).getBaseComponent()).tick(currentTick));
- unsafeChunk.getEntitiesUnsafe().values().forEach(entity -> ((EntityBaseComponentImpl) ((EntityImpl) entity).getBaseComponent()).tick(currentTick));
- tickScheduledUpdates(dimension);
- tickRandomUpdates(dimension);
-
- checkAutoSave(worldStorage);
- }
-
- protected void tickScheduledUpdates(Dimension dimension) {
- var scheduledUpdates = unsafeChunk.getScheduledUpdatesUnsafe();
- List positions = new ArrayList<>(scheduledUpdates.size() / 4);
- for (var entry : scheduledUpdates.fastEntrySet()) {
- if (entry.getValue().getDelay() <= 0) {
- positions.add(entry.getValue());
- scheduledUpdates.remove(entry.getIntKey());
- } else {
- entry.getValue().decreaseDelay();
- }
- }
-
- positions.forEach(info -> {
- var chunkXYZ = info.getChunkXYZ();
- var localX = HashUtils.getXFromHashChunkXYZ(chunkXYZ);
- var y = HashUtils.getYFromHashChunkXYZ(chunkXYZ);
- var localZ = HashUtils.getZFromHashChunkXYZ(chunkXYZ);
- var layer = info.getLayer();
-
- var blockState = getBlockState(localX, y, localZ, layer);
- var blockStateWithPos = new BlockStateWithPos(blockState, new Position3i(localX + (unsafeChunk.x << 4), y, localZ + (unsafeChunk.z << 4), dimension), layer);
- if (!new BlockScheduleUpdateEvent(blockStateWithPos).call()) {
- return;
- }
-
- blockState.getBehavior().onScheduledUpdate(blockStateWithPos);
- });
- }
-
- protected void tickRandomUpdates(Dimension dimension) {
- int randomTickSpeed = dimension.getWorld().getWorldData().getGameRuleValue(GameRule.RANDOM_TICK_SPEED);
- if (randomTickSpeed <= 0) {
- return;
- }
-
- for (var section : unsafeChunk.getSections()) {
- if (section.isAirSection()) {
- continue;
- }
- // Check the entry list of this section, and
- // if there is no block that support random tick
- // in this section, we can just skip this section
- if (section.blockLayers()[0].allEntriesMatch(blockState -> !blockState.getBehavior().canRandomUpdate())) {
- continue;
- }
-
- int sectionY = section.sectionY();
- for (int i = 0; i < randomTickSpeed * 3; i++) {
- int lcg = nextUpdateLCG();
- int localX = lcg & 0x0f;
- int localZ = lcg >>> 8 & 0x0f;
- int localY = lcg >>> 16 & 0x0f;
- // TODO: instead of get the block state from palette and check if it supports random tick,
- // we can add a bitset to every chunk section to mark whether a block pos contains a block
- // that supports random tick, this would be much quicker
- var blockState = getBlockState(localX, sectionY * 16 + localY, localZ, 0);
- if (blockState.getBehavior().canRandomUpdate()) {
- var blockStateWithPos = new BlockStateWithPos(blockState, new Position3i(localX + (unsafeChunk.x << 4), localY + (sectionY << 4), localZ + (unsafeChunk.z << 4), dimension), 0);
- if (new BlockRandomUpdateEvent(blockStateWithPos).call()) {
- blockState.getBehavior().onRandomUpdate(blockStateWithPos);
- }
- }
- }
- }
- }
- public int nextUpdateLCG() {
- return (this.updateLCG = (this.updateLCG * 3) ^ LCG_CONSTANT);
- }
-
- protected void checkAutoSave(WorldStorage worldStorage) {
- autoSaveTimer++;
- if (autoSaveTimer >= Server.SETTINGS.storageSettings().chunkAutoSaveCycle()) {
- worldStorage.writeChunk(this);
- autoSaveTimer = 0;
- }
+ // Init locks
+ var dimensionInfo = unsafeChunk.getDimensionInfo();
+ this.heightLock = new ReentrantReadWriteLock();
+ this.blockLocks = new ChunkSectionLocks(dimensionInfo);
+ this.biomeLocks = new ChunkSectionLocks(dimensionInfo);
}
@Override
public short getHeight(int x, int z) {
- checkXZ(x, z);
- var stamp = heightAndBiomeLock.tryOptimisticRead();
+ heightLock.readLock().lock();
try {
- for (; ; stamp = heightAndBiomeLock.readLock()) {
- if (stamp == 0L) continue;
- var result = unsafeChunk.getHeight(x, z);
- if (!heightAndBiomeLock.validate(stamp)) continue;
- return result;
- }
+ return unsafeChunk.getHeight(x, z);
} finally {
- if (StampedLock.isReadLockStamp(stamp)) heightAndBiomeLock.unlockRead(stamp);
+ heightLock.readLock().unlock();
}
}
@Override
public void setHeight(int x, int z, short height) {
- checkXZ(x, z);
- Preconditions.checkArgument(height >= -512 && height <= 511);
- var stamp = heightAndBiomeLock.writeLock();
+ heightLock.writeLock().lock();
try {
unsafeChunk.setHeight(x, z, height);
} finally {
- heightAndBiomeLock.unlockWrite(stamp);
- }
- }
-
- @Override
- public void compareAndSetHeight(int x, int z, short expectedValue, short newValue) {
- checkXZ(x, z);
- Preconditions.checkArgument(expectedValue >= -512 && expectedValue <= 511);
- Preconditions.checkArgument(newValue >= -512 && newValue <= 511);
- var stamp = heightAndBiomeLock.tryOptimisticRead();
- try {
- for (; ; stamp = heightAndBiomeLock.writeLock()) {
- if (stamp == 0L) continue;
- var oldValue = unsafeChunk.getHeight(x, z);
- if (!heightAndBiomeLock.validate(stamp)) continue;
- if (oldValue != expectedValue) break;
- stamp = heightAndBiomeLock.tryConvertToWriteLock(stamp);
- if (stamp != 0L) {
- unsafeChunk.setHeight(x, z, newValue);
- return;
- }
- }
- } finally {
- if (StampedLock.isWriteLockStamp(stamp)) heightAndBiomeLock.unlockWrite(stamp);
+ heightLock.writeLock().unlock();
}
}
@Override
public BlockState getBlockState(int x, int y, int z, int layer) {
- // Do not check y here, as if y is invalid we will return air
- // See test AllayChunkTest.testInvalidGetBlockStateMethodCall()
- checkXZ(x, z);
- var stamp = blockLock.tryOptimisticRead();
+ var sectionY = y >> 4;
+ blockLocks.lockReadLockAt(sectionY);
try {
- for (; ; stamp = blockLock.readLock()) {
- if (stamp == 0L) continue;
- var result = unsafeChunk.getBlockState(x, y, z, layer);
- if (blockLock.validate(stamp)) {
- return result;
- }
- }
+ return unsafeChunk.getBlockState(x, y, z, layer);
} finally {
- if (StampedLock.isReadLockStamp(stamp)) blockLock.unlockRead(stamp);
+ blockLocks.unlockReadLockAt(sectionY);
}
}
@Override
public void setBlockState(int x, int y, int z, BlockState blockState, int layer, boolean send) {
- checkXYZ(x, y, z);
- var stamp = blockLock.writeLock();
+ var sectionY = y >> 4;
+ blockLocks.lockWriteLockAt(sectionY);
try {
unsafeChunk.setBlockState(x, y, z, blockState, layer, send);
} finally {
- blockLock.unlockWrite(stamp);
- }
- }
-
- @Override
- public void compareAndSetBiome(int x, int y, int z, BiomeType expectedValue, BiomeType newValue) {
- checkXYZ(x, y, z);
- var stamp = heightAndBiomeLock.tryOptimisticRead();
- try {
- for (; ; stamp = heightAndBiomeLock.writeLock()) {
- if (stamp == 0L) continue;
- var oldValue = unsafeChunk.getBiome(x, y, z);
- if (!heightAndBiomeLock.validate(stamp)) continue;
- if (oldValue != expectedValue) break;
- stamp = heightAndBiomeLock.tryConvertToWriteLock(stamp);
- if (stamp == 0L) continue;
- unsafeChunk.setBiome(x, y, z, newValue);
- return;
- }
- } finally {
- if (StampedLock.isWriteLockStamp(stamp)) heightAndBiomeLock.unlockWrite(stamp);
+ blockLocks.unlockWriteLockAt(sectionY);
}
}
@Override
public BiomeType getBiome(int x, int y, int z) {
- checkXYZ(x, y, z);
- var stamp = heightAndBiomeLock.tryOptimisticRead();
+ var sectionY = y >> 4;
+ biomeLocks.lockReadLockAt(sectionY);
try {
- for (; ; stamp = heightAndBiomeLock.readLock()) {
- if (stamp == 0L) continue;
- var biomeType = unsafeChunk.getBiome(x, y, z);
- if (heightAndBiomeLock.validate(stamp)) {
- return biomeType;
- }
- }
+ return unsafeChunk.getBiome(x, y, z);
} finally {
- if (StampedLock.isReadLockStamp(stamp)) heightAndBiomeLock.unlockRead(stamp);
+ biomeLocks.unlockReadLockAt(sectionY);
}
}
@Override
public void setBiome(int x, int y, int z, BiomeType biomeType) {
- checkXYZ(x, y, z);
- var stamp = heightAndBiomeLock.writeLock();
+ var sectionY = y >> 4;
+ biomeLocks.lockWriteLockAt(sectionY);
try {
unsafeChunk.setBiome(x, y, z, biomeType);
} finally {
- heightAndBiomeLock.unlockWrite(stamp);
+ biomeLocks.unlockWriteLockAt(sectionY);
}
}
@Override
- public void compareAndSetBlock(int x, int y, int z, BlockState expectedValue, BlockState newValue, int layer) {
- checkXYZ(x, y, z);
- var stamp = blockLock.tryOptimisticRead();
+ public void applyOperation(Consumer operation, OperationType block, OperationType biome, OperationType height) {
+ tryLockAllSections(block, blockLocks);
+ tryLockAllSections(biome, biomeLocks);
+ tryLock(height, heightLock);
try {
- for (; ; stamp = blockLock.writeLock()) {
- if (stamp == 0L) continue;
- var oldValue = unsafeChunk.getBlockState(x, y, z, layer);
- if (!blockLock.validate(stamp)) continue;
- if (oldValue != expectedValue) break;
- stamp = blockLock.tryConvertToWriteLock(stamp);
- if (stamp == 0L) continue;
- unsafeChunk.setBlockState(x, y, z, newValue, layer);
- return;
- }
+ operation.accept(unsafeChunk);
} finally {
- if (StampedLock.isWriteLockStamp(stamp)) blockLock.unlockWrite(stamp);
+ tryUnlockAllSections(block, blockLocks);
+ tryUnlockAllSections(biome, biomeLocks);
+ tryUnlock(height, heightLock);
}
}
@Override
- public void batchProcess(UnsafeChunkOperate operate) {
- var stamp1 = blockLock.writeLock();
- var stamp2 = heightAndBiomeLock.writeLock();
+ public void applyOperationInSection(int sectionY, Consumer operation, OperationType block, OperationType biome) {
+ tryLockSection(sectionY, block, blockLocks);
+ tryLockSection(sectionY, biome, biomeLocks);
try {
- operate.run(this.unsafeChunk);
+ operation.accept(unsafeChunk.getSection(sectionY));
} finally {
- blockLock.unlockWrite(stamp1);
- heightAndBiomeLock.unlockWrite(stamp2);
+ tryUnlockSection(sectionY, block, blockLocks);
+ tryUnlockSection(sectionY, biome, biomeLocks);
}
}
- @Override
- public UnsafeChunk toUnsafeChunk() {
- return unsafeChunk;
- }
-
- public LevelChunkPacket createSubChunkLevelChunkPacket() {
- var levelChunkPacket = new LevelChunkPacket();
- levelChunkPacket.setDimension(getDimensionInfo().dimensionId());
- levelChunkPacket.setChunkX(this.getX());
- levelChunkPacket.setChunkZ(this.getZ());
- levelChunkPacket.setCachingEnabled(false);
- levelChunkPacket.setRequestSubChunks(true);
- // This value is used in the subchunk system to control the maximum value of sectionY requested by the client.
- levelChunkPacket.setSubChunkLimit(getDimensionInfo().chunkSectionCount());
- levelChunkPacket.setData(Unpooled.EMPTY_BUFFER);
- return levelChunkPacket;
- }
-
- public LevelChunkPacket createFullLevelChunkPacketChunk() {
- var levelChunkPacket = new LevelChunkPacket();
- levelChunkPacket.setDimension(getDimensionInfo().dimensionId());
- levelChunkPacket.setChunkX(this.getX());
- levelChunkPacket.setChunkZ(this.getZ());
- levelChunkPacket.setCachingEnabled(false);
- levelChunkPacket.setRequestSubChunks(false);
- levelChunkPacket.setSubChunksLength(getDimensionInfo().chunkSectionCount());
- try {
- levelChunkPacket.setData(writeToNetwork());
- } catch (Throwable t) {
- levelChunkPacket.setData(Unpooled.EMPTY_BUFFER);
+ protected void tryLockAllSections(OperationType operationType, ChunkSectionLocks locks) {
+ switch (operationType) {
+ case READ -> locks.lockAllReadLocks();
+ case WRITE -> locks.lockAllWriteLocks();
}
- return levelChunkPacket;
}
- private ByteBuf writeToNetwork() {
- var byteBuf = ByteBufAllocator.DEFAULT.buffer();
- try {
- writeToNetwork0(byteBuf);
- return byteBuf;
- } catch (Throwable t) {
- log.error("Error while encoding chunk(x={}, z={})!", getX(), getZ(), t);
- byteBuf.release();
- throw t;
+ protected void tryLockSection(int sectionY, OperationType operationType, ChunkSectionLocks locks) {
+ switch (operationType) {
+ case READ -> locks.lockReadLockAt(sectionY);
+ case WRITE -> locks.lockWriteLockAt(sectionY);
}
}
- private void writeToNetwork0(ByteBuf byteBuf) {
- // Write blocks
- for (int i = getDimensionInfo().minSectionY(); i <= getDimensionInfo().maxSectionY(); i++) {
- getSection(i).writeToNetwork(byteBuf);
- }
- // Write biomes
- Arrays.stream(getSections()).forEach(section -> section.biomes().writeToNetwork(byteBuf, BiomeType::getId));
- byteBuf.writeByte(0); // edu- border blocks
- // Write block entities
- var blockEntities = getBlockEntities().values();
- if (!blockEntities.isEmpty()) {
- try (var writer = NbtUtils.createNetworkWriter(new ByteBufOutputStream(byteBuf))) {
- for (var blockEntity : blockEntities) {
- writer.writeTag(blockEntity.saveNBT());
- }
- } catch (Throwable t) {
- log.error("Error while encoding block entities in chunk(x={}, z={})!", getX(), getZ(), t);
- }
+ protected void tryUnlockAllSections(OperationType operationType, ChunkSectionLocks locks) {
+ switch (operationType) {
+ case READ -> locks.unlockAllReadLocks();
+ case WRITE -> locks.unlockAllWriteLocks();
}
}
- @Override
- public boolean isLoaded() {
- return unsafeChunk.isLoaded();
- }
-
- @Override
- public ChunkState getState() {
- return unsafeChunk.getState();
- }
-
- public void setState(ChunkState next) {
- unsafeChunk.setState(next);
- }
-
- @Override
- public DimensionInfo getDimensionInfo() {
- return unsafeChunk.getDimensionInfo();
- }
-
- @Override
- public int getX() {
- return unsafeChunk.getX();
- }
-
- @Override
- public int getZ() {
- return unsafeChunk.getZ();
- }
-
- public void addEntity(Entity entity) {
- unsafeChunk.addEntity(entity);
- }
-
- public Entity removeEntity(long runtimeId) {
- return unsafeChunk.removeEntity(runtimeId);
- }
-
- @Override
- public @UnmodifiableView Map getEntities() {
- return unsafeChunk.getEntities();
- }
-
- @Override
- public void addBlockEntity(BlockEntity blockEntity) {
- Preconditions.checkNotNull(blockEntity);
- unsafeChunk.addBlockEntity(blockEntity);
- }
-
- @Override
- public BlockEntity removeBlockEntity(int x, int y, int z) {
- return unsafeChunk.removeBlockEntity(x, y, z);
- }
-
- @Override
- public BlockEntity getBlockEntity(int x, int y, int z) {
- return unsafeChunk.getBlockEntity(x, y, z);
- }
-
- @Override
- public @UnmodifiableView Map getBlockEntities() {
- return unsafeChunk.getBlockEntities();
- }
-
- @Override
- public Entity getEntity(long runtimeId) {
- return unsafeChunk.getEntity(runtimeId);
- }
-
- public ChunkSection getSection(int sectionY) {
- Preconditions.checkArgument(sectionY >= -32 && sectionY <= 31);
- var stamp = blockLock.tryOptimisticRead();
- try {
- for (; ; stamp = blockLock.readLock()) {
- if (stamp == 0L) continue;
- var section = unsafeChunk.getSection(sectionY);
- if (blockLock.validate(stamp)) {
- return section;
- }
- }
- } finally {
- if (StampedLock.isReadLockStamp(stamp)) blockLock.unlockRead(stamp);
+ protected void tryUnlockSection(int sectionY, OperationType operationType, ChunkSectionLocks locks) {
+ switch (operationType) {
+ case READ -> locks.unlockReadLockAt(sectionY);
+ case WRITE -> locks.unlockWriteLockAt(sectionY);
}
}
- public ChunkSection[] getSections() {
- return unsafeChunk.getSections();
- }
-
- public void beforeSetChunk(Dimension dimension) {
- unsafeChunk.beforeSetChunk(dimension);
- }
-
- public void afterSetChunk(Dimension dimension, boolean success) {
- if (chunkSetCallback != null) {
- chunkSetCallback.accept(success);
- }
- unsafeChunk.afterSetChunk(dimension, success);
-
- if (!success) {
- return;
+ protected void tryLock(OperationType operationType, ReadWriteLock lock) {
+ switch (operationType) {
+ case READ -> lock.readLock().lock();
+ case WRITE -> lock.writeLock().lock();
}
-
- unsafeChunk.setBlockChangeCallback((x, y, z, blockState, layer) -> {
- if (layer != 0) {
- return;
- }
- ((AllayLightService) dimension.getLightService()).onBlockChange(x + (unsafeChunk.x << 4), y, z + (unsafeChunk.z << 4), blockState.getBlockStateData().lightEmission(), blockState.getBlockStateData().lightDampening());
- });
- ((AllayLightService) dimension.getLightService()).onChunkLoad(this);
- }
-
- @Override
- public @UnmodifiableView Collection getSectionBlockEntities(int sectionY) {
- Preconditions.checkArgument(sectionY >= -32 && sectionY <= 31);
- return unsafeChunk.getSectionBlockEntities(sectionY);
}
- @Override
- public void addScheduledUpdate(@Range(from = 0, to = 15) int x, int y, @Range(from = 0, to = 15) int z, int delay, int layer) {
- unsafeChunk.addScheduledUpdate(x, y, z, delay, layer);
- }
-
- @Override
- public boolean hasScheduledUpdate(@Range(from = 0, to = 15) int x, int y, @Range(from = 0, to = 15) int z, int layer) {
- return unsafeChunk.hasScheduledUpdate(x, y, z, layer);
- }
-
- @Override
- public void sendChunkPackets() {
- if (chunkLoaders.isEmpty()) {
- unsafeChunk.clearBlockChanges();
- chunkPacketQueue.clear();
- return;
- }
-
- // Send block updates
- var pks = unsafeChunk.encodeAndClearBlockChanges();
- // pks == null -> no block changes
- if (pks != null) {
- for (var pk : pks) {
- if (pk == null) {
- continue;
- }
-
- sendChunkPacket(pk);
- }
- }
-
- // Send other chunk packets
- if (chunkPacketQueue.isEmpty()) {
- return;
- }
- ChunkPacketEntry entry;
- while ((entry = chunkPacketQueue.poll()) != null) {
- sendChunkPacket(entry.packet(), entry.chunkLoaderPredicate());
+ protected void tryUnlock(OperationType operationType, ReadWriteLock lock) {
+ switch (operationType) {
+ case READ -> lock.readLock().unlock();
+ case WRITE -> lock.writeLock().unlock();
}
}
@Override
- public void addChunkPacket(BedrockPacket packet) {
- chunkPacketQueue.add(new ChunkPacketEntry(packet, null));
- }
-
- @Override
- public void addChunkPacket(BedrockPacket packet, Predicate chunkLoaderPredicate) {
- chunkPacketQueue.add(new ChunkPacketEntry(packet, chunkLoaderPredicate));
- }
-
- @Override
- @UnmodifiableView
- public Set getChunkLoaders() {
- return Collections.unmodifiableSet(chunkLoaders);
- }
-
- @Override
- public void addChunkLoader(ChunkLoader chunkLoader) {
- chunkLoaders.add(chunkLoader);
- }
-
- @Override
- public void removeChunkLoader(ChunkLoader chunkLoader) {
- chunkLoaders.remove(chunkLoader);
- }
-
- @Override
- public int getChunkLoaderCount() {
- return chunkLoaders.size();
- }
-
- @Override
- public void sendChunkPacket(BedrockPacket packet) {
- chunkLoaders.forEach(chunkLoader -> chunkLoader.sendPacket(packet));
- }
-
- @Override
- public void sendChunkPacket(BedrockPacket packet, Predicate chunkLoaderPredicate) {
- chunkLoaders.stream()
- .filter(chunkLoader -> chunkLoaderPredicate == null || chunkLoaderPredicate.test(chunkLoader))
- .forEach(chunkLoader -> chunkLoader.sendPacket(packet));
+ public UnsafeChunk toUnsafeChunk() {
+ return unsafeChunk;
}
-
- protected record ChunkPacketEntry(BedrockPacket packet, Predicate chunkLoaderPredicate) {}
}
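
AllayChunk now delegates all unsynchronized behaviour to AllayUnsafeChunk via Lombok's @Delegate and only adds locking: one ReadWriteLock for the height map plus per-section lock sets for blocks and biomes. The ChunkSectionLocks class it relies on is not part of this excerpt; purely as a mental model (all names and details below are assumptions, not the actual implementation), it could look roughly like this:

```java
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.allaymc.api.world.DimensionInfo;

// Hypothetical sketch only; the real ChunkSectionLocks is not shown in this diff.
final class ChunkSectionLocksSketch {
    private final ReadWriteLock[] locks;
    private final int minSectionY;

    ChunkSectionLocksSketch(DimensionInfo dimensionInfo) {
        this.minSectionY = dimensionInfo.minSectionY();
        this.locks = new ReadWriteLock[dimensionInfo.chunkSectionCount()];
        for (int i = 0; i < locks.length; i++) {
            locks[i] = new ReentrantReadWriteLock();
        }
    }

    void lockReadLockAt(int sectionY) { locks[sectionY - minSectionY].readLock().lock(); }

    void unlockReadLockAt(int sectionY) { locks[sectionY - minSectionY].readLock().unlock(); }

    void lockWriteLockAt(int sectionY) { locks[sectionY - minSectionY].writeLock().lock(); }

    void unlockWriteLockAt(int sectionY) { locks[sectionY - minSectionY].writeLock().unlock(); }

    void lockAllReadLocks() { for (var lock : locks) lock.readLock().lock(); }

    void unlockAllReadLocks() { for (var lock : locks) lock.readLock().unlock(); }

    void lockAllWriteLocks() { for (var lock : locks) lock.writeLock().lock(); }

    void unlockAllWriteLocks() { for (var lock : locks) lock.writeLock().unlock(); }
}
```
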
diff --git a/server/src/main/java/org/allaymc/server/world/chunk/AllayChunkBuilder.java b/server/src/main/java/org/allaymc/server/world/chunk/AllayChunkBuilder.java
index 70abfcce8c..8c105300b9 100644
--- a/server/src/main/java/org/allaymc/server/world/chunk/AllayChunkBuilder.java
+++ b/server/src/main/java/org/allaymc/server/world/chunk/AllayChunkBuilder.java
@@ -20,16 +20,16 @@ public class AllayChunkBuilder {
private int chunkX;
private int chunkZ;
private DimensionInfo dimensionInfo;
- private ChunkSection[] sections;
+ private AllayChunkSection[] sections;
private HeightMap heightMap;
private List entitiyList;
private List blockEntitiyList;
private Int2ObjectNonBlockingMap scheduledUpdates;
- private static ChunkSection[] createEmptySections(DimensionInfo dimensionInfo) {
- var sections = new ChunkSection[dimensionInfo.chunkSectionCount()];
+ private static AllayChunkSection[] createEmptySections(DimensionInfo dimensionInfo) {
+ var sections = new AllayChunkSection[dimensionInfo.chunkSectionCount()];
for (int i = 0; i < sections.length; i++) {
- sections[i] = new ChunkSection((byte) (i + dimensionInfo.minSectionY()));
+ sections[i] = new AllayChunkSection((byte) (i + dimensionInfo.minSectionY()));
}
return sections;
}
@@ -55,7 +55,7 @@ public AllayChunkBuilder dimensionInfo(DimensionInfo dimensionInfo) {
return this;
}
- public AllayChunkBuilder sections(ChunkSection[] sections) {
+ public AllayChunkBuilder sections(AllayChunkSection[] sections) {
Preconditions.checkNotNull(dimensionInfo);
Preconditions.checkArgument(sections.length == dimensionInfo.chunkSectionCount());
for (int index = 0; index < sections.length; index++) {
diff --git a/server/src/main/java/org/allaymc/server/world/chunk/ChunkSection.java b/server/src/main/java/org/allaymc/server/world/chunk/AllayChunkSection.java
similarity index 64%
rename from server/src/main/java/org/allaymc/server/world/chunk/ChunkSection.java
rename to server/src/main/java/org/allaymc/server/world/chunk/AllayChunkSection.java
index 76604801b1..6d5d5380bd 100644
--- a/server/src/main/java/org/allaymc/server/world/chunk/ChunkSection.java
+++ b/server/src/main/java/org/allaymc/server/world/chunk/AllayChunkSection.java
@@ -4,46 +4,52 @@
import org.allaymc.api.block.type.BlockState;
import org.allaymc.api.world.biome.BiomeId;
import org.allaymc.api.world.biome.BiomeType;
+import org.allaymc.api.world.chunk.ChunkSection;
import org.allaymc.server.datastruct.palette.Palette;
import org.allaymc.server.world.storage.ChunkSectionVersion;
import javax.annotation.concurrent.NotThreadSafe;
import static org.allaymc.api.block.type.BlockTypes.AIR;
-import static org.allaymc.api.world.chunk.UnsafeChunk.index;
+import static org.allaymc.api.utils.HashUtils.hashChunkSectionXYZ;
/**
* @author Cool_Loong | daoge_cmd
*/
@NotThreadSafe
-public record ChunkSection(byte sectionY, Palette[] blockLayers, Palette biomes) {
+public record AllayChunkSection(byte sectionY, Palette<BlockState>[] blockLayers,
+ Palette<BiomeType> biomes) implements ChunkSection {
public static final int LAYER_COUNT = 2;
public static final int CURRENT_CHUNK_SECTION_VERSION = ChunkSectionVersion.PALETTED_MULTI_WITH_OFFSET.ordinal();
@SuppressWarnings("unchecked")
- public ChunkSection(byte sectionY) {
+ public AllayChunkSection(byte sectionY) {
this(sectionY, new Palette[]{new Palette<>(AIR.getDefaultState()), new Palette<>(AIR.getDefaultState())}, new Palette<>(BiomeId.PLAINS));
}
- public ChunkSection(byte sectionY, Palette[] blockLayer) {
+ public AllayChunkSection(byte sectionY, Palette<BlockState>[] blockLayer) {
this(sectionY, blockLayer, new Palette<>(BiomeId.PLAINS));
}
+ @Override
public BlockState getBlockState(int x, int y, int z, int layer) {
- return blockLayers[layer].get(index(x, y, z));
+ return blockLayers[layer].get(hashChunkSectionXYZ(x, y, z));
}
+ @Override
public void setBlockState(int x, int y, int z, BlockState blockState, int layer) {
- blockLayers[layer].set(index(x, y, z), blockState);
+ blockLayers[layer].set(hashChunkSectionXYZ(x, y, z), blockState);
}
- public void setBiomeType(int x, int y, int z, BiomeType biomeType) {
- biomes.set(index(x, y, z), biomeType);
+ @Override
+ public BiomeType getBiomeType(int x, int y, int z) {
+ return biomes.get(hashChunkSectionXYZ(x, y, z));
}
- public BiomeType getBiomeType(int x, int y, int z) {
- return biomes.get(index(x, y, z));
+ @Override
+ public void setBiomeType(int x, int y, int z, BiomeType biomeType) {
+ biomes.set(hashChunkSectionXYZ(x, y, z), biomeType);
}
public boolean isAirSection() {
@@ -57,6 +63,8 @@ public void writeToNetwork(ByteBuf byteBuf) {
// Extra byte since version 9
byteBuf.writeByte(sectionY & 0xFF);
- for (var blockLayer : blockLayers) blockLayer.writeToNetwork(byteBuf, BlockState::blockStateHash);
+ for (var blockLayer : blockLayers) {
+ blockLayer.writeToNetwork(byteBuf, BlockState::blockStateHash);
+ }
}
}
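
The palette indexing in this section class now goes through HashUtils.hashChunkSectionXYZ instead of the static UnsafeChunk.index helper removed earlier in this diff. For reference, the removed helper packed the local coordinates as follows (the exact layout of hashChunkSectionXYZ is not shown here, but it has to yield an equivalent 0..4095 index):

```java
// The removed UnsafeChunk.index(...) helper, reproduced for reference only.
// The chunk order is x-z-y in Bedrock Edition, while Java Edition uses y-z-x.
static int index(int x, int y, int z) {
    // x, y and z are local coordinates in the 0..15 range.
    return (x << 8) + (z << 4) + y;
}
```
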
diff --git a/server/src/main/java/org/allaymc/server/world/chunk/AllayUnsafeChunk.java b/server/src/main/java/org/allaymc/server/world/chunk/AllayUnsafeChunk.java
index 8ae82c1fcb..ae4651ba3d 100644
--- a/server/src/main/java/org/allaymc/server/world/chunk/AllayUnsafeChunk.java
+++ b/server/src/main/java/org/allaymc/server/world/chunk/AllayUnsafeChunk.java
@@ -1,74 +1,104 @@
package org.allaymc.server.world.chunk;
import com.google.common.base.Preconditions;
+import com.google.common.collect.Sets;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufAllocator;
+import io.netty.buffer.ByteBufOutputStream;
+import io.netty.buffer.Unpooled;
+import io.netty.util.internal.PlatformDependent;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import lombok.Getter;
import lombok.Setter;
+import lombok.extern.slf4j.Slf4j;
+import org.allaymc.api.block.dto.BlockStateWithPos;
import org.allaymc.api.block.type.BlockState;
import org.allaymc.api.block.type.BlockTypes;
import org.allaymc.api.blockentity.BlockEntity;
import org.allaymc.api.blockentity.BlockEntityHelper;
import org.allaymc.api.entity.Entity;
import org.allaymc.api.entity.EntityHelper;
+import org.allaymc.api.eventbus.event.block.BlockRandomUpdateEvent;
+import org.allaymc.api.eventbus.event.block.BlockScheduleUpdateEvent;
+import org.allaymc.api.math.position.Position3i;
+import org.allaymc.api.server.Server;
import org.allaymc.api.utils.HashUtils;
import org.allaymc.api.world.Dimension;
import org.allaymc.api.world.DimensionInfo;
import org.allaymc.api.world.biome.BiomeType;
-import org.allaymc.api.world.chunk.Chunk;
-import org.allaymc.api.world.chunk.ChunkState;
-import org.allaymc.api.world.chunk.UnsafeChunk;
+import org.allaymc.api.world.chunk.*;
+import org.allaymc.api.world.gamerule.GameRule;
+import org.allaymc.api.world.storage.WorldStorage;
+import org.allaymc.server.blockentity.component.BlockEntityBaseComponentImpl;
+import org.allaymc.server.blockentity.impl.BlockEntityImpl;
import org.allaymc.server.datastruct.collections.nb.Int2ObjectNonBlockingMap;
import org.allaymc.server.datastruct.collections.nb.Long2ObjectNonBlockingMap;
+import org.allaymc.server.entity.component.EntityBaseComponentImpl;
+import org.allaymc.server.entity.impl.EntityImpl;
import org.allaymc.server.world.HeightMap;
+import org.allaymc.server.world.service.AllayLightService;
import org.cloudburstmc.math.vector.Vector3i;
import org.cloudburstmc.nbt.NbtMap;
+import org.cloudburstmc.nbt.NbtUtils;
import org.cloudburstmc.protocol.bedrock.data.BlockChangeEntry;
+import org.cloudburstmc.protocol.bedrock.packet.BedrockPacket;
+import org.cloudburstmc.protocol.bedrock.packet.LevelChunkPacket;
import org.cloudburstmc.protocol.bedrock.packet.UpdateSubChunkBlocksPacket;
import org.jetbrains.annotations.Range;
-import org.jetbrains.annotations.UnmodifiableView;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+import java.util.*;
+import java.util.concurrent.ThreadLocalRandom;
+import java.util.function.Consumer;
+import java.util.function.Predicate;
import java.util.stream.Collectors;
/**
* @author Cool_Loong | daoge_cmd
*/
+@Slf4j
public class AllayUnsafeChunk implements UnsafeChunk {
- // Constants used in UpdateSubChunkBlocksPacket
- private static final int BLOCK_UPDATE_NEIGHBORS = 0b0001;
- private static final int BLOCK_UPDATE_NETWORK = 0b0010;
- private static final int BLOCK_UPDATE_NO_GRAPHICS = 0b0100;
- private static final int BLOCK_UPDATE_PRIORITY = 0b1000;
+ protected static final int LCG_CONSTANT = 1013904223;
- private static final AtomicReferenceFieldUpdater STATE_FIELD = AtomicReferenceFieldUpdater.newUpdater(AllayUnsafeChunk.class, ChunkState.class, "state");
+ // Constants used in UpdateSubChunkBlocksPacket
+ protected static final int BLOCK_UPDATE_NEIGHBORS = 0b0001;
+ protected static final int BLOCK_UPDATE_NETWORK = 0b0010;
+ protected static final int BLOCK_UPDATE_NO_GRAPHICS = 0b0100;
+ protected static final int BLOCK_UPDATE_PRIORITY = 0b1000;
@Getter
protected final int x, z;
@Getter
protected final DimensionInfo dimensionInfo;
- @Getter
- protected final ChunkSection[] sections;
+ protected final AllayChunkSection[] sections;
@Getter
protected final HeightMap heightMap;
- protected final Long2ObjectNonBlockingMap entities;
- protected final Int2ObjectNonBlockingMap blockEntities;
+ @Getter
protected final Int2ObjectNonBlockingMap scheduledUpdates;
- protected final Int2ObjectOpenHashMap blockChangeEntries;
- protected final Int2ObjectOpenHashMap extraBlockChangeEntries;
@Getter
+ @Setter
protected volatile ChunkState state;
- @Getter
- protected volatile boolean loaded;
-
protected List entityNbtList;
protected List blockEntityNbtList;
+
+ protected final Int2ObjectOpenHashMap blockChangeEntries;
+ protected final Int2ObjectOpenHashMap extraBlockChangeEntries;
+ protected final Long2ObjectNonBlockingMap<Entity> entities;
+ protected final Int2ObjectNonBlockingMap<BlockEntity> blockEntities;
+ protected final Set<ChunkLoader> chunkLoaders;
+ protected final Queue chunkPacketQueue;
+ protected final AllayChunk safeChunk;
+
+ // The callback to be called when the chunk is loaded into the world
+ // The provided boolean value indicates whether the chunk was set successfully
+ @Setter
+ protected Consumer<Boolean> chunkSetCallback;
@Setter
protected BlockChangeCallback blockChangeCallback;
+ @Getter
+ protected volatile boolean loaded;
+ protected int autoSaveTimer = 0;
+ protected int updateLCG = ThreadLocalRandom.current().nextInt();
/**
* Create a new {@link AllayUnsafeChunk}.
@@ -84,7 +114,7 @@ public class AllayUnsafeChunk implements UnsafeChunk {
*/
AllayUnsafeChunk(
int x, int z, DimensionInfo dimensionInfo,
- ChunkSection[] sections, HeightMap heightMap,
+ AllayChunkSection[] sections, HeightMap heightMap,
Int2ObjectNonBlockingMap scheduledUpdates,
ChunkState state, List entityNbtList,
List blockEntityNbtList) {
@@ -94,13 +124,16 @@ public class AllayUnsafeChunk implements UnsafeChunk {
this.sections = sections;
this.heightMap = heightMap;
this.scheduledUpdates = scheduledUpdates;
+ this.state = state;
+ this.entityNbtList = entityNbtList;
+ this.blockEntityNbtList = blockEntityNbtList;
this.blockChangeEntries = new Int2ObjectOpenHashMap<>();
this.extraBlockChangeEntries = new Int2ObjectOpenHashMap<>();
this.entities = new Long2ObjectNonBlockingMap<>();
this.blockEntities = new Int2ObjectNonBlockingMap<>();
- this.state = state;
- this.entityNbtList = entityNbtList;
- this.blockEntityNbtList = blockEntityNbtList;
+ this.chunkLoaders = Sets.newConcurrentHashSet();
+ this.chunkPacketQueue = PlatformDependent.newMpscQueue();
+ this.safeChunk = new AllayChunk(this);
}
public static AllayChunkBuilder builder() {
@@ -112,6 +145,101 @@ private static void checkXZ(int x, int z) {
Preconditions.checkArgument(z >= 0 && z <= 15);
}
+ private void checkY(int y) {
+ Preconditions.checkArgument(y >= dimensionInfo.minHeight() && y <= dimensionInfo.maxHeight());
+ }
+
+ private void checkXYZ(int x, int y, int z) {
+ Preconditions.checkArgument(x >= 0 && x <= 15);
+ checkY(y);
+ Preconditions.checkArgument(z >= 0 && z <= 15);
+ }
+
+ public void tick(long currentTick, Dimension dimension, WorldStorage worldStorage) {
+ blockEntities.values().forEach(blockEntity -> ((BlockEntityBaseComponentImpl) ((BlockEntityImpl) blockEntity).getBaseComponent()).tick(currentTick));
+ entities.values().forEach(entity -> ((EntityBaseComponentImpl) ((EntityImpl) entity).getBaseComponent()).tick(currentTick));
+ tickScheduledUpdates(dimension);
+ tickRandomUpdates(dimension);
+ checkAutoSave(worldStorage);
+ }
+
+ protected void tickScheduledUpdates(Dimension dimension) {
+ List positions = new ArrayList<>(scheduledUpdates.size() / 4);
+ for (var entry : scheduledUpdates.fastEntrySet()) {
+ if (entry.getValue().getDelay() <= 0) {
+ positions.add(entry.getValue());
+ scheduledUpdates.remove(entry.getIntKey());
+ } else {
+ entry.getValue().decreaseDelay();
+ }
+ }
+
+ positions.forEach(info -> {
+ var chunkXYZ = info.getChunkXYZ();
+ var localX = HashUtils.getXFromHashChunkXYZ(chunkXYZ);
+ var y = HashUtils.getYFromHashChunkXYZ(chunkXYZ);
+ var localZ = HashUtils.getZFromHashChunkXYZ(chunkXYZ);
+ var layer = info.getLayer();
+
+ var blockState = getBlockState(localX, y, localZ, layer);
+ var blockStateWithPos = new BlockStateWithPos(blockState, new Position3i(localX + (this.x << 4), y, localZ + (this.z << 4), dimension), layer);
+ if (!new BlockScheduleUpdateEvent(blockStateWithPos).call()) {
+ return;
+ }
+
+ blockState.getBehavior().onScheduledUpdate(blockStateWithPos);
+ });
+ }
+
+ protected void tickRandomUpdates(Dimension dimension) {
+ int randomTickSpeed = dimension.getWorld().getWorldData().getGameRuleValue(GameRule.RANDOM_TICK_SPEED);
+ if (randomTickSpeed <= 0) {
+ return;
+ }
+
+ for (var section : sections) {
+ if (section.isAirSection()) {
+ continue;
+ }
+ // Check the entry list of this section: if no block in this section
+ // supports random ticks, we can skip the whole section
+ if (section.blockLayers()[0].allEntriesMatch(blockState -> !blockState.getBehavior().canRandomUpdate())) {
+ continue;
+ }
+
+ int sectionY = section.sectionY();
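+ // Sample randomTickSpeed * 3 candidate positions in this section per tick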
+ for (int i = 0; i < randomTickSpeed * 3; i++) {
+ int lcg = nextUpdateLCG();
+ int localX = lcg & 0x0f;
+ int localZ = lcg >>> 8 & 0x0f;
+ int localY = lcg >>> 16 & 0x0f;
+ // TODO: instead of getting the block state from the palette and checking whether it supports
+ // random ticks, we could add a bitset to every chunk section that marks whether a block pos
+ // contains a block supporting random ticks, which would be much quicker
+ var blockState = getBlockState(localX, sectionY * 16 + localY, localZ, 0);
+ if (blockState.getBehavior().canRandomUpdate()) {
+ var blockStateWithPos = new BlockStateWithPos(blockState, new Position3i(localX + (this.x << 4), localY + (sectionY << 4), localZ + (this.z << 4), dimension), 0);
+ if (new BlockRandomUpdateEvent(blockStateWithPos).call()) {
+ blockState.getBehavior().onRandomUpdate(blockStateWithPos);
+ }
+ }
+ }
+ }
+ }
+
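+ // Advances the chunk-local pseudo-random sequence used to pick random-tick positions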
+ public int nextUpdateLCG() {
+ return (this.updateLCG = (this.updateLCG * 3) ^ LCG_CONSTANT);
+ }
+
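+ // Writes this chunk to storage once every chunkAutoSaveCycle ticks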
+ protected void checkAutoSave(WorldStorage worldStorage) {
+ autoSaveTimer++;
+ if (autoSaveTimer >= Server.SETTINGS.storageSettings().chunkAutoSaveCycle()) {
+ worldStorage.writeChunk(safeChunk);
+ autoSaveTimer = 0;
+ }
+ }
+
public void beforeSetChunk(Dimension dimension) {
if (blockEntityNbtList != null && !blockEntityNbtList.isEmpty()) {
for (var nbt : blockEntityNbtList) {
@@ -128,6 +256,9 @@ public void beforeSetChunk(Dimension dimension) {
}
public void afterSetChunk(Dimension dimension, boolean success) {
+ if (chunkSetCallback != null) {
+ chunkSetCallback.accept(success);
+ }
if (!success) {
return;
}
@@ -141,29 +272,28 @@ public void afterSetChunk(Dimension dimension, boolean success) {
entityNbtList = null;
}
- loaded = true;
- }
+ setBlockChangeCallback((x, y, z, blockState, layer) -> {
+ if (layer != 0) {
+ return;
+ }
+ ((AllayLightService) dimension.getLightService()).onBlockChange(x + (this.x << 4), y, z + (this.z << 4), blockState.getBlockStateData().lightEmission(), blockState.getBlockStateData().lightDampening());
+ });
+ ((AllayLightService) dimension.getLightService()).onChunkLoad(toSafeChunk());
- private void checkXYZ(int x, int y, int z) {
- Preconditions.checkArgument(x >= 0 && x <= 15);
- Preconditions.checkArgument(y >= dimensionInfo.minHeight() && y <= dimensionInfo.maxHeight());
- Preconditions.checkArgument(z >= 0 && z <= 15);
+ loaded = true;
}
- /**
- * Get Chunk section.
- *
- * @param sectionY the sectionY.
- *
- * @return the section, or {@code null} if not exist.
- */
- public ChunkSection getSection(int sectionY) {
+ @Override
+ public AllayChunkSection getSection(int sectionY) {
Preconditions.checkArgument(sectionY >= -32 && sectionY <= 31);
return sections[sectionY - this.getDimensionInfo().minSectionY()];
}
+ @Override
+ public List getSections() {
+ return List.of(sections);
+ }
- @UnmodifiableView
@Override
public Collection getSectionBlockEntities(int sectionY) {
Preconditions.checkArgument(sectionY >= -32 && sectionY <= 31);
@@ -185,14 +315,9 @@ public boolean hasScheduledUpdate(@Range(from = 0, to = 15) int x, int y, @Range
return scheduledUpdateInfo != null && scheduledUpdateInfo.getLayer() == layer;
}
- public Int2ObjectNonBlockingMap getScheduledUpdatesUnsafe() {
- return scheduledUpdates;
- }
-
@Override
public short getHeight(int x, int z) {
- Preconditions.checkArgument(x >= 0 && x <= 15);
- Preconditions.checkArgument(z >= 0 && z <= 15);
+ checkXZ(x, z);
return getHeightUnsafe(HeightMap.computeIndex(x, z));
}
@@ -202,9 +327,7 @@ protected short getHeightUnsafe(int index) {
@Override
public void setHeight(int x, int z, short height) {
- Preconditions.checkArgument(x >= 0 && x <= 15);
- Preconditions.checkArgument(z >= 0 && z <= 15);
- Preconditions.checkArgument(height >= -512 && height <= 511);
+ checkXYZ(x, height, z);
setHeightUnsafe(HeightMap.computeIndex(x, z), height);
}
@@ -310,6 +433,7 @@ protected void encodeBlockChangesInLayer(UpdateSubChunkBlocksPacket[] pks, Int2O
@Override
public void setBiome(int x, int y, int z, BiomeType biomeType) {
+ checkXYZ(x, y, z);
this.getSection(y >> 4).setBiomeType(x, y & 0xf, z, biomeType);
}
@@ -338,10 +462,6 @@ public Map getEntities() {
return Collections.unmodifiableMap(entities);
}
- public Long2ObjectNonBlockingMap getEntitiesUnsafe() {
- return entities;
- }
-
@Override
public void addBlockEntity(BlockEntity blockEntity) {
Preconditions.checkNotNull(blockEntity);
@@ -362,23 +482,152 @@ public BlockEntity getBlockEntity(int x, int y, int z) {
}
@Override
- public @UnmodifiableView Map getBlockEntities() {
+ public Map<Integer, BlockEntity> getBlockEntities() {
return Collections.unmodifiableMap(blockEntities);
}
- public Int2ObjectNonBlockingMap getBlockEntitiesUnsafe() {
- return blockEntities;
+ @Override
+ public Chunk toSafeChunk() {
+ return safeChunk;
}
- public Chunk toSafeChunk() {
- return new AllayChunk(this);
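+ // Flushes pending block-change packets and queued chunk packets to this chunk's loaders
+ // (invoked from AllayChunkService#sendChunkPackets)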
+ public void sendChunkPackets() {
+ if (chunkLoaders.isEmpty()) {
+ clearBlockChanges();
+ chunkPacketQueue.clear();
+ return;
+ }
+
+ // Send block updates
+ var pks = encodeAndClearBlockChanges();
+ // pks == null -> no block changes
+ if (pks != null) {
+ for (var pk : pks) {
+ if (pk == null) {
+ continue;
+ }
+
+ sendChunkPacket(pk);
+ }
+ }
+
+ // Send other chunk packets
+ if (chunkPacketQueue.isEmpty()) {
+ return;
+ }
+ ChunkPacketEntry entry;
+ while ((entry = chunkPacketQueue.poll()) != null) {
+ sendChunkPacket(entry.packet(), entry.chunkLoaderPredicate());
+ }
+ }
+
+ @Override
+ public void addChunkPacket(BedrockPacket packet) {
+ chunkPacketQueue.add(new ChunkPacketEntry(packet, null));
+ }
+
+ @Override
+ public void addChunkPacket(BedrockPacket packet, Predicate<ChunkLoader> chunkLoaderPredicate) {
+ chunkPacketQueue.add(new ChunkPacketEntry(packet, chunkLoaderPredicate));
+ }
+
+ @Override
+ public Set<ChunkLoader> getChunkLoaders() {
+ return Collections.unmodifiableSet(chunkLoaders);
+ }
+
+ @Override
+ public void addChunkLoader(ChunkLoader chunkLoader) {
+ chunkLoaders.add(chunkLoader);
+ }
+
+ @Override
+ public void removeChunkLoader(ChunkLoader chunkLoader) {
+ chunkLoaders.remove(chunkLoader);
+ }
+
+ @Override
+ public int getChunkLoaderCount() {
+ return chunkLoaders.size();
}
- public void setState(ChunkState next) {
- ChunkState curr;
- do {
- curr = STATE_FIELD.get(this);
- Preconditions.checkState(curr.ordinal() <= next.ordinal(), "invalid state transition: %s => %s", curr, next);
- } while (!STATE_FIELD.compareAndSet(this, curr, next));
+ @Override
+ public void sendChunkPacket(BedrockPacket packet) {
+ chunkLoaders.forEach(chunkLoader -> chunkLoader.sendPacket(packet));
}
+
+ @Override
+ public void sendChunkPacket(BedrockPacket packet, Predicate<ChunkLoader> chunkLoaderPredicate) {
+ chunkLoaders.stream()
+ .filter(chunkLoader -> chunkLoaderPredicate == null || chunkLoaderPredicate.test(chunkLoader))
+ .forEach(chunkLoader -> chunkLoader.sendPacket(packet));
+ }
+
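+ // Builds an empty LevelChunkPacket that tells the client to request the sub chunks separately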
+ public LevelChunkPacket createSubChunkLevelChunkPacket() {
+ var levelChunkPacket = new LevelChunkPacket();
+ levelChunkPacket.setDimension(getDimensionInfo().dimensionId());
+ levelChunkPacket.setChunkX(this.getX());
+ levelChunkPacket.setChunkZ(this.getZ());
+ levelChunkPacket.setCachingEnabled(false);
+ levelChunkPacket.setRequestSubChunks(true);
+ // This value is used in the subchunk system to control the maximum value of sectionY requested by the client.
+ levelChunkPacket.setSubChunkLimit(getDimensionInfo().chunkSectionCount());
+ levelChunkPacket.setData(Unpooled.EMPTY_BUFFER);
+ return levelChunkPacket;
+ }
+
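+ // Builds a LevelChunkPacket carrying the fully serialized chunk payload (no sub-chunk requests);
+ // falls back to an empty payload if encoding fails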
+ public LevelChunkPacket createFullLevelChunkPacketChunk() {
+ var levelChunkPacket = new LevelChunkPacket();
+ levelChunkPacket.setDimension(getDimensionInfo().dimensionId());
+ levelChunkPacket.setChunkX(this.getX());
+ levelChunkPacket.setChunkZ(this.getZ());
+ levelChunkPacket.setCachingEnabled(false);
+ levelChunkPacket.setRequestSubChunks(false);
+ levelChunkPacket.setSubChunksLength(getDimensionInfo().chunkSectionCount());
+ try {
+ levelChunkPacket.setData(writeToNetwork());
+ } catch (Throwable t) {
+ levelChunkPacket.setData(Unpooled.EMPTY_BUFFER);
+ }
+ return levelChunkPacket;
+ }
+
+ private ByteBuf writeToNetwork() {
+ var byteBuf = ByteBufAllocator.DEFAULT.buffer();
+ try {
+ writeToNetwork0(byteBuf);
+ return byteBuf;
+ } catch (Throwable t) {
+ log.error("Error while encoding chunk(x={}, z={})!", getX(), getZ(), t);
+ byteBuf.release();
+ throw t;
+ }
+ }
+
+ private void writeToNetwork0(ByteBuf byteBuf) {
+ // Write blocks
+ for (int i = getDimensionInfo().minSectionY(); i <= getDimensionInfo().maxSectionY(); i++) {
+ getSection(i).writeToNetwork(byteBuf);
+ }
+
+ // Write biomes
+ for (var section : sections) {
+ section.biomes().writeToNetwork(byteBuf, BiomeType::getId);
+ }
+ byteBuf.writeByte(0); // border blocks (Education Edition feature); always empty here
+
+ // Write block entities
+ var blockEntities = getBlockEntities().values();
+ if (!blockEntities.isEmpty()) {
+ try (var writer = NbtUtils.createNetworkWriter(new ByteBufOutputStream(byteBuf))) {
+ for (var blockEntity : blockEntities) {
+ writer.writeTag(blockEntity.saveNBT());
+ }
+ } catch (Throwable t) {
+ log.error("Error while encoding block entities in chunk {}, {}", getX(), getZ(), t);
+ }
+ }
+ }
+
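+ // Pairs a queued packet with an optional predicate selecting which chunk loaders receive it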
+ protected record ChunkPacketEntry(BedrockPacket packet, Predicate<ChunkLoader> chunkLoaderPredicate) {}
}
diff --git a/server/src/main/java/org/allaymc/server/world/chunk/ChunkSectionLocks.java b/server/src/main/java/org/allaymc/server/world/chunk/ChunkSectionLocks.java
new file mode 100644
index 0000000000..e0de341f19
--- /dev/null
+++ b/server/src/main/java/org/allaymc/server/world/chunk/ChunkSectionLocks.java
@@ -0,0 +1,67 @@
+package org.allaymc.server.world.chunk;
+
+import org.allaymc.api.world.DimensionInfo;
+
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+/**
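+ * Holds one read-write lock per chunk section, indexed by section Y, so that different
+ * sections of the same chunk can be locked independently.
+ * <p>
+ * Illustrative usage sketch (not taken from a real call site):
+ * <pre>{@code
+ * locks.lockWriteLockAt(sectionY);
+ * try {
+ *     // mutate the section guarded by this lock
+ * } finally {
+ *     locks.unlockWriteLockAt(sectionY);
+ * }
+ * }</pre>
+ *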
+ * @author daoge_cmd
+ */
+public final class ChunkSectionLocks {
+
+ private final int minSectionY;
+ private final ReadWriteLock[] locks;
+
+ public ChunkSectionLocks(DimensionInfo dimensionInfo) {
+ this(dimensionInfo.minSectionY(), dimensionInfo.chunkSectionCount());
+ }
+
+ public ChunkSectionLocks(int minSectionY, int sectionCount) {
+ this.minSectionY = minSectionY;
+ this.locks = new ReentrantReadWriteLock[sectionCount];
+ for (int i = 0; i < sectionCount; i++) {
+ locks[i] = new ReentrantReadWriteLock();
+ }
+ }
+
+ public void lockReadLockAt(int sectionY) {
+ locks[sectionY - minSectionY].readLock().lock();
+ }
+
+ public void lockAllReadLocks() {
+ for (var lock : locks) {
+ lock.readLock().lock();
+ }
+ }
+
+ public void unlockReadLockAt(int sectionY) {
+ locks[sectionY - minSectionY].readLock().unlock();
+ }
+
+ public void unlockAllReadLocks() {
+ for (var lock : locks) {
+ lock.readLock().unlock();
+ }
+ }
+
+ public void lockWriteLockAt(int sectionY) {
+ locks[sectionY - minSectionY].writeLock().lock();
+ }
+
+ public void lockAllWriteLocks() {
+ for (var lock : locks) {
+ lock.writeLock().lock();
+ }
+ }
+
+ public void unlockWriteLockAt(int sectionY) {
+ locks[sectionY - minSectionY].writeLock().unlock();
+ }
+
+ public void unlockAllWriteLocks() {
+ for (var lock : locks) {
+ lock.writeLock().unlock();
+ }
+ }
+}
diff --git a/server/src/main/java/org/allaymc/server/world/generator/AllayWorldGenerator.java b/server/src/main/java/org/allaymc/server/world/generator/AllayWorldGenerator.java
index af263f2076..fed9635f33 100644
--- a/server/src/main/java/org/allaymc/server/world/generator/AllayWorldGenerator.java
+++ b/server/src/main/java/org/allaymc/server/world/generator/AllayWorldGenerator.java
@@ -21,7 +21,6 @@
import org.allaymc.server.AllayServer;
import org.allaymc.server.datastruct.collections.nb.Long2ObjectNonBlockingMap;
import org.allaymc.server.datastruct.collections.queue.BlockingQueueWrapper;
-import org.allaymc.server.world.chunk.AllayChunk;
import org.allaymc.server.world.chunk.AllayUnsafeChunk;
import java.util.Collections;
@@ -130,7 +129,7 @@ private void processPopulationQueue() {
statusPopulatedToFinished(chunk);
var chunkHash = HashUtils.hashXZ(chunk.getX(), chunk.getZ());
// Remove recorded futures
- ((AllayChunk) chunk).setChunkSetCallback(success -> {
+ ((AllayUnsafeChunk) chunk.toUnsafeChunk()).setChunkSetCallback(success -> {
// The stored futures should always be removed
chunkNoiseFutures.remove(chunkHash);
chunkFutures.remove(chunkHash);
@@ -234,7 +233,7 @@ private Chunk statusEmptyToNoised(Chunk chunk) {
}
}
- ((AllayChunk) chunk).setState(ChunkState.NOISED);
+ ((AllayUnsafeChunk) chunk.toUnsafeChunk()).setState(ChunkState.NOISED);
return chunk;
}
@@ -251,7 +250,7 @@ private void statusNoisedToPopulated(Chunk chunk) {
}
}
- ((AllayChunk) chunk).setState(ChunkState.POPULATED);
+ ((AllayUnsafeChunk) chunk.toUnsafeChunk()).setState(ChunkState.POPULATED);
}
private void statusPopulatedToFinished(Chunk chunk) {
@@ -267,8 +266,8 @@ private void statusPopulatedToFinished(Chunk chunk) {
}
}
- ((AllayChunk) chunk).setState(ChunkState.ENTITY_SPAWNED);
- ((AllayChunk) chunk).setState(ChunkState.FINISHED);
+ ((AllayUnsafeChunk) chunk.toUnsafeChunk()).setState(ChunkState.ENTITY_SPAWNED);
+ ((AllayUnsafeChunk) chunk.toUnsafeChunk()).setState(ChunkState.FINISHED);
}
protected static final class AllayWorldGeneratorBuilder implements WorldGenerator.WorldGeneratorBuilder {
diff --git a/server/src/main/java/org/allaymc/server/world/service/AllayChunkService.java b/server/src/main/java/org/allaymc/server/world/service/AllayChunkService.java
index 8bd63d048b..f37048c38d 100644
--- a/server/src/main/java/org/allaymc/server/world/service/AllayChunkService.java
+++ b/server/src/main/java/org/allaymc/server/world/service/AllayChunkService.java
@@ -23,7 +23,6 @@
import org.allaymc.api.world.service.ChunkService;
import org.allaymc.api.world.storage.WorldStorage;
import org.allaymc.server.datastruct.collections.nb.Long2ObjectNonBlockingMap;
-import org.allaymc.server.world.chunk.AllayChunk;
import org.allaymc.server.world.chunk.AllayUnsafeChunk;
import org.allaymc.server.world.generator.AllayWorldGenerator;
import org.jetbrains.annotations.UnmodifiableView;
@@ -94,7 +93,7 @@ private void tickChunks(long currentTick) {
}
try {
- ((AllayChunk) chunk).tick(currentTick, dimension, worldStorage);
+ ((AllayUnsafeChunk) chunk.toUnsafeChunk()).tick(currentTick, dimension, worldStorage);
} catch (Throwable t) {
log.error("Error while ticking chunk({}, {})!", chunk.getX(), chunk.getZ(), t);
}
@@ -117,7 +116,7 @@ private boolean shouldTickChunk(Chunk chunk) {
}
public void sendChunkPackets() {
- loadedChunks.values().forEach(Chunk::sendChunkPackets);
+ loadedChunks.values().forEach(chunk -> ((AllayUnsafeChunk) chunk.toUnsafeChunk()).sendChunkPackets());
}
private void tickChunkLoaders() {
@@ -259,14 +258,14 @@ public CompletableFuture loadChunk(int x, int z) {
}).thenAccept(preparedChunk -> {
boolean success = true;
try {
- ((AllayChunk) preparedChunk).beforeSetChunk(dimension);
+ ((AllayUnsafeChunk) preparedChunk.toUnsafeChunk()).beforeSetChunk(dimension);
setChunk(x, z, preparedChunk);
} catch (Throwable t) {
log.error("Error while setting chunk ({},{}) !", x, z, t);
success = false;
} finally {
loadingChunks.remove(hashXZ);
- ((AllayChunk) preparedChunk).afterSetChunk(dimension, success);
+ ((AllayUnsafeChunk) preparedChunk.toUnsafeChunk()).afterSetChunk(dimension, success);
if (success) {
future.complete(preparedChunk);
new ChunkLoadEvent(dimension, preparedChunk).call();
@@ -544,8 +543,8 @@ private void loadAndSendQueuedChunks() {
var lcpStream = chunkReadyToSend.values().stream();
lcpStream.sorted(chunkDistanceComparator).forEachOrdered(chunk -> {
var lcp = useSubChunkSendingSystem ?
- ((AllayChunk) chunk).createSubChunkLevelChunkPacket() :
- ((AllayChunk) chunk).createFullLevelChunkPacketChunk();
+ ((AllayUnsafeChunk) chunk.toUnsafeChunk()).createSubChunkLevelChunkPacket() :
+ ((AllayUnsafeChunk) chunk.toUnsafeChunk()).createFullLevelChunkPacketChunk();
chunkLoader.sendPacket(lcp);
chunkLoader.onChunkInRangeSend(chunk);
});
@@ -580,7 +579,7 @@ public void stop() {
private void tick() {
while (!chunkSendingQueue.isEmpty()) {
var chunk = chunkSendingQueue.poll();
- var lcp = ((AllayChunk) chunk).createFullLevelChunkPacketChunk();
+ var lcp = ((AllayUnsafeChunk) chunk.toUnsafeChunk()).createFullLevelChunkPacketChunk();
chunkLoader.sendPacket(lcp);
chunkLoader.onChunkInRangeSend(chunk);
}
diff --git a/server/src/main/java/org/allaymc/server/world/service/AllayEntityService.java b/server/src/main/java/org/allaymc/server/world/service/AllayEntityService.java
index e13003da6e..2905703744 100644
--- a/server/src/main/java/org/allaymc/server/world/service/AllayEntityService.java
+++ b/server/src/main/java/org/allaymc/server/world/service/AllayEntityService.java
@@ -8,7 +8,7 @@
import org.allaymc.api.world.service.EntityService;
import org.allaymc.server.entity.component.EntityBaseComponentImpl;
import org.allaymc.server.entity.impl.EntityImpl;
-import org.allaymc.server.world.chunk.AllayChunk;
+import org.allaymc.server.world.chunk.AllayUnsafeChunk;
import java.util.Queue;
@@ -41,15 +41,15 @@ public void tick() {
private void removeEntityImmediately(Entity entity) {
new EntityDespawnEvent(entity).call();
- var chunk = (AllayChunk) entity.getCurrentChunk();
- if (chunk == null) {
+ var chunk = entity.getCurrentChunk();
+ if (chunk == null) {
throw new IllegalStateException("Trying to despawn an entity from an unload chunk!");
}
- chunk.removeEntity(entity.getRuntimeId());
+ ((AllayUnsafeChunk) chunk.toUnsafeChunk()).removeEntity(entity.getRuntimeId());
entityPhysicsService.removeEntity(entity);
-
entity.despawnFromAll();
+
var baseComponent = ((EntityBaseComponentImpl) ((EntityImpl) entity).getBaseComponent());
baseComponent.setWillBeDespawnedNextTick(false);
baseComponent.setSpawned(false);
@@ -58,15 +58,15 @@ private void removeEntityImmediately(Entity entity) {
private void addEntityImmediately(Entity entity) {
new EntitySpawnEvent(entity).call();
- var chunk = (AllayChunk) entity.getCurrentChunk();
- if (chunk == null) {
+ var chunk = entity.getCurrentChunk();
+ if (chunk == null) {
throw new IllegalStateException("Entity can't spawn in unloaded chunk!");
}
- chunk.addEntity(entity);
- entity.spawnTo(chunk.getPlayerChunkLoaders());
-
+ var unsafeChunk = (AllayUnsafeChunk) chunk.toUnsafeChunk();
+ unsafeChunk.addEntity(entity);
entityPhysicsService.addEntity(entity);
+ entity.spawnTo(unsafeChunk.getPlayerChunkLoaders());
+
var baseComponent = ((EntityBaseComponentImpl) ((EntityImpl) entity).getBaseComponent());
baseComponent.setWillBeSpawnedNextTick(false);
baseComponent.setSpawned(true);
diff --git a/server/src/main/java/org/allaymc/server/world/storage/AllayLevelDBWorldStorage.java b/server/src/main/java/org/allaymc/server/world/storage/AllayLevelDBWorldStorage.java
index 3cb7c58739..f12ec729a9 100644
--- a/server/src/main/java/org/allaymc/server/world/storage/AllayLevelDBWorldStorage.java
+++ b/server/src/main/java/org/allaymc/server/world/storage/AllayLevelDBWorldStorage.java
@@ -19,7 +19,7 @@
import org.allaymc.api.world.biome.BiomeType;
import org.allaymc.api.world.chunk.Chunk;
import org.allaymc.api.world.chunk.ChunkState;
-import org.allaymc.api.world.chunk.UnsafeChunk;
+import org.allaymc.api.world.chunk.OperationType;
import org.allaymc.api.world.storage.WorldStorage;
import org.allaymc.api.world.storage.WorldStorageException;
import org.allaymc.server.datastruct.collections.nb.Int2ObjectNonBlockingMap;
@@ -29,8 +29,8 @@
import org.allaymc.server.world.AllayWorldData;
import org.allaymc.server.world.HeightMap;
import org.allaymc.server.world.chunk.AllayChunkBuilder;
+import org.allaymc.server.world.chunk.AllayChunkSection;
import org.allaymc.server.world.chunk.AllayUnsafeChunk;
-import org.allaymc.server.world.chunk.ChunkSection;
import org.allaymc.server.world.chunk.ScheduledUpdateInfo;
import org.allaymc.server.world.gamerule.AllayGameRules;
import org.allaymc.updater.block.BlockStateUpdaters;
@@ -190,12 +190,13 @@ public void writeChunkSync(Chunk chunk) {
.writeByte(VanillaChunkState.DONE.ordinal())
.array()
);
- chunk.batchProcess(c -> {
- serializeSections(writeBatch, (AllayUnsafeChunk) c);
- serializeHeightAndBiome(writeBatch, (AllayUnsafeChunk) c);
- serializeEntitiesAndBlockEntities(writeBatch, (AllayUnsafeChunk) c);
- serializeScheduledUpdates(writeBatch, (AllayUnsafeChunk) c);
- });
+ chunk.applyOperation(c -> {
+ var allayUnsafeChunk = (AllayUnsafeChunk) c;
+ serializeSections(writeBatch, allayUnsafeChunk);
+ serializeHeightAndBiome(writeBatch, allayUnsafeChunk);
+ serializeEntitiesAndBlockEntities(writeBatch, allayUnsafeChunk);
+ serializeScheduledUpdates(writeBatch, allayUnsafeChunk);
+ }, OperationType.READ, OperationType.READ, OperationType.READ);
this.db.write(writeBatch);
} catch (IOException e) {
throw new WorldStorageException(e);
@@ -354,13 +355,13 @@ public void shutdown() {
private static void serializeSections(WriteBatch writeBatch, AllayUnsafeChunk chunk) {
for (int ySection = chunk.getDimensionInfo().minSectionY(); ySection <= chunk.getDimensionInfo().maxSectionY(); ySection++) {
- ChunkSection section = chunk.getSection(ySection);
+ AllayChunkSection section = chunk.getSection(ySection);
ByteBuf buffer = ByteBufAllocator.DEFAULT.ioBuffer();
try {
- buffer.writeByte(ChunkSection.CURRENT_CHUNK_SECTION_VERSION);
- buffer.writeByte(ChunkSection.LAYER_COUNT);
+ buffer.writeByte(AllayChunkSection.CURRENT_CHUNK_SECTION_VERSION);
+ buffer.writeByte(AllayChunkSection.LAYER_COUNT);
buffer.writeByte(ySection);
- for (int i = 0; i < ChunkSection.LAYER_COUNT; i++) {
+ for (int i = 0; i < AllayChunkSection.LAYER_COUNT; i++) {
section.blockLayers()[i].writeToStoragePersistent(buffer, BlockState::getBlockStateTag);
}
writeBatch.put(LevelDBKey.CHUNK_SECTION_PREFIX.getKey(chunk.getX(), chunk.getZ(), ySection, chunk.getDimensionInfo()), Utils.convertByteBuf2Array(buffer));
@@ -372,7 +373,7 @@ private static void serializeSections(WriteBatch writeBatch, AllayUnsafeChunk ch
private static void deserializeSections(DB db, AllayChunkBuilder builder) {
DimensionInfo dimensionInfo = builder.getDimensionInfo();
- ChunkSection[] sections = new ChunkSection[dimensionInfo.chunkSectionCount()];
+ AllayChunkSection[] sections = new AllayChunkSection[dimensionInfo.chunkSectionCount()];
var minSectionY = dimensionInfo.minSectionY();
for (int ySection = minSectionY; ySection <= dimensionInfo.maxSectionY(); ySection++) {
byte[] sectionData = db.get(LevelDBKey.CHUNK_SECTION_PREFIX.getKey(builder.getChunkX(), builder.getChunkZ(), ySection, dimensionInfo));
@@ -382,7 +383,7 @@ private static void deserializeSections(DB db, AllayChunkBuilder builder) {
var byteBuf = Unpooled.wrappedBuffer(sectionData);
byte subChunkVersion = byteBuf.readByte();
- int layers = ChunkSection.LAYER_COUNT;
+ int layers = AllayChunkSection.LAYER_COUNT;
switch (subChunkVersion) {
case 9, 8:
// Layers
@@ -392,19 +393,19 @@ private static void deserializeSections(DB db, AllayChunkBuilder builder) {
byteBuf.readByte();
}
case 1:
- ChunkSection section;
- if (layers <= ChunkSection.LAYER_COUNT) {
+ AllayChunkSection section;
+ if (layers <= AllayChunkSection.LAYER_COUNT) {
// This is the normal situation where the chunk section is loaded correctly,
// and we use the single-arg constructor of ChunkSection directly to avoid
// using Arrays.fill(), which will be slower
- section = new ChunkSection((byte) ySection);
+ section = new AllayChunkSection((byte) ySection);
} else {
// Currently only two layers are used in minecraft, so that might mean this chunk is corrupted
// However we can still load it c:
log.warn("Loading chunk section ({}, {}, {}) with {} layers, which might mean that this chunk is corrupted!", builder.getChunkX(), ySection, builder.getChunkZ(), layers);
@SuppressWarnings("rawtypes") Palette[] palettes = new Palette[layers];
Arrays.fill(palettes, new Palette<>(BlockTypes.AIR.getDefaultState()));
- section = new ChunkSection((byte) ySection, palettes);
+ section = new AllayChunkSection((byte) ySection, palettes);
}
for (int layer = 0; layer < layers; layer++) {
section.blockLayers()[layer].readFromStoragePersistent(byteBuf, AllayLevelDBWorldStorage::fastBlockStateDeserializer);
@@ -452,10 +453,10 @@ private static BlockState fastBlockStateDeserializer(ByteBuf buffer) {
return BlockTypes.UNKNOWN.getDefaultState();
}
- private static ChunkSection[] fillNullSections(ChunkSection[] sections, DimensionInfo dimensionInfo) {
+ private static AllayChunkSection[] fillNullSections(AllayChunkSection[] sections, DimensionInfo dimensionInfo) {
for (int i = 0; i < sections.length; i++) {
if (sections[i] == null) {
- sections[i] = new ChunkSection((byte) (i + dimensionInfo.minSectionY()));
+ sections[i] = new AllayChunkSection((byte) (i + dimensionInfo.minSectionY()));
}
}
return sections;
@@ -476,7 +477,7 @@ private static void serializeHeightAndBiome(WriteBatch writeBatch, AllayUnsafeCh
// Serialize biome
Palette lastPalette = null;
for (int y = chunk.getDimensionInfo().minSectionY(); y <= chunk.getDimensionInfo().maxSectionY(); y++) {
- ChunkSection section = chunk.getSection(y);
+ AllayChunkSection section = chunk.getSection(y);
section.biomes().writeToStorageRuntime(heightAndBiomesBuffer, BiomeType::getId, lastPalette);
lastPalette = section.biomes();
}
@@ -504,7 +505,7 @@ private static void deserializeHeightAndBiome(DB db, AllayChunkBuilder builder)
Palette lastPalette = null;
var minSectionY = builder.getDimensionInfo().minSectionY();
for (int y = minSectionY; y <= builder.getDimensionInfo().maxSectionY(); y++) {
- ChunkSection section = builder.getSections()[y - minSectionY];
+ AllayChunkSection section = builder.getSections()[y - minSectionY];
if (section == null) continue;
section.biomes().readFromStorageRuntime(heightAndBiomesBuffer, AllayLevelDBWorldStorage::getBiomeByIdNonNull, lastPalette);
lastPalette = section.biomes();
@@ -527,13 +528,13 @@ private static void deserializeHeightAndBiome(DB db, AllayChunkBuilder builder)
var minSectionY = builder.getDimensionInfo().minSectionY();
for (int y = minSectionY; y <= builder.getDimensionInfo().maxSectionY(); y++) {
- ChunkSection section = builder.getSections()[y - minSectionY];
+ AllayChunkSection section = builder.getSections()[y - minSectionY];
if (section == null) continue;
final Palette biomePalette = section.biomes();
for (int x = 0; x < 16; x++) {
for (int z = 0; z < 16; z++) {
for (int sy = 0; sy < 16; sy++) {
- biomePalette.set(UnsafeChunk.index(x, sy, z), getBiomeByIdNonNull(biomes[x + 16 * z]));
+ biomePalette.set(HashUtils.hashChunkSectionXYZ(x, sy, z), getBiomeByIdNonNull(biomes[x + 16 * z]));
}
}
}
@@ -616,7 +617,7 @@ private static List deserializeNbtTagsFromBytes(byte[] bytes) {
}
private static void serializeScheduledUpdates(WriteBatch writeBatch, AllayUnsafeChunk chunk) {
- var scheduledUpdates = chunk.getScheduledUpdatesUnsafe().values();
+ var scheduledUpdates = chunk.getScheduledUpdates().values();
byte[] key = LevelDBKey.ALLAY_SCHEDULED_UPDATES.getKey(chunk.getX(), chunk.getZ(), chunk.getDimensionInfo());
if (scheduledUpdates.isEmpty()) {
writeBatch.delete(key);
diff --git a/server/src/test/java/org/allaymc/server/world/chunk/AllayChunkTest.java b/server/src/test/java/org/allaymc/server/world/chunk/AllayChunkTest.java
index 313e39e73e..eabce9d5ee 100644
--- a/server/src/test/java/org/allaymc/server/world/chunk/AllayChunkTest.java
+++ b/server/src/test/java/org/allaymc/server/world/chunk/AllayChunkTest.java
@@ -19,7 +19,7 @@
@Slf4j
@ExtendWith(AllayTestExtension.class)
class AllayChunkTest {
- final Chunk chunk = new AllayChunk(AllayUnsafeChunk.builder().newChunk(0, 0, DimensionInfo.OVERWORLD));
+ final Chunk chunk = AllayUnsafeChunk.builder().newChunk(0, 0, DimensionInfo.OVERWORLD).toSafeChunk();
@Test
void testInvalidGetBlockStateMethodCall() {
diff --git a/server/src/test/java/org/allaymc/server/world/service/AllayChunkServiceTest.java b/server/src/test/java/org/allaymc/server/world/service/AllayChunkServiceTest.java
index 1deb34515a..961f9951b5 100644
--- a/server/src/test/java/org/allaymc/server/world/service/AllayChunkServiceTest.java
+++ b/server/src/test/java/org/allaymc/server/world/service/AllayChunkServiceTest.java
@@ -2,6 +2,8 @@
import org.allaymc.api.world.Dimension;
import org.allaymc.api.world.DimensionInfo;
+import org.allaymc.api.world.World;
+import org.allaymc.server.world.AllayWorldData;
import org.allaymc.server.world.generator.AllayWorldGenerator;
import org.allaymc.testutils.AllayTestExtension;
import org.junit.jupiter.api.*;
@@ -18,6 +20,7 @@
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
public class AllayChunkServiceTest {
static Dimension mockDimension = Mockito.mock(Dimension.class);
+ static World mockWorld = Mockito.mock(World.class);
static AllayChunkService chunkService;
static TestChunkLoader chunkLoader1 = new TestChunkLoader();
@@ -26,6 +29,9 @@ static void init() {
Mockito.when(mockDimension.getDimensionInfo()).thenReturn(DimensionInfo.OVERWORLD);
var testLightService = new TestLightService();
Mockito.when(mockDimension.getLightService()).thenReturn(testLightService);
+ Mockito.when(mockDimension.getWorld()).thenReturn(mockWorld);
+ var defaultWorldData = AllayWorldData.builder().build();
+ Mockito.when(mockWorld.getWorldData()).thenReturn(defaultWorldData);
chunkService = new AllayChunkService(mockDimension, AllayWorldGenerator.builder().name("TEST").build(), new TestWorldStorage());
}
diff --git a/server/src/test/java/org/allaymc/server/world/service/TestWorldStorage.java b/server/src/test/java/org/allaymc/server/world/service/TestWorldStorage.java
index 17648fcdbc..d71263bd49 100644
--- a/server/src/test/java/org/allaymc/server/world/service/TestWorldStorage.java
+++ b/server/src/test/java/org/allaymc/server/world/service/TestWorldStorage.java
@@ -5,7 +5,6 @@
import org.allaymc.api.world.chunk.Chunk;
import org.allaymc.api.world.chunk.ChunkState;
import org.allaymc.api.world.storage.WorldStorage;
-import org.allaymc.server.world.chunk.AllayChunk;
import org.allaymc.server.world.chunk.AllayUnsafeChunk;
import java.util.concurrent.CompletableFuture;
@@ -17,7 +16,7 @@ public class TestWorldStorage implements WorldStorage {
@Override
public CompletableFuture readChunk(int chunkX, int chunkZ, DimensionInfo dimensionInfo) {
var chunk = AllayUnsafeChunk.builder().newChunk(chunkX, chunkZ, dimensionInfo).toSafeChunk();
- ((AllayChunk) chunk).setState(ChunkState.FINISHED);
+ ((AllayUnsafeChunk) chunk.toUnsafeChunk()).setState(ChunkState.FINISHED);
return CompletableFuture.completedFuture(chunk);
}