Some changes for efficiency (not yet enabled)

- Tried to use threads/executors in ChunkRequestDaemon; it currently just hangs (a sketch of a blocking hand-off follows the ChunkRequestDaemon diff below).
- Added isEmpty and isOpaque fields to DefaultChunkData (should these live in ChunkData instead?)
	- Added compute and getter methods to access them (for everything after loading)
	- Empty chunks are not rendered (not yet used)
- Format 65537 allows the empty and opaque flags to be saved. They do not do anything yet and there is no way to set them yet (see the flag-byte sketch below).
	- Added loadRegionX and the ".progressia_regionx" file extension
- Removed formats 0 and 1, which used individual chunk files.
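
Below is a minimal, standalone sketch (not part of the commit) of how the format 65537 flag byte is packed on save and unpacked on load, mirroring the bit layout used in TestWorldDiskIO. The class and method names (ChunkFlagsSketch, packChunkFlags) are made up for illustration.

// Illustrative sketch only; these names do not exist in the codebase.
public class ChunkFlagsSketch {

	// Pack the two cached flags into one byte: bit 1 = opaque, bit 0 = empty.
	static int packChunkFlags(boolean isOpaque, boolean isEmpty) {
		return ((isOpaque ? 1 : 0) << 1) | (isEmpty ? 1 : 0);
	}

	// Unpack the byte read back before the chunk data, as loadRegionX does.
	static boolean isOpaque(int flagByte) {
		return (flagByte & 2) == 2;
	}

	static boolean isEmpty(int flagByte) {
		return (flagByte & 1) == 1;
	}

	public static void main(String[] args) {
		int flagByte = packChunkFlags(true, false);
		System.out.println("flag byte = " + flagByte
			+ ", opaque = " + isOpaque(flagByte)
			+ ", empty = " + isEmpty(flagByte));
	}
}

In the .progressia_regionx layout, this byte sits at fullOffset + sectorSize * offset, immediately before the deflated chunk data; the first 4 * chunksPerRegion bytes of each region file hold a 3-byte sector offset and a 1-byte sector length per chunk.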
opfromthestart 2021-08-27 16:59:05 -04:00
parent 9dcb3a7748
commit 98250cd524
5 changed files with 283 additions and 173 deletions

View File

@@ -102,8 +102,10 @@ public class ChunkRender
}
public synchronized void render(ShapeRenderHelper renderer) {
if (!data.isEmpty) {
model.render(renderer);
}
}
public synchronized void update() {
model.update();

View File

@@ -25,8 +25,8 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Objects;
import glm.vec._3.i.Vec3i;
import ru.windcorp.progressia.common.world.block.BlockData;
@@ -46,6 +46,11 @@ public class DefaultChunkData implements ChunkData {
public static final int BLOCKS_PER_CHUNK = Coordinates.CHUNK_SIZE;
public static final int CHUNK_RADIUS = BLOCKS_PER_CHUNK / 2;
// Cached flags: computed after generation and, for format 65537, restored from disk on load.
public boolean isEmpty = false;
public boolean isOpaque = false;
// Blocks treated as see-through by computeOpaque(); must be populated before it is called.
public static HashSet<BlockData> transparent;
private final Vec3i position = new Vec3i();
private final DefaultWorldData world;
@@ -200,6 +205,44 @@ public class DefaultChunkData implements ChunkData {
this.generationHint = generationHint;
}
// Recomputes isOpaque: the chunk counts as opaque when none of its blocks are in the transparent set.
public void computeOpaque() {
for (int xyz = 0; xyz < BLOCKS_PER_CHUNK * BLOCKS_PER_CHUNK * BLOCKS_PER_CHUNK; xyz++) {
if (transparent.contains(blocks[xyz])) {
isOpaque = false;
return;
}
}
isOpaque = true;
}
public boolean isOpaque() {
return isOpaque;
}
// Recomputes isEmpty: the chunk counts as empty when every block is air.
// Compare by ID rather than by reference, since a freshly constructed BlockData is never the same instance as the stored blocks.
public void computeEmpty() {
for (int xyz = 0; xyz < BLOCKS_PER_CHUNK * BLOCKS_PER_CHUNK * BLOCKS_PER_CHUNK; xyz++) {
if (!"Test:Air".equals(blocks[xyz].getId())) {
isEmpty = false;
return;
}
}
isEmpty = true;
}
public boolean isEmpty() {
return isEmpty;
}
/**
* Implementation of {@link TileDataStack} used internally by
* {@link DefaultChunkData} to

View File

@@ -20,7 +20,8 @@ package ru.windcorp.progressia.server.management.load;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import glm.vec._3.i.Vec3i;
import ru.windcorp.progressia.common.Units;
import ru.windcorp.progressia.common.world.generic.ChunkMap;
@@ -44,6 +45,8 @@ public class ChunkRequestDaemon {
private final ChunkSet toGenerate = ChunkSets.newHashSet();
private final ChunkSet toRequestUnload = ChunkSets.newHashSet();
private final ExecutorService executor = Executors.newSingleThreadExecutor();
private final Collection<Vec3i> buffer = new ArrayList<>();
private static class ChunkUnloadRequest {
@@ -113,16 +116,16 @@ public class ChunkRequestDaemon {
}
private void processLoadQueues() {
toRequestUnload.forEach(this::scheduleUnload);
toRequestUnload.forEach((pos) -> executor.submit(() -> scheduleUnload(pos)));
toRequestUnload.clear();
toLoad.forEach(getChunkManager()::loadOrGenerateChunk);
toLoad.forEach((pos) -> executor.submit(() -> getChunkManager().loadOrGenerateChunk(pos)));
toLoad.clear();
toGenerate.forEach(getChunkManager()::loadOrGenerateChunk);
toGenerate.forEach((pos) -> executor.submit(() -> getChunkManager().loadOrGenerateChunk(pos)));
toGenerate.clear();
unloadScheduledChunks();
executor.submit(() -> unloadScheduledChunks());
}
private void scheduleUnload(Vec3i chunkPos) {

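Not part of the commit: a standalone sketch of one way the executor hand-off above could block until the submitted chunk work finishes, on the (unverified) assumption that the hang comes from later code expecting processLoadQueues to have drained the queues synchronously. All names here (ExecutorHandOffSketch, fakeLoadChunk) are hypothetical.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class ExecutorHandOffSketch {

	private static final ExecutorService EXECUTOR = Executors.newSingleThreadExecutor();

	// Stand-in for getChunkManager().loadOrGenerateChunk(pos).
	static void fakeLoadChunk(int pos) {
		System.out.println("loaded chunk " + pos);
	}

	public static void main(String[] args) throws InterruptedException, ExecutionException {
		List<Future<?>> pending = new ArrayList<>();
		for (int pos = 0; pos < 4; pos++) {
			int p = pos; // effectively final copy for the lambda
			pending.add(EXECUTOR.submit(() -> fakeLoadChunk(p)));
		}
		// Block until every submitted task has run before the tick continues.
		for (Future<?> f : pending) {
			f.get();
		}
		EXECUTOR.shutdown();
	}
}

ExecutorService.invokeAll would be another way to submit a batch of tasks and block until the whole batch completes.
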
View File

@@ -56,6 +56,9 @@ public class PlanetFeatureGenerator {
generateBorderFeatures(server, chunk);
}
chunk.computeEmpty();
chunk.computeOpaque();
chunk.setGenerationHint(true);
}

View File

@@ -126,7 +126,7 @@ public class TestWorldDiskIO {
private static final Logger LOG = LogManager.getLogger("TestWorldDiskIO");
private static HashMap<HashableVec3i, RandomFileMapped> inOutMap;
private static final boolean ENABLE = true;
private static final boolean ENABLE = false;
private static int maxSize = 1048576;
private static int sectorSize = maxSize / 256;
@@ -227,19 +227,19 @@ public class TestWorldDiskIO {
private static void setRegionSize(int format) {
inOutMap = new HashMap<HashableVec3i, RandomFileMapped>();
switch (format) {
case 0:
case 1:
regionSize = new Vec3i(1);
chunksPerRegion = 1;
currentFormat = format;
extension = ".progressia_chunk";
break;
case 65536:
default:
regionSize = new Vec3i(16);
chunksPerRegion = 16 * 16 * 16;
currentFormat = 65536;
extension = ".progressia_region";
break;
case 65537:
regionSize = new Vec3i(16);
chunksPerRegion = 16 * 16 * 16;
currentFormat = 65537;
extension = ".progressia_regionx";
break;
}
}
@@ -288,63 +288,7 @@ public class TestWorldDiskIO {
try {
if (currentFormat == 0) {
LOG.debug(
"Saving {} {} {}",
chunk.getPosition().x,
chunk.getPosition().y,
chunk.getPosition().z
);
Files.createDirectories(SAVE_DIR);
Path path = SAVE_DIR.resolve(
String.format(
"chunk_%+d_%+d_%+d" + extension,
chunk.getPosition().x,
chunk.getPosition().y,
chunk.getPosition().z
)
);
try (
DataOutputStream output = new DataOutputStream(
new DeflaterOutputStream(new BufferedOutputStream(Files.newOutputStream(path)))
)
) {
ChunkIO.save(chunk, output, IOContext.SAVE);
writeGenerationHint(chunk, output, server);
}
} else if (currentFormat == 1) {
LOG.debug(
"Saving {} {} {}",
chunk.getPosition().x,
chunk.getPosition().y,
chunk.getPosition().z
);
Files.createDirectories(SAVE_DIR);
Vec3i saveCoords = getRegion(chunk.getPosition());
Path path = SAVE_DIR.resolve(
String.format(
"chunk_%d_%d_%d" + extension,
saveCoords.x,
saveCoords.y,
saveCoords.z
)
);
try (
DataOutputStream output = new DataOutputStream(
new DeflaterOutputStream(new BufferedOutputStream(Files.newOutputStream(path)))
)
) {
ChunkIO.save(chunk, output, IOContext.SAVE);
writeGenerationHint(chunk, output, server);
}
} else if (currentFormat == 65536) {
if (currentFormat == 65536) {
LOG.debug(
"Saving {} {} {}",
chunk.getPosition().x,
@@ -430,6 +374,97 @@ public class TestWorldDiskIO {
// LOG.info("Used {} sectors",(int)
// tempData.length/sectorSize + 1);
}
else if (currentFormat == 65537) {
LOG.debug(
"Saving {} {} {}",
chunk.getPosition().x,
chunk.getPosition().y,
chunk.getPosition().z
);
Files.createDirectories(SAVE_DIR);
Vec3i saveCoords = getRegion(chunk.getPosition());
Path path = SAVE_DIR.resolve(
String.format(
"%d_%d_%d" + extension,
saveCoords.x,
saveCoords.y,
saveCoords.z
)
);
RandomFileMapped outputMap = inOutMap.get(new HashableVec3i(saveCoords));
//LOG.info("saveCoords {},{},{}", saveCoords.x, saveCoords.y, saveCoords.z);
if (outputMap == null)
{
outputMap = makeNew(path, new HashableVec3i(saveCoords));
}
RandomAccessFile output = outputMap.file;
Vec3i pos = getRegionLoc(chunk.getPosition());
int shortOffset = 4 * (pos.z + regionSize.z * (pos.y + regionSize.y * pos.x));
int fullOffset = 4 * (chunksPerRegion);
int offset = 0;
if (outputMap.hasOffset(pos))
{
offset = outputMap.getOffset(pos);
}
else {
output.seek(shortOffset);
for (int i = 0; i < 3; i++) {
offset *= 256;
offset += output.read();
}
int sectorLength = output.read();
if (sectorLength == 0) {
int outputLen = (int) output.length();
offset = (int) (outputLen - fullOffset) / sectorSize + 1;
int tempOffset = offset;
output.seek(shortOffset);
byte readOffset[] = new byte[3];
for (int i = 0; i < 3; i++) {
readOffset[2 - i] = (byte) (tempOffset % 256);
tempOffset >>= 8;
}
output.write(readOffset);
output.setLength(fullOffset + offset * sectorSize);
}
outputMap.putOffset(pos, offset);
}
ByteArrayOutputStream tempDataStream = new ByteArrayOutputStream();
DataOutputStream trueOutput = new DataOutputStream(
new DeflaterOutputStream(
new BufferedOutputStream(tempDataStream)
)
);
ChunkIO.save(chunk, trueOutput, IOContext.SAVE);
writeGenerationHint(chunk, trueOutput, server);
trueOutput.close();
byte tempData[] = tempDataStream.toByteArray();
output.seek(fullOffset + sectorSize * offset);
chunk.computeOpaque();
chunk.computeEmpty();
output.write(((chunk.isOpaque() ? 1 : 0) << 1) + (chunk.isEmpty() ? 1 : 0)); // extra flag byte: bit 1 = opaque, bit 0 = empty
output.write(tempData);
output.seek(shortOffset + 3);
output.write(tempData.length / sectorSize + 1);
outputMap.putLength(pos, tempData.length / sectorSize + 1);
// LOG.info("Used {} sectors",(int)
// tempData.length/sectorSize + 1);
}
// else if (currentFormat)
} catch (IOException e) {
@@ -522,94 +557,7 @@ public class TestWorldDiskIO {
}
}
if (currentFormat == 0) {
Path path = SAVE_DIR.resolve(
String.format(
"chunk_%+d_%+d_%+d" + extension,
chunkPos.x,
chunkPos.y,
chunkPos.z
)
);
if (!Files.exists(path)) {
LOG.debug(
"Not found {} {} {}",
chunkPos.x,
chunkPos.y,
chunkPos.z
);
return null;
}
try {
DefaultChunkData result = load(path, chunkPos, world, server);
LOG.debug(
"Loaded {} {} {}",
chunkPos.x,
chunkPos.y,
chunkPos.z
);
return result;
} catch (Exception e) {
e.printStackTrace();
LOG.debug(
"Could not load {} {} {}",
chunkPos.x,
chunkPos.y,
chunkPos.z
);
return null;
}
} else if (currentFormat == 1) {
Vec3i saveCoords = getRegion(chunkPos);
Path path = SAVE_DIR.resolve(
String.format(
"chunk_%d_%d_%d" + extension,
saveCoords.x,
saveCoords.y,
saveCoords.z
)
);
if (!Files.exists(path)) {
LOG.debug(
"Not found {} {} {}",
chunkPos.x,
chunkPos.y,
chunkPos.z
);
return null;
}
try {
DefaultChunkData result = load(path, chunkPos, world, server);
LOG.debug(
"Loaded {} {} {}",
chunkPos.x,
chunkPos.y,
chunkPos.z
);
return result;
} catch (Exception e) {
e.printStackTrace();
LOG.debug(
"Could not load {} {} {}",
chunkPos.x,
chunkPos.y,
chunkPos.z
);
return null;
}
} else if (currentFormat == 65536) {
if (currentFormat == 65536) {
Vec3i saveCoords = getRegion(chunkPos);
Path path = SAVE_DIR.resolve(
@@ -654,22 +602,53 @@ public class TestWorldDiskIO {
return null;
}
}
else if (currentFormat == 65537) {
Vec3i saveCoords = getRegion(chunkPos);
Path path = SAVE_DIR.resolve(
String.format(
"%d_%d_%d" + extension,
saveCoords.x,
saveCoords.y,
saveCoords.z
)
);
if (!Files.exists(path)) {
LOG.debug(
"Not found {} {} {}",
chunkPos.x,
chunkPos.y,
chunkPos.z
);
return null;
}
private static DefaultChunkData load(Path path, Vec3i chunkPos, DefaultWorldData world, Server server)
throws IOException,
DecodingException {
try (
DataInputStream input = new DataInputStream(
new InflaterInputStream(new BufferedInputStream(Files.newInputStream(path)))
)
) {
DefaultChunkData chunk = ChunkIO.load(world, chunkPos, input, IOContext.SAVE);
readGenerationHint(chunk, input, server);
return chunk;
try {
DefaultChunkData result = loadRegionX(path, chunkPos, world, server);
LOG.debug(
"Loaded {} {} {}",
chunkPos.x,
chunkPos.y,
chunkPos.z
);
return result;
} catch (Exception e) {
e.printStackTrace();
LOG.debug(
"Could not load {} {} {}",
chunkPos.x,
chunkPos.y,
chunkPos.z
);
return null;
}
}
return null;
}
private static DefaultChunkData loadRegion(Path path, Vec3i chunkPos, DefaultWorldData world, Server server)
throws IOException,
@@ -745,6 +724,86 @@ public class TestWorldDiskIO {
return null;
}
private static DefaultChunkData loadRegionX(Path path, Vec3i chunkPos, DefaultWorldData world, Server server)
throws IOException,
DecodingException {
int offset = 0;
int sectorLength = 0;
Vec3i pos;
RandomFileMapped inputMap;
int fullOffset = 4 * (chunksPerRegion);
try
{
Vec3i streamCoords = getRegion(chunkPos);
inputMap = inOutMap.get(new HashableVec3i(streamCoords));
//LOG.info("streamCoords {},{},{}", streamCoords.x,streamCoords.y,streamCoords.z);
if (inputMap == null)
{
//input = new RandomAccessFile(path.toFile(), "rw");
//input = Files.newByteChannel(path);
inputMap = makeNew(path, new HashableVec3i(streamCoords));
}
RandomAccessFile input = inputMap.file;
pos = getRegionLoc(chunkPos);
if (inputMap.hasOffset(pos))
{
offset = inputMap.getOffset(pos);
sectorLength = inputMap.getLength(pos);
//LOG.info("{},{}", offset, sectorLength);
}
else
{
// LOG.info(path.toString());
int shortOffset = 4 * (pos.z + regionSize.z * (pos.y + regionSize.y * pos.x));
input.seek(shortOffset);
for (int i = 0; i < 3; i++) {
offset *= 256;
offset += input.read();
}
sectorLength = input.read();
if (sectorLength == 0)
{
return null;
}
inputMap.putOffset(pos, offset);
inputMap.putLength(pos, sectorLength);
}
input.seek(fullOffset + sectorSize * offset);
int xByte = input.read(); // flag byte written by the 65537 save path
// LOG.info("Read {} sectors", sectorLength);
byte[] tempData = new byte[sectorSize * sectorLength];
input.readFully(tempData); // read() may return fewer bytes than requested
DataInputStream trueInput = new DataInputStream(
new InflaterInputStream(new BufferedInputStream(new ByteArrayInputStream(tempData)))
);
DefaultChunkData chunk = ChunkIO.load(world, chunkPos, trueInput, IOContext.SAVE);
readGenerationHint(chunk, trueInput, server);
chunk.isOpaque = (xByte & 2) == 2; // restore flags from the extra byte: bit 1 = opaque
chunk.isEmpty = (xByte & 1) == 1; // bit 0 = empty
return chunk;
}
catch (EOFException e)
{
LOG.warn("Reached end of file, offset was {}, sectors was {}", offset, sectorLength);
e.printStackTrace();
}
return null;
}
private static void readGenerationHint(DefaultChunkData chunk, DataInputStream input, Server server)
throws IOException,
DecodingException {