change: Fixed some tests and alternate implementations

This commit is contained in:
Nicholas Novak 2023-11-18 19:47:09 -08:00
parent 70879ec312
commit 42cad2cecc
4 changed files with 58 additions and 12 deletions

View File

@ -1,6 +1,7 @@
package loading
import (
"path/filepath"
"strconv"
"strings"
@ -20,7 +21,7 @@ func LoadRegionFile(fileName string) ([]world.ChunkData, error) {
defer regionFile.Close()
// Parse the name of the region to find its position within the world
nameParts := strings.Split(fileName, ".")
nameParts := strings.Split(filepath.Base(fileName), ".")
regionX, err := strconv.Atoi(nameParts[1])
if err != nil {
return nil, err

View File

@ -7,12 +7,14 @@ import (
"git.nicholasnovak.io/nnovak/spatial-db/world"
)
var server storage.SimpleServer
func init() {
server.SetStorageRoot("skygrid-save")
}
// insertPointTemplate inserts a configurable variety of points into the server
func insertPointTemplate(testDir string, b *testing.B, pointSpread int) {
var server storage.InMemoryServer
server.SetStorageRoot(testDir)
b.ResetTimer()
for i := 0; i < b.N; i++ {
@ -24,10 +26,6 @@ func insertPointTemplate(testDir string, b *testing.B, pointSpread int) {
}
func fetchChunkTemplate(testDir string, b *testing.B) {
var server storage.SimpleServer
server.SetStorageRoot(testDir)
b.ResetTimer()
for i := 0; i < b.N; i++ {
@ -41,9 +39,9 @@ func fetchChunkTemplate(testDir string, b *testing.B) {
// Insert blocks
func BenchmarkInsertClusteredPoints(b *testing.B) {
insertPointTemplate("test-world", b, 128)
insertPointTemplate("imperial-test", b, 128)
}
func BenchmarkInsertSparserPoints(b *testing.B) {
insertPointTemplate("test-world", b, 2048)
insertPointTemplate("imperial-test", b, 2048)
}

View File

@ -1,6 +1,14 @@
package storage
import "git.nicholasnovak.io/nnovak/spatial-db/world"
import (
"encoding/json"
"os"
"path/filepath"
log "github.com/sirupsen/logrus"
"git.nicholasnovak.io/nnovak/spatial-db/world"
)
type HashServer struct {
blocks map[world.BlockPos]world.BlockID
@ -8,6 +16,37 @@ type HashServer struct {
// SetStorageRoot points the server at a directory of chunk files and
// eagerly loads every block state they contain into the in-memory map.
//
// Each entry in the directory is expected to be a JSON-encoded
// world.ChunkData. Any read or decode failure panics, which matches the
// fail-fast error handling used elsewhere in this package's servers.
func (hs *HashServer) SetStorageRoot(path string) {
	hs.blocks = make(map[world.BlockPos]world.BlockID)

	chunkFiles, err := os.ReadDir(path)
	if err != nil {
		panic(err)
	}

	for chunkIndex, chunkFile := range chunkFiles {
		// 1-based progress so the final line reads "N of N" rather than "N-1 of N".
		log.Infof("Reading in chunk %d of %d", chunkIndex+1, len(chunkFiles))
		hs.loadChunkFile(filepath.Join(path, chunkFile.Name()))
	}
}

// loadChunkFile reads a single JSON chunk file and inserts every block
// state it contains into hs.blocks. Split out of SetStorageRoot so that
// the deferred Close fires once per file (instead of a bare Close call
// that is skipped if the JSON decode panics mid-load).
func (hs *HashServer) loadChunkFile(name string) {
	f, err := os.Open(name)
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// Read each file from disk
	var data world.ChunkData
	if err := json.NewDecoder(f).Decode(&data); err != nil {
		panic(err)
	}

	// Load in each data point from disk
	for _, section := range data.Sections {
		for blockIndex, blockState := range section.BlockStates {
			pos := data.IndexToBlockPos(blockIndex)
			hs.blocks[pos] = blockState
		}
	}
}
func (hs *HashServer) FetchChunk(pos world.ChunkPos) (world.ChunkData, error) {

View File

@ -15,6 +15,14 @@ func (cd *ChunkData) SectionFor(pos BlockPos) *ChunkSection {
return &cd.Sections[pos.Y%ChunkSectionCount]
}
// IndexToBlockPos maps a flat block-state index within this chunk back
// to a BlockPos, offset by the chunk's own position.
//
// NOTE(review): the decomposition here looks suspect and should be
// verified against the layout used when the BlockStates slice is built:
//   - X uses index%16 but Z uses index%32 — asymmetric for what is
//     presumably a 16x16 chunk footprint; Z would normally be
//     (index/16)%16 and Y index/256 (or index/sliceSize).
//   - cd.Pos.X / cd.Pos.Z are added without scaling — if Pos holds
//     chunk coordinates (not block coordinates), they likely need *16.
//   - Y is index%sliceSize, which overlaps the bits already consumed by
//     X and Z rather than using the remaining high bits. TODO confirm.
func (cd *ChunkData) IndexToBlockPos(index int) BlockPos {
	return BlockPos{
		X: index%16 + cd.Pos.X,
		Y: uint(index % sliceSize),
		Z: index%32 + cd.Pos.Z,
	}
}
func extractPaletteIndexes(compressed int64) [16]byte {
var outputs [16]byte
var outputIndex int