author | n-peugnet <n.peugnet@free.fr> | 2021-09-09 14:01:00 +0200 |
---|---|---|
committer | n-peugnet <n.peugnet@free.fr> | 2021-09-09 14:01:00 +0200 |
commit | 1809540dc87c665cafcd6daf514b39b4a17e483f (patch) | |
tree | 61cd19f81988d245697fbb40a7462131968ee3a8 | |
parent | 27cf33b15ee5c028f4816607c034df68adf7df4f (diff) | |
download | dna-backup-1809540dc87c665cafcd6daf514b39b4a17e483f.tar.gz, dna-backup-1809540dc87c665cafcd6daf514b39b4a17e483f.zip | |
remove merge and extract tempchunks
-rw-r--r-- | repo.go | 41
-rw-r--r-- | repo_test.go | 17

2 files changed, 1 insertion, 57 deletions
```diff
@@ -511,47 +511,6 @@ func loadRecipe(path string) []Chunk {
     return recipe
 }
 
-// mergeTempChunks joins temporary partial chunks from an array of chunks if possible.
-// If a chunk is smaller than the size required to calculate a super-feature,
-// it is then appended to the previous consecutive temporary chunk if it exists.
-func (r *Repo) mergeTempChunks(chunks []Chunk) (ret []Chunk) {
-    var prev *TempChunk
-    var curr *TempChunk
-    for _, c := range chunks {
-        tmp, isTmp := c.(*TempChunk)
-        if !isTmp {
-            if prev != nil && curr.Len() <= sketch.SuperFeatureSize(r.chunkSize, r.sketchSfCount, r.sketchFCount) {
-                prev.AppendFrom(curr.Reader())
-            } else if curr != nil {
-                ret = append(ret, curr)
-            }
-            ret = append(ret, c)
-            curr = nil
-            prev = nil
-        } else {
-            prev = curr
-            curr = tmp
-            if prev != nil {
-                ret = append(ret, prev)
-            }
-        }
-    }
-    if curr != nil {
-        ret = append(ret, curr)
-    }
-    return
-}
-
-func extractTempChunks(chunks []Chunk) (ret []*TempChunk) {
-    for _, c := range chunks {
-        tmp, isTmp := c.(*TempChunk)
-        if isTmp {
-            ret = append(ret, tmp)
-        }
-    }
-    return
-}
-
 func extractDeltaChunks(chunks []Chunk) (ret []*DeltaChunk) {
     for _, c := range chunks {
         tmp, isDelta := c.(*DeltaChunk)
diff --git a/repo_test.go b/repo_test.go
index 07af682..29282bc 100644
--- a/repo_test.go
+++ b/repo_test.go
@@ -159,21 +159,6 @@ func TestLoadChunks(t *testing.T) {
     }
 }
 
-func TestExtractNewChunks(t *testing.T) {
-    repo := NewRepo("")
-    chunks := []Chunk{
-        &TempChunk{Value: []byte{'a'}},
-        &LoadedChunk{Id: &ChunkId{0, 0}},
-        &TempChunk{Value: []byte{'b'}},
-        &TempChunk{Value: []byte{'c'}},
-        &LoadedChunk{Id: &ChunkId{0, 1}},
-    }
-    newChunks := extractTempChunks(repo.mergeTempChunks(chunks))
-    assertLen(t, 2, newChunks, "New chunks:")
-    assertChunkContent(t, []byte{'a'}, newChunks[0], "First new:")
-    assertChunkContent(t, []byte{'b', 'c'}, newChunks[1], "Second New:")
-}
-
 func TestStoreLoadFiles(t *testing.T) {
     resultDir := t.TempDir()
     dataDir := path.Join("testdata", "logs")
@@ -233,7 +218,7 @@ func TestBsdiff(t *testing.T) {
     // Read new data
     reader := getDataStream(dataDir, concatFiles)
     recipe := repo.matchStream(reader, newVersion)
-    newChunks := extractDeltaChunks(repo.mergeTempChunks(recipe))
+    newChunks := extractDeltaChunks(recipe)
     assertLen(t, 2, newChunks, "New delta chunks:")
     for _, c := range newChunks {
         log.Println("Patch size:", len(c.Patch))
```
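The first hunk cuts off at the opening lines of `extractDeltaChunks`, the helper that survives this commit. As a rough sketch only (inferred from the diff context above and from the removed `extractTempChunks`, not copied from repo.go), its body presumably filters the recipe down to `*DeltaChunk` values with the same type-assertion loop, relying on the repository's existing `Chunk` interface and `DeltaChunk` type:

```go
// Hedged sketch of the surviving helper: the signature and first lines come
// from the diff context above; the loop body mirrors the removed
// extractTempChunks and may differ from the actual code in repo.go.
func extractDeltaChunks(chunks []Chunk) (ret []*DeltaChunk) {
    for _, c := range chunks {
        tmp, isDelta := c.(*DeltaChunk) // keep only delta chunks
        if isDelta {
            ret = append(ret, tmp)
        }
    }
    return
}
```

With `mergeTempChunks` gone, `TestBsdiff` now passes the recipe returned by `matchStream` straight to this helper (`extractDeltaChunks(recipe)`) instead of merging temporary chunks first.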