author     n-peugnet <n.peugnet@free.fr>    2021-09-23 16:24:02 +0200
committer  n-peugnet <n.peugnet@free.fr>    2021-09-23 17:45:08 +0200
commit     e4f6bf0d972b8e851a6fcaa20c1f305680f61884
tree       483daf9820dee3a8e658b47082841e1ef12ef99b
parent     2835b35221cb831c4836531204d96e3c5c2d4b13
better error handling and start checking symlinks
-rw-r--r--  repo.go       | 74
-rw-r--r--  repo_test.go  | 23
2 files changed, 66 insertions, 31 deletions
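
The key functional change is the symlink handling added to listFiles (hunk @@ -218,7 +218,25 @@ of repo.go below): filepath.Walk never follows symlinks, so the walk callback now resolves each one itself, keeps file symlinks (warning that their content will be duplicated) and skips directory symlinks. The following standalone sketch reproduces that classification logic, assuming the standard library log package instead of the repo's logger; classifySymlinks is a hypothetical helper, not code from this commit.

package main

import (
	"io/fs"
	"log"
	"os"
	"path/filepath"
)

// classifySymlinks walks root the way the patched listFiles does: walk errors
// and broken symlinks are only logged, directory symlinks are skipped, and
// file symlinks are kept (their content would end up duplicated in a backup).
func classifySymlinks(root string) []string {
	var kept []string
	filepath.Walk(root, func(p string, i fs.FileInfo, err error) error {
		if err != nil {
			log.Println("walk:", err)
			return nil // keep walking instead of aborting the whole listing
		}
		if i.Mode()&fs.ModeSymlink != 0 {
			target, err := filepath.EvalSymlinks(p)
			if err != nil {
				log.Println("broken symlink:", err)
				return nil
			}
			if i, err = os.Stat(target); err != nil {
				log.Println("stat symlink target:", err)
				return nil
			}
			if i.IsDir() {
				log.Printf("dir symlink %s: will not be followed", p)
				return nil
			}
			log.Printf("file symlink %s: content will be duplicated", p)
		}
		if i.IsDir() {
			return nil
		}
		kept = append(kept, p)
		return nil
	})
	return kept
}

func main() {
	for _, f := range classifySymlinks(".") {
		log.Println("would back up:", f)
	}
}

Returning nil from the callback on errors, instead of the error itself, is what lets the walk continue past broken links rather than aborting the whole listing; the same change appears in the hunk below.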
diff --git a/repo.go b/repo.go
--- a/repo.go
+++ b/repo.go
@@ -158,7 +158,7 @@ func (r *Repo) Commit(source string) {
 	r.loadHashes(versions)
 	r.loadFileLists(versions)
 	r.loadRecipes(versions)
-	storeQueue := make(chan chunkData, 10)
+	storeQueue := make(chan chunkData, 32)
 	storeEnd := make(chan bool)
 	go r.storageWorker(newVersion, storeQueue, storeEnd)
 	var last, nlast, pass uint64
@@ -218,7 +218,25 @@ func listFiles(path string) []File {
 	err := filepath.Walk(path, func(p string, i fs.FileInfo, err error) error {
 		if err != nil {
 			logger.Warning(err)
-			return err
+			return nil
+		}
+		if i.Mode()&fs.ModeSymlink != 0 {
+			target, err := filepath.EvalSymlinks(p)
+			if err != nil {
+				logger.Warning(err)
+				return nil
+			}
+			i, err = os.Stat(target)
+			if err != nil {
+				logger.Warning(err)
+				return nil
+			}
+			if !i.IsDir() {
+				logger.Warningf("file symlink %s: content will be duplicated", p)
+			} else {
+				logger.Warningf("dir symlink %s: will not be followed", p)
+				return nil
+			}
 		}
 		if i.IsDir() {
 			return nil
@@ -227,7 +245,7 @@ func listFiles(path string) []File {
 		return nil
 	})
 	if err != nil {
-		// already logged in callback
+		logger.Error(err)
 	}
 	return files
 }
@@ -362,10 +380,7 @@ func (r *Repo) storageWorker(version int, storeQueue <-chan chunkData, end chan<
 	encoder := gob.NewEncoder(file)
 	for data := range storeQueue {
 		err = encoder.Encode(data.hashes)
-		err := r.StoreChunkContent(data.id, bytes.NewReader(data.content))
-		if err != nil {
-			logger.Error(err)
-		}
+		r.StoreChunkContent(data.id, bytes.NewReader(data.content))
 		// logger.Debug("stored ", data.id)
 	}
 	if err = file.Close(); err != nil {
@@ -374,24 +389,23 @@ func (r *Repo) storageWorker(version int, storeQueue <-chan chunkData, end chan<
 	end <- true
 }
 
-func (r *Repo) StoreChunkContent(id *ChunkId, reader io.Reader) error {
+func (r *Repo) StoreChunkContent(id *ChunkId, reader io.Reader) {
 	path := id.Path(r.path)
 	file, err := os.Create(path)
 	if err != nil {
-		return fmt.Errorf("creating chunk for '%s'; %s\n", path, err)
+		logger.Panic("chunk store ", err)
 	}
 	wrapper := r.chunkWriteWrapper(file)
 	n, err := io.Copy(wrapper, reader)
 	if err != nil {
-		return fmt.Errorf("writing chunk content for '%s', written %d bytes: %s\n", path, n, err)
+		logger.Errorf("chunk store, %d written, %s", n, err)
 	}
 	if err := wrapper.Close(); err != nil {
-		return fmt.Errorf("closing write wrapper for '%s': %s\n", path, err)
+		logger.Warning("chunk store wrapper ", err)
 	}
 	if err := file.Close(); err != nil {
-		return fmt.Errorf("closing chunk for '%s': %s\n", path, err)
+		logger.Warning("chunk store ", err)
 	}
-	return nil
 }
 
 // LoadChunkContent loads a chunk from the repo.
@@ -402,18 +416,21 @@ func (r *Repo) LoadChunkContent(id *ChunkId) *bytes.Reader {
 		path := id.Path(r.path)
 		f, err := os.Open(path)
 		if err != nil {
-			logger.Errorf("cannot open chunk '%s': %s", path, err)
+			logger.Panic("chunk load ", err)
 		}
 		wrapper, err := r.chunkReadWrapper(f)
 		if err != nil {
-			logger.Errorf("cannot create read wrapper for chunk '%s': %s", path, err)
+			logger.Error("chunk load wrapper ", err)
 		}
 		value, err = io.ReadAll(wrapper)
 		if err != nil {
-			logger.Panicf("could not read from chunk '%s': %s", path, err)
+			logger.Error("chunk load ", err)
+		}
+		if err = wrapper.Close(); err != nil {
+			logger.Warning("chunk load wrapper", err)
 		}
 		if err = f.Close(); err != nil {
-			logger.Warningf("could not close chunk '%s': %s", path, err)
+			logger.Warning("chunk load ", err)
 		}
 		r.chunkCache.Set(id, value)
 	}
@@ -426,7 +443,7 @@ func (r *Repo) loadChunks(versions []string, chunks chan<- IdentifiedChunk) {
 		p := filepath.Join(v, chunksName)
 		entries, err := os.ReadDir(p)
 		if err != nil {
-			logger.Errorf("reading version '%05d' in '%s' chunks: %s", i, v, err)
+			logger.Error("version dir ", err)
 		}
 		for j, e := range entries {
 			if e.IsDir() {
@@ -444,22 +461,23 @@ func (r *Repo) loadHashes(versions []string) {
 	for i, v := range versions {
 		path := filepath.Join(v, hashesName)
 		file, err := os.Open(path)
-		if err == nil {
-			decoder := gob.NewDecoder(file)
-			for j := 0; err == nil; j++ {
-				var h chunkHashes
-				if err = decoder.Decode(&h); err == nil {
-					id := &ChunkId{i, uint64(j)}
-					r.fingerprints[h.Fp] = id
-					r.sketches.Set(h.Sk, id)
-				}
+		if err != nil {
+			logger.Error("hashes ", err)
+		}
+		decoder := gob.NewDecoder(file)
+		for j := 0; err == nil; j++ {
+			var h chunkHashes
+			if err = decoder.Decode(&h); err == nil {
+				id := &ChunkId{i, uint64(j)}
+				r.fingerprints[h.Fp] = id
+				r.sketches.Set(h.Sk, id)
 			}
 		}
 		if err != nil && err != io.EOF {
 			logger.Panic(err)
 		}
 		if err = file.Close(); err != nil {
-			logger.Panic(err)
+			logger.Warning(err)
 		}
 	}
 }
diff --git a/repo_test.go b/repo_test.go
index 6ab7cb0..2d1c6e2 100644
--- a/repo_test.go
+++ b/repo_test.go
@@ -128,20 +128,37 @@ func TestReadFiles3(t *testing.T) {
 	chunkCompare(t, dataDir, repo, files, chunkCount)
 }
 
-func TestNoSuchFile(t *testing.T) {
+func TestSymlinks(t *testing.T) {
 	var output bytes.Buffer
 	logger.SetOutput(&output)
 	defer logger.SetOutput(os.Stderr)
 	tmpDir := t.TempDir()
+	extDir := t.TempDir()
+	extNotWritable := filepath.Join(extDir, "notwritable")
+	f, err := os.OpenFile(extNotWritable, os.O_CREATE, 0000)
+	if err != nil {
+		t.Fatal(err)
+	}
+	if err := f.Close(); err != nil {
+		t.Fatal(err)
+	}
+	os.Symlink(extDir, filepath.Join(tmpDir, "linktodir"))
 	os.Symlink("./notexisting", filepath.Join(tmpDir, "linknotexisting"))
+	os.Symlink(extNotWritable, filepath.Join(tmpDir, "linknotwritable"))
 	var buff bytes.Buffer
 	files := listFiles(tmpDir)
 	testutils.AssertLen(t, 1, files, "Files")
 	concatFiles(&files, utils.NopCloser(&buff))
 	testutils.AssertLen(t, 0, files, "Files")
 	testutils.AssertLen(t, 0, buff.Bytes(), "Buffer")
-	if !strings.Contains(output.String(), "linknotexisting") {
-		t.Errorf("log should contain a warning, actual %q", &output)
+	if !strings.Contains(output.String(), "linktodir") {
+		t.Errorf("log should contain a warning for linktodir, actual %q", &output)
+	}
+	if !strings.Contains(output.String(), "notexisting") {
+		t.Errorf("log should contain a warning for notexisting, actual %q", &output)
+	}
+	if !strings.Contains(output.String(), "linknotwritable") {
+		t.Errorf("log should contain a warning for linknotwritable, actual %q", &output)
 	}
 }
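
The other half of the commit is the shift from returned errors to graded logger calls in the chunk storage path: StoreChunkContent now panics when the chunk file cannot be created, logs an error on a failed write, and only warns on close failures. A minimal sketch of that convention, using the standard library log package and a hypothetical storeChunk helper in place of the repo's method (and omitting the compression wrapper), might look like:

package main

import (
	"bytes"
	"io"
	"log"
	"os"
)

// storeChunk mirrors the severity levels this commit adopts in
// StoreChunkContent: panic when the chunk file cannot be created, log an
// error when the write fails, and only warn when closing fails. The repo's
// compression wrapper is left out to keep the sketch self-contained.
func storeChunk(path string, reader io.Reader) {
	file, err := os.Create(path)
	if err != nil {
		// Unrecoverable for a backup: no chunk file could be created at all.
		log.Panic("chunk store ", err)
	}
	n, err := io.Copy(file, reader)
	if err != nil {
		log.Printf("chunk store, %d written, %s", n, err)
	}
	if err := file.Close(); err != nil {
		log.Print("chunk store ", err)
	}
}

func main() {
	// Hypothetical example path; the real repo derives it from the chunk id.
	storeChunk("/tmp/chunk-00000", bytes.NewReader([]byte("example chunk content")))
}

The new test can then be run on its own with go test -run TestSymlinks -v, assuming the tests live in the repository's root package.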