ref: 04b424a6398637dd0f5d29857f489a03fd5e38f5
parent: 6375255f0cc648e0a0f26c3b9c461f9959fe8ad5
author: Runxi Yu <me@runxiyu.org>
date: Sat Mar 7 09:55:38 EST 2026
refstore/files: Add new files backend (And use it in repository)
--- /dev/null
+++ b/refstore/files/helpers_test.go
@@ -1,0 +1,147 @@
+package files_test
+
+import (
+ "os"
+ "slices"
+ "strings"
+ "testing"
+
+ "codeberg.org/lindenii/furgit/internal/testgit"
+ "codeberg.org/lindenii/furgit/objectid"
+ "codeberg.org/lindenii/furgit/refstore/files"
+)
+
+func openFilesStore(t *testing.T, testRepo *testgit.TestRepo, algo objectid.Algorithm) *files.Store {+ t.Helper()
+
+ root := testRepo.OpenGitRoot(t)
+
+ store, err := files.New(root, algo)
+ if err != nil {+ t.Fatalf("files.New: %v", err)+ }
+
+ return store
+}
+
+func openFilesStoreAt(t *testing.T, root *os.Root, algo objectid.Algorithm) *files.Store {+ t.Helper()
+
+ store, err := files.New(root, algo)
+ if err != nil {+ t.Fatalf("files.New: %v", err)+ }
+
+ return store
+}
+
+func openGitRootUnder(t *testing.T, repoRoot *os.Root, worktreeName string) *os.Root {+ t.Helper()
+
+ worktreeRoot, err := repoRoot.OpenRoot(worktreeName)
+ if err != nil {+ t.Fatalf("OpenRoot(%q): %v", worktreeName, err)+ }
+
+ t.Cleanup(func() {+ _ = worktreeRoot.Close()
+ })
+
+ info, err := worktreeRoot.Stat(".git")+ if err != nil {+ t.Fatalf("stat %q: %v", worktreeName+"/.git", err)+ }
+
+ if info.IsDir() {+ gitRoot, err := worktreeRoot.OpenRoot(".git")+ if err != nil {+ t.Fatalf("OpenRoot(.git): %v", err)+ }
+
+ t.Cleanup(func() {+ _ = gitRoot.Close()
+ })
+
+ return gitRoot
+ }
+
+ content, err := worktreeRoot.ReadFile(".git")+ if err != nil {+ t.Fatalf("read %q: %v", worktreeName+"/.git", err)+ }
+
+ gitDir := strings.TrimSpace(strings.TrimPrefix(string(content), "gitdir:"))
+ if gitDir == "" {+ t.Fatalf("%q does not contain a gitdir path", worktreeName+"/.git")+ }
+
+ if strings.HasPrefix(gitDir, "/") {+ gitRoot, err := os.OpenRoot(gitDir)
+ if err != nil {+ t.Fatalf("os.OpenRoot(%q): %v", gitDir, err)+ }
+
+ t.Cleanup(func() {+ _ = gitRoot.Close()
+ })
+
+ return gitRoot
+ }
+
+ gitRoot, err := worktreeRoot.OpenRoot(gitDir)
+ if err != nil {+ t.Fatalf("os.OpenRoot(%q): %v", gitDir, err)+ }
+
+ t.Cleanup(func() {+ _ = gitRoot.Close()
+ })
+
+ return gitRoot
+}
+
+func assertListMatchesGitForEachRef(t *testing.T, gitOut string, store *files.Store) {+ t.Helper()
+
+ listed, err := store.List("")+ if err != nil {+ t.Fatalf("List(\"\"): %v", err)+ }
+
+ gotNames := make([]string, 0, len(listed))
+ for _, got := range listed {+ if got.Name() == "HEAD" {+ continue
+ }
+
+ gotNames = append(gotNames, got.Name())
+ }
+
+ slices.Sort(gotNames)
+
+ wantLines := strings.Split(strings.TrimSpace(gitOut), "\n")
+ wantNames := make([]string, 0, len(wantLines))
+
+ for _, line := range wantLines {+ line = strings.TrimSpace(line)
+ if line == "" {+ continue
+ }
+
+ wantNames = append(wantNames, line)
+ }
+
+ slices.Sort(wantNames)
+
+ if !slices.Equal(gotNames, wantNames) {+ t.Fatalf("List names = %v, want %v", gotNames, wantNames)+ }
+}
+
// forEachRefLines splits git for-each-ref output into its lines, returning
// nil for blank output so empty listings compare equal to a nil slice.
func forEachRefLines(output string) []string {
	// Trim once and reuse; the original computed TrimSpace twice.
	trimmed := strings.TrimSpace(output)
	if trimmed == "" {
		return nil
	}

	return strings.Split(trimmed, "\n")
}
--- /dev/null
+++ b/refstore/files/packed_delete_test.go
@@ -1,0 +1,293 @@
+package files_test
+
+import (
+ "errors"
+ "os"
+ "slices"
+ "sync"
+ "testing"
+ "time"
+
+ "codeberg.org/lindenii/furgit/internal/testgit"
+ "codeberg.org/lindenii/furgit/objectid"
+ "codeberg.org/lindenii/furgit/refstore"
+)
+
+func TestFilesTransactionPackedDeleteFailureLeavesRefsUnchanged(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ t.Run("packed-refs.lock held", func(t *testing.T) {+ t.Parallel()
+
+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true, RefFormat: "files"})+ _, _, packedID := testRepo.MakeCommit(t, "packed")
+ _, _, looseID := testRepo.MakeCommit(t, "loose")
+ prefix := "refs/locked-packed-refs"
+
+ testRepo.UpdateRef(t, prefix+"/foo", packedID)
+ testRepo.PackRefs(t, "--all", "--prune")
+ testRepo.UpdateRef(t, prefix+"/foo", looseID)
+ unchanged := forEachRefLines(testRepo.Run(t, "for-each-ref", "--format=%(objectname) %(refname)", prefix))
+ testRepo.WriteFile(t, "packed-refs.lock", []byte{}, 0o644)+
+ store := openFilesStore(t, testRepo, algo)
+
+ tx, err := store.BeginTransaction()
+ if err != nil {+ t.Fatalf("BeginTransaction(lock held): %v", err)+ }
+
+ err = tx.Delete(prefix+"/foo", looseID)
+ if err != nil {+ t.Fatalf("Delete(lock held) queue: %v", err)+ }
+
+ err = tx.Commit()
+ if err == nil {+ t.Fatal("Commit(lock held) unexpectedly succeeded")+ }
+
+ actual := forEachRefLines(testRepo.Run(t, "for-each-ref", "--format=%(objectname) %(refname)", prefix))
+ if !slices.Equal(actual, unchanged) {+ t.Fatalf("ShowRef after failed delete = %v, want %v", actual, unchanged)+ }
+
+ got, err := store.ResolveFully(prefix + "/foo")
+ if err != nil {+ t.Fatalf("ResolveFully(lock held): %v", err)+ }
+
+ if got.ID != looseID {+ t.Fatalf("ResolveFully(lock held) = %s, want %s", got.ID, looseID)+ }
+
+ gitRoot := testRepo.OpenGitRoot(t)
+
+ _, statErr := gitRoot.Stat(prefix + "/foo.lock")
+ if !errors.Is(statErr, os.ErrNotExist) {+ t.Fatalf("unexpected leftover loose lock: %v", statErr)+ }
+ })
+
+ t.Run("packed-refs.new exists", func(t *testing.T) {+ t.Parallel()
+
+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true, RefFormat: "files"})+ _, _, packedID := testRepo.MakeCommit(t, "packed")
+ _, _, looseID := testRepo.MakeCommit(t, "loose")
+ prefix := "refs/failed-packed-refs"
+
+ testRepo.UpdateRef(t, prefix+"/foo", packedID)
+ testRepo.PackRefs(t, "--all", "--prune")
+ testRepo.UpdateRef(t, prefix+"/foo", looseID)
+ unchanged := forEachRefLines(testRepo.Run(t, "for-each-ref", "--format=%(objectname) %(refname)", prefix))
+ testRepo.WriteFile(t, "packed-refs.new", []byte{}, 0o644)+
+ store := openFilesStore(t, testRepo, algo)
+
+ tx, err := store.BeginTransaction()
+ if err != nil {+ t.Fatalf("BeginTransaction(new exists): %v", err)+ }
+
+ err = tx.Delete(prefix+"/foo", looseID)
+ if err != nil {+ t.Fatalf("Delete(new exists) queue: %v", err)+ }
+
+ err = tx.Commit()
+ if err == nil {+ t.Fatal("Commit(new exists) unexpectedly succeeded")+ }
+
+ actual := forEachRefLines(testRepo.Run(t, "for-each-ref", "--format=%(objectname) %(refname)", prefix))
+ if !slices.Equal(actual, unchanged) {+ t.Fatalf("ShowRef after failed delete = %v, want %v", actual, unchanged)+ }
+
+ got, err := store.ResolveFully(prefix + "/foo")
+ if err != nil {+ t.Fatalf("ResolveFully(new exists): %v", err)+ }
+
+ if got.ID != looseID {+ t.Fatalf("ResolveFully(new exists) = %s, want %s", got.ID, looseID)+ }
+ })
+ })
+}
+
+func TestFilesPackedRefDeleteDoesNotCreateDirectories(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true, RefFormat: "files"})+ _, _, commitID := testRepo.MakeCommit(t, "packed-only")
+ name := "refs/heads/d1/d2/r1"
+
+ testRepo.UpdateRef(t, name, commitID)
+ testRepo.PackRefs(t, "--all", "--prune")
+
+ gitRoot := testRepo.OpenGitRoot(t)
+
+ _, err := gitRoot.Stat("refs/heads/d1/d2")+ if !errors.Is(err, os.ErrNotExist) {+ t.Fatalf("refs/heads/d1/d2 unexpectedly exists before delete: %v", err)+ }
+
+ store := openFilesStore(t, testRepo, algo)
+
+ tx, err := store.BeginTransaction()
+ if err != nil {+ t.Fatalf("BeginTransaction: %v", err)+ }
+
+ err = tx.Delete(name, commitID)
+ if err != nil {+ t.Fatalf("Delete queue: %v", err)+ }
+
+ err = tx.Commit()
+ if err != nil {+ t.Fatalf("Commit: %v", err)+ }
+
+ _, err = gitRoot.Stat("refs/heads/d1/d2")+ if !errors.Is(err, os.ErrNotExist) {+ t.Fatalf("refs/heads/d1/d2 unexpectedly exists after delete: %v", err)+ }
+
+ _, err = gitRoot.Stat("refs/heads/d1")+ if !errors.Is(err, os.ErrNotExist) {+ t.Fatalf("refs/heads/d1 unexpectedly exists after delete: %v", err)+ }
+ })
+}
+
+func TestFilesPackedRefIgnoresEmptyDirectories(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true, RefFormat: "files"})+ _, _, commitID := testRepo.MakeCommit(t, "packed-visible")
+ prefix := "refs/e-for-each-ref"
+ name := prefix + "/foo"
+
+ testRepo.UpdateRef(t, name, commitID)
+ expected := forEachRefLines(testRepo.Run(t, "for-each-ref", "--format=%(objectname) %(refname)", prefix))
+ testRepo.PackRefs(t, "--all", "--prune")
+ testRepo.WriteFileAll(t, prefix+"/foo/bar/baz/.keep", []byte{}, 0o755, 0o644)+ testRepo.Remove(t, prefix+"/foo/bar/baz/.keep")
+
+ store := openFilesStore(t, testRepo, algo)
+
+ got, err := store.ResolveFully(name)
+ if err != nil {+ t.Fatalf("ResolveFully: %v", err)+ }
+
+ if got.ID != commitID {+ t.Fatalf("ResolveFully = %s, want %s", got.ID, commitID)+ }
+
+ actual := make([]string, 0)
+
+ listed, err := store.List(prefix + "/*")
+ if err != nil {+ t.Fatalf("List: %v", err)+ }
+
+ for _, entry := range listed {+ actual = append(actual, entry.Name())
+ }
+
+ fullActual := make([]string, 0, len(actual))
+ for _, name := range actual {+ refValue, resolveErr := store.ResolveFully(name)
+ if resolveErr != nil {+ t.Fatalf("ResolveFully(%q): %v", name, resolveErr)+ }
+
+ fullActual = append(fullActual, refValue.ID.String()+" "+name)
+ }
+
+ slices.Sort(fullActual)
+
+ if !slices.Equal(fullActual, expected) {+ t.Fatalf("for-each-ref view = %v, want %v", fullActual, expected)+ }
+ })
+}
+
+func TestFilesDeleteWaitsForPackedRefsLockWithoutIntermediateState(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true, RefFormat: "files"})+ _, _, packedID := testRepo.MakeCommit(t, "packed")
+ _, _, looseID := testRepo.MakeCommit(t, "loose")
+ prefix := "refs/slow-transaction"
+
+ testRepo.UpdateRef(t, prefix+"/foo", packedID)
+ testRepo.PackRefs(t, "--all", "--prune")
+ testRepo.UpdateRef(t, prefix+"/foo", looseID)
+ testRepo.Run(t, "config", "core.packedrefstimeout", "3000")
+ testRepo.WriteFile(t, "packed-refs.lock", []byte{}, 0o644)+
+ store := openFilesStore(t, testRepo, algo)
+
+ tx, err := store.BeginTransaction()
+ if err != nil {+ t.Fatalf("BeginTransaction: %v", err)+ }
+
+ err = tx.Delete(prefix+"/foo", looseID)
+ if err != nil {+ t.Fatalf("Delete queue: %v", err)+ }
+
+ done := make(chan error, 1)
+
+ var wg sync.WaitGroup
+
+ wg.Go(func() {+ done <- tx.Commit()
+ })
+
+ time.Sleep(75 * time.Millisecond)
+
+ select {+ case err := <-done:
+ t.Fatalf("Commit finished too early: %v", err)+ default:
+ }
+
+ got, err := store.ResolveFully(prefix + "/foo")
+ if err != nil {+ t.Fatalf("ResolveFully while lock held: %v", err)+ }
+
+ if got.ID != looseID {+ t.Fatalf("ResolveFully while lock held = %s, want %s", got.ID, looseID)+ }
+
+ testRepo.Remove(t, "packed-refs.lock")
+
+ select {+ case err := <-done:
+ if err != nil {+ t.Fatalf("Commit after lock release: %v", err)+ }
+ case <-time.After(2 * time.Second):
+ t.Fatal("Commit did not finish after lock release")+ }
+
+ wg.Wait()
+
+ _, err = store.Resolve(prefix + "/foo")
+ if !errors.Is(err, refstore.ErrReferenceNotFound) {+ t.Fatalf("Resolve after delete error = %v, want ErrReferenceNotFound", err)+ }
+ })
+}
--- /dev/null
+++ b/refstore/files/packed_refs.go
@@ -1,0 +1,150 @@
+package files
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "strings"
+
+ "codeberg.org/lindenii/furgit/objectid"
+ "codeberg.org/lindenii/furgit/ref"
+)
+
// packedRefs is an in-memory view of one parsed packed-refs file.
type packedRefs struct {
	// byName indexes every entry (with any peeled id attached) by its
	// full ref name.
	byName map[string]ref.Detached
	// ordered holds entries in file order; peeled-line updates are also
	// written back into this slice.
	ordered []ref.Detached
}
+
+func (store *Store) readPackedRefs() (*packedRefs, error) {+ file, err := store.commonRoot.Open("packed-refs")+ if err != nil {+ if errorsIsNotExist(err) {+ return &packedRefs{+ byName: make(map[string]ref.Detached),
+ ordered: nil,
+ }, nil
+ }
+
+ return nil, fmt.Errorf("refstore/files: open packed-refs: %w", err)+ }
+
+ defer func() { _ = file.Close() }()+
+ byName, ordered, err := parsePackedRefs(file, store.algo)
+ if err != nil {+ return nil, err
+ }
+
+ return &packedRefs{+ byName: byName,
+ ordered: ordered,
+ }, nil
+}
+
+func parsePackedRefs(r io.Reader, algo objectid.Algorithm) (map[string]ref.Detached, []ref.Detached, error) {+ byName := make(map[string]ref.Detached)
+ ordered := make([]ref.Detached, 0, 32)
+
+ br := bufio.NewReader(r)
+ prev := -1
+ lineNum := 0
+ hexsz := algo.Size() * 2
+
+ for {+ line, err := br.ReadString('\n')+ if err != nil && err != io.EOF {+ return nil, nil, err
+ }
+
+ if line == "" && err == io.EOF {+ break
+ }
+
+ lineNum++
+ hadNewline := strings.HasSuffix(line, "\n")
+ line = strings.TrimSuffix(line, "\n")
+
+ if err == io.EOF && !hadNewline {+ return nil, nil, fmt.Errorf("refstore/files: line %d: unterminated line", lineNum)+ }
+
+ if line == "" || strings.HasPrefix(line, "#") {+ if err == io.EOF {+ break
+ }
+
+ continue
+ }
+
+ if strings.HasPrefix(line, "^") {+ if prev < 0 {+ return nil, nil, fmt.Errorf("refstore/files: line %d: peeled line without preceding ref", lineNum)+ }
+
+ if len(line) != hexsz+1 {+ return nil, nil, fmt.Errorf("refstore/files: line %d: malformed peeled line", lineNum)+ }
+
+ peeled, parseErr := objectid.ParseHex(algo, line[1:])
+ if parseErr != nil {+ return nil, nil, fmt.Errorf("refstore/files: line %d: invalid peeled oid: %w", lineNum, parseErr)+ }
+
+ peeledCopy := peeled
+ cur := ordered[prev]
+ cur.Peeled = &peeledCopy
+ ordered[prev] = cur
+ byName[cur.Name()] = cur
+
+ if err == io.EOF {+ break
+ }
+
+ continue
+ }
+
+ if len(line) < hexsz+2 {+ return nil, nil, fmt.Errorf("refstore/files: line %d: malformed entry", lineNum)+ }
+
+ if line[hexsz] != ' ' {+ return nil, nil, fmt.Errorf("refstore/files: line %d: malformed entry", lineNum)+ }
+
+ idText := line[:hexsz]
+
+ name := line[hexsz+1:]
+ if name == "" {+ return nil, nil, fmt.Errorf("refstore/files: line %d: empty ref name", lineNum)+ }
+
+ id, parseErr := objectid.ParseHex(algo, idText)
+ if parseErr != nil {+ return nil, nil, fmt.Errorf("refstore/files: line %d: invalid oid: %w", lineNum, parseErr)+ }
+
+ if _, exists := byName[name]; exists {+ return nil, nil, fmt.Errorf("refstore/files: line %d: duplicate ref %q", lineNum, name)+ }
+
+ detached := ref.Detached{+ RefName: name,
+ ID: id,
+ }
+ ordered = append(ordered, detached)
+ prev = len(ordered) - 1
+ byName[name] = detached
+
+ if err == io.EOF {+ break
+ }
+ }
+
+ return byName, ordered, nil
+}
+
// errorsIsNotExist reports whether err (possibly wrapped) indicates a
// missing file or directory.
func errorsIsNotExist(err error) bool {
	return errors.Is(err, os.ErrNotExist)
}
--- /dev/null
+++ b/refstore/files/read.go
@@ -1,0 +1,74 @@
+package files
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "strings"
+
+ "codeberg.org/lindenii/furgit/objectid"
+ "codeberg.org/lindenii/furgit/ref"
+ "codeberg.org/lindenii/furgit/refstore"
+)
+
// brokenRefError reports a reference whose on-disk content could not be
// interpreted; the underlying cause is available via Unwrap.
type brokenRefError struct {
	name string
	err  error
}

// Error formats the broken reference name together with its cause.
func (e brokenRefError) Error() string {
	return fmt.Sprintf("refstore/files: broken reference %q: %v", e.name, e.err)
}

// Unwrap exposes the underlying cause for errors.Is / errors.As.
func (e brokenRefError) Unwrap() error {
	return e.err
}
+
+func (store *Store) readLooseRef(name string) (ref.Ref, error) { //nolint:ireturn+ refPath := store.loosePath(name)
+
+ data, err := store.rootFor(refPath.root).ReadFile(refPath.path)
+ if err != nil {+ if errors.Is(err, os.ErrNotExist) {+ return nil, refstore.ErrReferenceNotFound
+ }
+
+ return nil, err
+ }
+
+ line := trimTrailingRefWhitespace(string(data))
+ if strings.HasPrefix(line, "ref:") {+ target := strings.TrimLeftFunc(line[len("ref:"):], isRefWhitespace)+ if target == "" {+ return nil, brokenRefError{name: name, err: fmt.Errorf("empty symbolic target")}+ }
+
+ return ref.Symbolic{+ RefName: name,
+ Target: target,
+ }, nil
+ }
+
+ id, err := objectid.ParseHex(store.algo, line)
+ if err != nil {+ return nil, brokenRefError{name: name, err: err}+ }
+
+ return ref.Detached{+ RefName: name,
+ ID: id,
+ }, nil
+}
+
// trimTrailingRefWhitespace strips the whitespace characters Git tolerates
// at the end of a loose ref file (space, tab, CR, LF, VT, FF).
func trimTrailingRefWhitespace(text string) string {
	return strings.TrimRight(text, " \t\n\r\v\f")
}
+
// isRefWhitespace reports whether r is one of the whitespace characters that
// may surround content in a loose ref file.
func isRefWhitespace(r rune) bool {
	return strings.ContainsRune(" \t\n\r\v\f", r)
}
--- /dev/null
+++ b/refstore/files/read_list.go
@@ -1,0 +1,148 @@
+package files
+
+import (
+ "errors"
+ "os"
+ "path"
+ "slices"
+ "strings"
+
+ "codeberg.org/lindenii/furgit/ref"
+ "codeberg.org/lindenii/furgit/refstore"
+)
+
+// List lists references from the visible files ref namespace.
+func (store *Store) List(pattern string) ([]ref.Ref, error) {+ matchAll := pattern == ""
+ if !matchAll {+ _, err := path.Match(pattern, "HEAD")
+ if err != nil {+ return nil, err
+ }
+ }
+
+ looseNames, err := store.collectLooseRefNames()
+ if err != nil {+ return nil, err
+ }
+
+ packed, err := store.readPackedRefs()
+ if err != nil {+ return nil, err
+ }
+
+ byName := make(map[string]ref.Ref, len(looseNames)+len(packed.byName))
+ for _, detached := range packed.ordered {+ byName[detached.Name()] = detached
+ }
+
+ for _, name := range looseNames {+ resolved, resolveErr := store.readLooseRef(name)
+ if resolveErr != nil {+ if errors.Is(resolveErr, refstore.ErrReferenceNotFound) {+ delete(byName, name)
+
+ continue
+ }
+
+ return nil, resolveErr
+ }
+
+ byName[name] = resolved
+ }
+
+ names := make([]string, 0, len(byName))
+ for name := range byName {+ if !matchAll {+ matched, matchErr := path.Match(pattern, name)
+ if matchErr != nil {+ return nil, matchErr
+ }
+
+ if !matched {+ continue
+ }
+ }
+
+ names = append(names, name)
+ }
+
+ slices.Sort(names)
+
+ refs := make([]ref.Ref, 0, len(names))
+ for _, name := range names {+ refs = append(refs, byName[name])
+ }
+
+ return refs, nil
+}
+
+func (store *Store) collectLooseRefNames() ([]string, error) {+ names := make([]string, 0, 16)
+ seen := make(map[string]struct{}, 16)+
+ _, err := store.gitRoot.Stat("HEAD")+ if err == nil {+ names = append(names, "HEAD")
+ seen["HEAD"] = struct{}{}+ } else if !errors.Is(err, os.ErrNotExist) {+ return nil, err
+ }
+
+ var walk func(*os.Root, string) error
+
+ walk = func(root *os.Root, dir string) error {+ file, openErr := root.Open(dir)
+ if openErr != nil {+ if errors.Is(openErr, os.ErrNotExist) {+ return nil
+ }
+
+ return openErr
+ }
+
+ defer func() { _ = file.Close() }()+
+ entries, readErr := file.ReadDir(-1)
+ if readErr != nil {+ return readErr
+ }
+
+ for _, entry := range entries {+ name := path.Join(dir, entry.Name())
+ if entry.IsDir() {+ err := walk(root, name)
+ if err != nil {+ return err
+ }
+
+ continue
+ }
+
+ if strings.HasSuffix(name, ".lock") {+ continue
+ }
+
+ if _, ok := seen[name]; ok {+ continue
+ }
+
+ seen[name] = struct{}{}+ names = append(names, name)
+ }
+
+ return nil
+ }
+
+ err = walk(store.commonRoot, "refs")
+ if err != nil {+ return nil, err
+ }
+
+ err = walk(store.gitRoot, "refs")
+ if err != nil {+ return nil, err
+ }
+
+ return names, nil
+}
--- /dev/null
+++ b/refstore/files/read_resolve.go
@@ -1,0 +1,77 @@
+package files
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+
+ "codeberg.org/lindenii/furgit/ref"
+ "codeberg.org/lindenii/furgit/refstore"
+)
+
+// Resolve resolves one reference name from the files store visible namespace.
+func (store *Store) Resolve(name string) (ref.Ref, error) { //nolint:ireturn+ if name == "" {+ return nil, refstore.ErrReferenceNotFound
+ }
+
+ resolved, err := store.readLooseRef(name)
+ if err == nil {+ return resolved, nil
+ }
+
+ if !errors.Is(err, refstore.ErrReferenceNotFound) {+ refPath := store.loosePath(name)
+
+ info, statErr := store.rootFor(refPath.root).Stat(refPath.path)
+ if statErr != nil || !info.IsDir() {+ return nil, err
+ }
+ }
+
+ packed, packedErr := store.readPackedRefs()
+ if packedErr != nil {+ return nil, packedErr
+ }
+
+ detached, ok := packed.byName[name]
+ if !ok {+ return nil, refstore.ErrReferenceNotFound
+ }
+
+ return detached, nil
+}
+
+// ResolveFully resolves symbolic references through the visible files store
+// namespace until one detached reference is reached.
+func (store *Store) ResolveFully(name string) (ref.Detached, error) {+ cur := name
+ seen := make(map[string]struct{})+
+ for {+ if _, ok := seen[cur]; ok {+ return ref.Detached{}, fmt.Errorf("refstore/files: symbolic reference cycle at %q", cur)+ }
+
+ seen[cur] = struct{}{}+
+ resolved, err := store.Resolve(cur)
+ if err != nil {+ return ref.Detached{}, err+ }
+
+ switch resolved := resolved.(type) {+ case ref.Detached:
+ return resolved, nil
+ case ref.Symbolic:
+ target := strings.TrimSpace(resolved.Target)
+ if target == "" {+ return ref.Detached{}, fmt.Errorf("refstore/files: symbolic reference %q has empty target", resolved.Name())+ }
+
+ cur = target
+ default:
+ return ref.Detached{}, fmt.Errorf("refstore/files: unsupported reference type %T", resolved)+ }
+ }
+}
--- /dev/null
+++ b/refstore/files/resolve_list_test.go
@@ -1,0 +1,269 @@
+package files_test
+
+import (
+ "slices"
+ "testing"
+
+ "codeberg.org/lindenii/furgit/internal/testgit"
+ "codeberg.org/lindenii/furgit/objectid"
+ "codeberg.org/lindenii/furgit/ref"
+)
+
+func TestFilesResolveAndListOverlay(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true})+ _, _, packedID := testRepo.MakeCommit(t, "packed base")
+ _, _, looseID := testRepo.MakeCommit(t, "loose override")
+ testRepo.UpdateRef(t, "refs/heads/main", packedID)
+ testRepo.UpdateRef(t, "refs/tags/v1", packedID)
+ testRepo.SymbolicRef(t, "HEAD", "refs/heads/main")
+ testRepo.PackRefs(t, "--all", "--prune")
+ testRepo.UpdateRef(t, "refs/heads/main", looseID)
+ testRepo.UpdateRef(t, "refs/heads/dev", looseID)
+
+ store := openFilesStore(t, testRepo, algo)
+
+ resolvedMain, err := store.Resolve("refs/heads/main")+ if err != nil {+ t.Fatalf("Resolve(main): %v", err)+ }
+
+ mainDet, ok := resolvedMain.(ref.Detached)
+ if !ok {+ t.Fatalf("Resolve(main) type = %T, want ref.Detached", resolvedMain)+ }
+
+ if mainDet.ID != looseID {+ t.Fatalf("Resolve(main) id = %s, want %s", mainDet.ID, looseID)+ }
+
+ resolvedHead, err := store.Resolve("HEAD")+ if err != nil {+ t.Fatalf("Resolve(HEAD): %v", err)+ }
+
+ headSym, ok := resolvedHead.(ref.Symbolic)
+ if !ok {+ t.Fatalf("Resolve(HEAD) type = %T, want ref.Symbolic", resolvedHead)+ }
+
+ if headSym.Target != "refs/heads/main" {+ t.Fatalf("Resolve(HEAD) target = %q, want %q", headSym.Target, "refs/heads/main")+ }
+
+ fullHead, err := store.ResolveFully("HEAD")+ if err != nil {+ t.Fatalf("ResolveFully(HEAD): %v", err)+ }
+
+ if fullHead.ID != looseID {+ t.Fatalf("ResolveFully(HEAD) = %s, want %s", fullHead.ID, looseID)+ }
+
+ allRefs, err := store.List("")+ if err != nil {+ t.Fatalf("List(\"\"): %v", err)+ }
+
+ names := make([]string, 0, len(allRefs))
+ for _, entry := range allRefs {+ names = append(names, entry.Name())
+ }
+
+ slices.Sort(names)
+
+ want := []string{"HEAD", "refs/heads/dev", "refs/heads/main", "refs/tags/v1"}+ if !slices.Equal(names, want) {+ t.Fatalf("List(\"\") names = %v, want %v", names, want)+ }
+ })
+}
+
+func TestFilesLooseRefParsingMatchesGit(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, RefFormat: "files"})+ oid := testRepo.HashObject(t, "blob", []byte("payload\n"))+
+ testRepo.WriteFileAll(t, ".git/refs/heads/no-lf", []byte(oid.String()), 0o755, 0o644)
+ testRepo.WriteFileAll(t, ".git/refs/heads/trailing-ws", []byte(oid.String()+" "), 0o755, 0o644)
+ testRepo.WriteFileAll(t, ".git/refs/heads/leading-ws", []byte(" "+oid.String()+"\n"), 0o755, 0o644)+ testRepo.WriteFileAll(t, ".git/refs/heads/sym-trailing", []byte("ref: refs/heads/main "), 0o755, 0o644)+ testRepo.WriteFileAll(t, ".git/refs/heads/sym-leading", []byte(" ref: refs/heads/main\n"), 0o755, 0o644)+
+ store := openFilesStore(t, testRepo, algo)
+
+ got, err := store.ResolveFully("refs/heads/no-lf")+ if err != nil {+ t.Fatalf("ResolveFully(no-lf): %v", err)+ }
+
+ if got.ID != oid {+ t.Fatalf("ResolveFully(no-lf) = %s, want %s", got.ID, oid)+ }
+
+ got, err = store.ResolveFully("refs/heads/trailing-ws")+ if err != nil {+ t.Fatalf("ResolveFully(trailing-ws): %v", err)+ }
+
+ if got.ID != oid {+ t.Fatalf("ResolveFully(trailing-ws) = %s, want %s", got.ID, oid)+ }
+
+ _, err = store.Resolve("refs/heads/leading-ws")+ if err == nil {+ t.Fatal("Resolve(leading-ws) unexpectedly succeeded")+ }
+
+ resolved, err := store.Resolve("refs/heads/sym-trailing")+ if err != nil {+ t.Fatalf("Resolve(sym-trailing): %v", err)+ }
+
+ sym, ok := resolved.(ref.Symbolic)
+ if !ok {+ t.Fatalf("Resolve(sym-trailing) type = %T, want ref.Symbolic", resolved)+ }
+
+ if sym.Target != "refs/heads/main" {+ t.Fatalf("Resolve(sym-trailing) target = %q, want %q", sym.Target, "refs/heads/main")+ }
+
+ _, err = store.Resolve("refs/heads/sym-leading")+ if err == nil {+ t.Fatal("Resolve(sym-leading) unexpectedly succeeded")+ }
+ })
+}
+
+func TestFilesRejectMalformedPackedRefs(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true, RefFormat: "files"})+ _, _, commitID := testRepo.MakeCommit(t, "packed")
+ testRepo.UpdateRef(t, "refs/heads/main", commitID)
+ testRepo.PackRefs(t, "--all", "--prune")
+
+ hex := commitID.String()
+
+ cases := []struct {+ name string
+ content string
+ }{+ {+ name: "unterminated line",
+ content: "# pack-refs with: peeled fully-peeled sorted\n" + hex + " refs/heads/main",
+ },
+ {+ name: "junk line",
+ content: "# pack-refs with: peeled fully-peeled sorted\nbogus content\n",
+ },
+ {+ name: "short oid",
+ content: "# pack-refs with: peeled fully-peeled sorted\n" + hex[:7] + " refs/heads/main\n",
+ },
+ {+ name: "trailing garbage after oid",
+ content: "# pack-refs with: peeled fully-peeled sorted\n" + hex + "xrefs/heads/main\n",
+ },
+ {+ name: "malformed peeled line",
+ content: "# pack-refs with: peeled fully-peeled sorted\n" + hex + " refs/tags/v1\n^" + hex + " garbage\n",
+ },
+ }
+
+ for _, tc := range cases {+ t.Run(tc.name, func(t *testing.T) {+ testRepo.WriteFile(t, "packed-refs", []byte(tc.content), 0o644)
+ store := openFilesStore(t, testRepo, algo)
+
+ _, err := store.List("")+ if err == nil {+ t.Fatal("List unexpectedly succeeded")+ }
+ })
+ }
+ })
+}
+
+func TestFilesPackedRefsReadSemanticsMatchGit(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ t.Run("stale packed entry is still readable", func(t *testing.T) {+ t.Parallel()
+
+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, RefFormat: "files"})+ testRepo.Run(t, "commit", "--allow-empty", "-m", "one")
+
+ oneID, err := objectid.ParseHex(algo, testRepo.Run(t, "rev-parse", "HEAD"))
+ if err != nil {+ t.Fatalf("ParseHex(one): %v", err)+ }
+
+ testRepo.Run(t, "tag", "-a", "v1.0", "-m", "v1.0", "HEAD")
+ testRepo.PackRefs(t, "--all", "--prune")
+ testRepo.Run(t, "checkout", "--orphan", "another")
+ testRepo.Run(t, "commit", "--allow-empty", "-m", "two")
+ testRepo.Run(t, "checkout", "-B", "main")
+ testRepo.Run(t, "branch", "-D", "another")
+ testRepo.Run(t, "reflog", "expire", "--expire=now", "--all")
+ testRepo.Run(t, "prune")
+
+ store := openFilesStore(t, testRepo, algo)
+
+ got, err := store.ResolveFully("refs/heads/main")+ if err != nil {+ t.Fatalf("ResolveFully(main): %v", err)+ }
+
+ if got.ID == oneID {+ t.Fatalf("ResolveFully(main) unexpectedly returned stale packed id %s", oneID)+ }
+
+ tagRef, err := store.Resolve("refs/tags/v1.0")+ if err != nil {+ t.Fatalf("Resolve(tag): %v", err)+ }
+
+ tagDet, ok := tagRef.(ref.Detached)
+ if !ok {+ t.Fatalf("Resolve(tag) type = %T, want ref.Detached", tagRef)+ }
+
+ if tagDet.ID.Size() == 0 {+ t.Fatal("Resolve(tag) returned zero object id")+ }
+ })
+
+ t.Run("exact unicode packed ref remains enumerable", func(t *testing.T) {+ t.Parallel()
+
+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, RefFormat: "files"})+ _, _, commitID := testRepo.MakeCommit(t, "unicode")
+ testRepo.UpdateRef(t, "refs/heads/\ue43f", commitID)
+ testRepo.UpdateRef(t, "refs/heads/z", commitID)
+ testRepo.PackRefs(t, "--all", "--prune")
+
+ store := openFilesStore(t, testRepo, algo)
+
+ listed, err := store.List("refs/heads/z")+ if err != nil {+ t.Fatalf("List(refs/heads/z): %v", err)+ }
+
+ if len(listed) != 1 {+ t.Fatalf("List(refs/heads/z) len = %d, want 1", len(listed))+ }
+
+ if listed[0].Name() != "refs/heads/z" {+ t.Fatalf("List(refs/heads/z)[0] = %q, want %q", listed[0].Name(), "refs/heads/z")+ }
+ })
+ })
+}
--- /dev/null
+++ b/refstore/files/store.go
@@ -1,0 +1,152 @@
+// Package files provides one Git files ref store with loose-over-packed reads
+// and transaction-coordinated updates.
+package files
+
+import (
+ "errors"
+ "io"
+ "math/rand"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "codeberg.org/lindenii/furgit/config"
+ "codeberg.org/lindenii/furgit/objectid"
+ "codeberg.org/lindenii/furgit/ref/refname"
+ "codeberg.org/lindenii/furgit/refstore"
+)
+
// Store reads and writes one Git files ref namespace rooted at one repository
// gitdir plus its commondir.
//
// Store owns both roots and closes them in Close.
type Store struct {
	gitRoot    *os.Root           // per-worktree gitdir root
	commonRoot *os.Root           // commondir root; may point at the same directory as gitRoot
	algo       objectid.Algorithm // object-id algorithm used to parse ref contents
	lockRand   *rand.Rand         // randomness source, presumably for lock file names — usage not in this chunk; confirm

	packedRefsTimeout time.Duration // wait budget for packed-refs.lock (core.packedrefstimeout, ms)
}

// Compile-time interface conformance checks.
var (
	_ refstore.ReadingStore       = (*Store)(nil)
	_ refstore.TransactionalStore = (*Store)(nil)
)

// rootKind selects which of the store's two roots a refPath is relative to.
type rootKind uint8

const (
	rootGit    rootKind = iota // relative to the gitdir root
	rootCommon                 // relative to the commondir root
)

// refPath is a root selector plus a root-relative path for one loose ref.
type refPath struct {
	root rootKind
	path string
}
+
+// New creates one files ref store rooted at one repository gitdir.
+func New(root *os.Root, algo objectid.Algorithm) (*Store, error) {+ if algo.Size() == 0 {+ return nil, objectid.ErrInvalidAlgorithm
+ }
+
+ commonRoot, err := openCommonRoot(root)
+ if err != nil {+ return nil, err
+ }
+
+ return &Store{+ gitRoot: root,
+ commonRoot: commonRoot,
+ algo: algo,
+ lockRand: rand.New(rand.NewSource(time.Now().UnixNano())), //nolint:gosec
+ packedRefsTimeout: detectPackedRefsTimeout(commonRoot),
+ }, nil
+}
+
+// Close releases resources associated with the store.
+func (store *Store) Close() error {+ err := store.gitRoot.Close()
+ commonErr := store.commonRoot.Close()
+
+ if err != nil {+ return err
+ }
+
+ return commonErr
+}
+
+func openCommonRoot(gitRoot *os.Root) (*os.Root, error) {+ content, err := gitRoot.ReadFile("commondir")+ if err != nil {+ if errorsIsNotExist(err) {+ return gitRoot.OpenRoot(".")+ }
+
+ return nil, err
+ }
+
+ commonDir := strings.TrimSpace(string(content))
+ if commonDir == "" {+ return nil, os.ErrNotExist
+ }
+
+ if filepath.IsAbs(commonDir) {+ return os.OpenRoot(commonDir)
+ }
+
+ // This is okay because that's how Git defines it anyway.
+ return os.OpenRoot(filepath.Join(gitRoot.Name(), commonDir))
+}
+
+func (store *Store) rootFor(kind rootKind) *os.Root {+ if kind == rootCommon {+ return store.commonRoot
+ }
+
+ return store.gitRoot
+}
+
// loosePath maps a ref name to the root (gitdir vs commondir) and the
// root-relative path where its loose file lives, following the worktree
// classification from refname.ParseWorktree.
func (store *Store) loosePath(name string) refPath {
	parsed := refname.ParseWorktree(name)
	switch parsed.Type {
	case refname.WorktreeCurrent:
		// Per-worktree refs live in this worktree's own gitdir.
		return refPath{root: rootGit, path: parsed.BareRefName}
	case refname.WorktreeMain, refname.WorktreeShared:
		// Main-worktree and shared refs live in the common dir.
		return refPath{root: rootCommon, path: parsed.BareRefName}
	case refname.WorktreeOther:
		// Another worktree's refs live under commondir/worktrees/<name>.
		return refPath{
			root: rootCommon,
			path: path.Join("worktrees", parsed.WorktreeName, parsed.BareRefName),
		}
	default:
		// Unrecognized classifications fall back to the common dir with
		// the name used verbatim as the path.
		return refPath{root: rootCommon, path: name}
	}
}
+
+func detectPackedRefsTimeout(commonRoot *os.Root) time.Duration {+ const defaultTimeout = time.Second
+
+ file, err := commonRoot.Open("config")+ if err != nil {+ return defaultTimeout
+ }
+
+ defer func() { _ = file.Close() }()+
+ cfg, err := config.ParseConfig(file)
+ if err != nil && !errors.Is(err, io.EOF) {+ return defaultTimeout
+ }
+
+ timeoutValue, err := cfg.Lookup("core", "", "packedrefstimeout").Int()+ if err != nil {+ return defaultTimeout
+ }
+
+ return time.Duration(timeoutValue) * time.Millisecond
+}
--- /dev/null
+++ b/refstore/files/transaction.go
@@ -1,0 +1,262 @@
+package files
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "strings"
+
+ "codeberg.org/lindenii/furgit/objectid"
+ "codeberg.org/lindenii/furgit/ref/refname"
+ "codeberg.org/lindenii/furgit/refstore"
+)
+
// txKind identifies what a single queued transaction operation does.
type txKind uint8

const (
	txCreate         txKind = iota // create a new direct reference
	txUpdate                       // change an existing direct reference
	txDelete                       // remove a direct reference
	txVerify                       // assert a direct reference's current value
	txCreateSymbolic               // create a new symbolic reference
	txUpdateSymbolic               // change an existing symbolic reference
	txDeleteSymbolic               // remove a symbolic reference
	txVerifySymbolic               // assert a symbolic reference's current target
)

// txOp is one queued operation. Which of the ID/target fields are
// meaningful depends on kind: direct operations use newID/oldID,
// symbolic operations use newTarget/oldTarget.
type txOp struct {
	name      string
	kind      txKind
	newID     objectid.ObjectID
	oldID     objectid.ObjectID
	newTarget string
	oldTarget string
}

// directKind classifies what was found when reading a reference
// directly, i.e. without following symbolic references.
type directKind uint8

const (
	directMissing  directKind = iota // reference does not exist
	directDetached                   // reference holds an object ID
	directSymbolic                   // reference points at another reference
)

// directRef is the result of directly reading one reference.
type directRef struct {
	kind     directKind
	name     string
	id       objectid.ObjectID // set when kind == directDetached
	target   string            // set when kind == directSymbolic
	isLoose  bool              // a loose file for this name was read
	isPacked bool              // a packed-refs entry for this name exists
}

// resolvedWriteTarget is the concrete reference a queued operation
// will act on after following any symbolic references.
type resolvedWriteTarget struct {
	name string  // fully resolved reference name
	loc  refPath // location of the loose file for name
	ref  directRef
}

// preparedTxOp pairs a queued operation with its resolved target.
type preparedTxOp struct {
	op     txOp
	target resolvedWriteTarget
}

// Transaction batches reference updates against a files-backed Store.
// Operations are queued by the mutation methods and applied by Commit
// using per-reference lock files; Abort discards them.
type Transaction struct {
	store *Store
	ops   []txOp
	// closed is set once the transaction has been committed or
	// aborted; all further method calls then fail.
	closed bool
}

var _ refstore.Transaction = (*Transaction)(nil)
+
+// BeginTransaction creates one new files transaction.
+//
+//nolint:ireturn
+func (store *Store) BeginTransaction() (refstore.Transaction, error) {+ return &Transaction{+ store: store,
+ ops: make([]txOp, 0, 8),
+ }, nil
+}
+
// Create queues creation of direct reference name with value newID.
func (tx *Transaction) Create(name string, newID objectid.ObjectID) error {
	return tx.queue(txOp{name: name, kind: txCreate, newID: newID})
}

// Update queues changing direct reference name to newID; oldID is the
// expected current value, checked during Commit.
func (tx *Transaction) Update(name string, newID, oldID objectid.ObjectID) error {
	return tx.queue(txOp{name: name, kind: txUpdate, newID: newID, oldID: oldID})
}

// Delete queues removal of direct reference name; oldID is the
// expected current value, checked during Commit.
func (tx *Transaction) Delete(name string, oldID objectid.ObjectID) error {
	return tx.queue(txOp{name: name, kind: txDelete, oldID: oldID})
}

// Verify queues a check that direct reference name currently has
// value oldID, without modifying it.
func (tx *Transaction) Verify(name string, oldID objectid.ObjectID) error {
	return tx.queue(txOp{name: name, kind: txVerify, oldID: oldID})
}

// CreateSymbolic queues creation of symbolic reference name pointing
// at newTarget.
func (tx *Transaction) CreateSymbolic(name, newTarget string) error {
	return tx.queue(txOp{name: name, kind: txCreateSymbolic, newTarget: newTarget})
}

// UpdateSymbolic queues changing symbolic reference name to
// newTarget; oldTarget is the expected current target.
func (tx *Transaction) UpdateSymbolic(name, newTarget, oldTarget string) error {
	return tx.queue(txOp{name: name, kind: txUpdateSymbolic, newTarget: newTarget, oldTarget: oldTarget})
}

// DeleteSymbolic queues removal of symbolic reference name; oldTarget
// is the expected current target.
func (tx *Transaction) DeleteSymbolic(name, oldTarget string) error {
	return tx.queue(txOp{name: name, kind: txDeleteSymbolic, oldTarget: oldTarget})
}

// VerifySymbolic queues a check that symbolic reference name
// currently points at oldTarget, without modifying it.
func (tx *Transaction) VerifySymbolic(name, oldTarget string) error {
	return tx.queue(txOp{name: name, kind: txVerifySymbolic, oldTarget: oldTarget})
}
+
// Commit applies all queued operations and closes the transaction.
//
// prepare resolves each operation to a concrete target, checks name
// availability and current values, and takes the lock files; the
// deferred cleanup removes those lock files on every exit path.
// Mutations then happen in three phases: loose files are written for
// creating/updating operations, packed-refs deletions are applied,
// and finally loose files are removed for deleting operations.
func (tx *Transaction) Commit() error {
	err := tx.ensureOpen()
	if err != nil {
		return err
	}

	prepared, err := tx.prepare()
	if err != nil {
		tx.closed = true

		return err
	}

	defer func() {
		_ = tx.cleanup(prepared)
	}()

	// Phase 1: write loose files for every operation that creates or
	// updates a reference; delete/verify kinds are skipped here.
	for _, item := range prepared {
		if item.op.kind == txDelete || item.op.kind == txDeleteSymbolic || item.op.kind == txVerify || item.op.kind == txVerifySymbolic {
			continue
		}

		err = tx.writeLoose(item)
		if err != nil {
			tx.closed = true

			return err
		}
	}

	// Phase 2: drop deleted references from packed-refs.
	err = tx.applyPackedDeletes(prepared)
	if err != nil {
		tx.closed = true

		return err
	}

	// Phase 3: remove loose files for deletions, tolerating files
	// that have already vanished, then prune now-empty parent dirs.
	for _, item := range prepared {
		switch item.op.kind {
		case txDelete, txDeleteSymbolic:
			if item.target.ref.isLoose {
				err = tx.store.rootFor(item.target.loc.root).Remove(item.target.loc.path)
				if err != nil && !errors.Is(err, os.ErrNotExist) {
					tx.closed = true

					return err
				}

				tx.tryRemoveEmptyParents(item.target.name)
			}
		case txCreate, txUpdate, txVerify, txCreateSymbolic, txUpdateSymbolic, txVerifySymbolic:
		}
	}

	tx.closed = true

	return nil
}
+
+func (tx *Transaction) Abort() error {+ err := tx.ensureOpen()
+ if err != nil {+ return err
+ }
+
+ tx.closed = true
+
+ return nil
+}
+
+func (tx *Transaction) ensureOpen() error {+ if tx.closed {+ return fmt.Errorf("refstore/files: transaction already closed")+ }
+
+ return nil
+}
+
+func (tx *Transaction) queue(op txOp) error {+ err := tx.ensureOpen()
+ if err != nil {+ return err
+ }
+
+ err = tx.validateOp(op)
+ if err != nil {+ return err
+ }
+
+ tx.ops = append(tx.ops, op)
+
+ return nil
+}
+
+func (tx *Transaction) validateOp(op txOp) error {+ if op.name == "" {+ return fmt.Errorf("refstore/files: empty reference name")+ }
+
+ switch op.kind {+ case txCreate, txUpdate:
+ err := refname.ValidateUpdateName(op.name, true)
+ if err != nil {+ return err
+ }
+
+ if op.newID.Size() == 0 {+ return objectid.ErrInvalidAlgorithm
+ }
+ case txDelete, txVerify:
+ err := refname.ValidateUpdateName(op.name, false)
+ if err != nil {+ return err
+ }
+
+ if op.oldID.Size() == 0 {+ return objectid.ErrInvalidAlgorithm
+ }
+ case txCreateSymbolic, txUpdateSymbolic:
+ err := refname.ValidateUpdateName(op.name, true)
+ if err != nil {+ return err
+ }
+
+ if strings.TrimSpace(op.newTarget) == "" {+ return fmt.Errorf("refstore/files: empty symbolic target")+ }
+
+ err = refname.ValidateSymbolicTarget(op.name, strings.TrimSpace(op.newTarget))
+ if err != nil {+ return err
+ }
+ case txDeleteSymbolic, txVerifySymbolic:
+ err := refname.ValidateUpdateName(op.name, false)
+ if err != nil {+ return err
+ }
+ default:
+ return fmt.Errorf("refstore/files: unsupported transaction operation %d", op.kind)+ }
+
+ if op.kind == txUpdateSymbolic || op.kind == txDeleteSymbolic || op.kind == txVerifySymbolic {+ if strings.TrimSpace(op.oldTarget) == "" {+ return fmt.Errorf("refstore/files: empty symbolic old target")+ }
+ }
+
+ return nil
+}
--- /dev/null
+++ b/refstore/files/transaction_cleanup.go
@@ -1,0 +1,120 @@
+package files
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path"
+ "slices"
+)
+
// cleanup removes the lock files taken by prepare: one per prepared
// target plus the packed-refs lock. Keys are sorted and deduplicated
// so each lock is removed exactly once. Missing lock files are
// tolerated (prepare may have failed before creating them). The first
// removal error is reported after all removals have been attempted.
func (tx *Transaction) cleanup(prepared []preparedTxOp) error {
	var firstErr error

	lockNames := make([]string, 0, len(prepared)+1)
	for _, item := range prepared {
		lockNames = append(lockNames, tx.targetKey(item.target.loc))
	}

	// The packed-refs lock is always included even if it was never
	// taken; its removal then simply reports os.ErrNotExist.
	lockNames = append(lockNames, tx.targetKey(refPath{root: rootCommon, path: "packed-refs"}))
	slices.Sort(lockNames)
	lockNames = slices.Compact(lockNames)

	for _, lockKey := range lockNames {
		lockPath := refPathFromKey(lockKey)
		lockName := lockPath.path + ".lock"
		root := tx.store.rootFor(lockPath.root)

		err := root.Remove(lockName)
		if err == nil || errors.Is(err, os.ErrNotExist) {
			// Prune any directories that only existed to hold the lock.
			tx.tryRemoveEmptyParentPaths(lockPath.root, lockName)

			continue
		}

		if firstErr == nil {
			firstErr = err
		}
	}

	return firstErr
}
+
+func (tx *Transaction) tryRemoveEmptyParents(name string) {+ loc := tx.store.loosePath(name)
+ tx.tryRemoveEmptyParentPaths(loc.root, loc.path)
+}
+
+func (tx *Transaction) tryRemoveEmptyParentPaths(kind rootKind, name string) {+ root := tx.store.rootFor(kind)
+ dir := path.Dir(name)
+
+ for dir != "." && dir != "/" {+ err := root.Remove(dir)
+ if err != nil {+ if errors.Is(err, os.ErrNotExist) {+ return
+ }
+
+ var pathErr *os.PathError
+ if errors.As(err, &pathErr) {+ return
+ }
+
+ return
+ }
+
+ dir = path.Dir(dir)
+ }
+}
+
+func (tx *Transaction) removeEmptyDirTree(name refPath) error {+ root := tx.store.rootFor(name.root)
+
+ info, err := root.Stat(name.path)
+ if err != nil {+ if errors.Is(err, os.ErrNotExist) {+ return nil
+ }
+
+ return err
+ }
+
+ if !info.IsDir() {+ return nil
+ }
+
+ return tx.removeEmptyDirTreeRecursive(name)
+}
+
+func (tx *Transaction) removeEmptyDirTreeRecursive(name refPath) error {+ root := tx.store.rootFor(name.root)
+
+ dir, err := root.Open(name.path)
+ if err != nil {+ return err
+ }
+
+ entries, err := dir.ReadDir(-1)
+ _ = dir.Close()
+
+ if err != nil {+ return err
+ }
+
+ for _, entry := range entries {+ if !entry.IsDir() {+ return fmt.Errorf("refstore/files: non-empty directory blocks reference %q", name.path)+ }
+
+ err = tx.removeEmptyDirTreeRecursive(refPath{+ root: name.root,
+ path: path.Join(name.path, entry.Name()),
+ })
+ if err != nil {+ return err
+ }
+ }
+
+ return root.Remove(name.path)
+}
--- /dev/null
+++ b/refstore/files/transaction_dirs_test.go
@@ -1,0 +1,220 @@
+package files_test
+
+import (
+ "testing"
+
+ "codeberg.org/lindenii/furgit/internal/testgit"
+ "codeberg.org/lindenii/furgit/objectid"
+)
+
// TestFilesTransactionEmptyDirectoriesDoNotBlock checks that leftover
// empty directories under refs/ (created and emptied via .keep files)
// do not block Verify, Update, Create, or Delete on a reference whose
// loose path is shadowed by such a directory.
func TestFilesTransactionEmptyDirectoriesDoNotBlock(t *testing.T) {
	t.Parallel()

	testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper
		testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true})
		_, _, oldID := testRepo.MakeCommit(t, "old")
		_, _, newID := testRepo.MakeCommit(t, "new")

		// Verify: ref packed, empty dirs left at its loose path.
		testRepo.UpdateRef(t, "refs/e-verify/foo", oldID)
		testRepo.PackRefs(t, "--all", "--prune")
		testRepo.WriteFileAll(t, "refs/e-verify/foo/bar/baz/.keep", []byte{}, 0o755, 0o644)
		testRepo.Remove(t, "refs/e-verify/foo/bar/baz/.keep")

		store := openFilesStore(t, testRepo, algo)

		tx, err := store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(verify): %v", err)
		}

		err = tx.Verify("refs/e-verify/foo", oldID)
		if err != nil {
			t.Fatalf("Verify with empty directories: %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(verify with empty directories): %v", err)
		}

		// Update: same setup, the update must land and resolve.
		testRepo.UpdateRef(t, "refs/e-update/foo", oldID)
		testRepo.PackRefs(t, "--all", "--prune")
		testRepo.WriteFileAll(t, "refs/e-update/foo/bar/baz/.keep", []byte{}, 0o755, 0o644)
		testRepo.Remove(t, "refs/e-update/foo/bar/baz/.keep")

		tx, err = store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(update): %v", err)
		}

		err = tx.Update("refs/e-update/foo", newID, oldID)
		if err != nil {
			t.Fatalf("Update with empty directories: %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(update with empty directories): %v", err)
		}

		got, err := store.ResolveFully("refs/e-update/foo")
		if err != nil {
			t.Fatalf("ResolveFully(updated foo): %v", err)
		}

		if got.ID != newID {
			t.Fatalf("updated foo = %s, want %s", got.ID, newID)
		}

		// Create: the ref does not exist yet, only the empty dirs do.
		testRepo.WriteFileAll(t, "refs/e-create/foo/bar/baz/.keep", []byte{}, 0o755, 0o644)
		testRepo.Remove(t, "refs/e-create/foo/bar/baz/.keep")

		tx, err = store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(create): %v", err)
		}

		err = tx.Create("refs/e-create/foo", oldID)
		if err != nil {
			t.Fatalf("Create with empty directories: %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(create with empty directories): %v", err)
		}

		got, err = store.ResolveFully("refs/e-create/foo")
		if err != nil {
			t.Fatalf("ResolveFully(created foo): %v", err)
		}

		if got.ID != oldID {
			t.Fatalf("created foo = %s, want %s", got.ID, oldID)
		}

		// Delete: packed ref with empty dirs at its loose path.
		testRepo.UpdateRef(t, "refs/e-delete/foo", oldID)
		testRepo.PackRefs(t, "--all", "--prune")
		testRepo.WriteFileAll(t, "refs/e-delete/foo/bar/baz/.keep", []byte{}, 0o755, 0o644)
		testRepo.Remove(t, "refs/e-delete/foo/bar/baz/.keep")

		tx, err = store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(delete): %v", err)
		}

		err = tx.Delete("refs/e-delete/foo", oldID)
		if err != nil {
			t.Fatalf("Delete with empty directories: %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(delete with empty directories): %v", err)
		}
	})
}
+
// TestFilesTransactionNonEmptyDirectoryAndBrokenRefBlockCreate checks
// that Create fails at Commit time when the target loose path is
// occupied by a directory containing a file, or by a file with
// unparsable reference content. Queueing itself must still succeed.
func TestFilesTransactionNonEmptyDirectoryAndBrokenRefBlockCreate(t *testing.T) {
	t.Parallel()

	testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper
		testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true})
		_, _, commitID := testRepo.MakeCommit(t, "base")
		store := openFilesStore(t, testRepo, algo)

		// A stray file inside the would-be ref directory makes it
		// non-empty, which must block creation.
		testRepo.WriteFileAll(t, "refs/ne-create/foo/bar/baz.lock", []byte(""), 0o755, 0o644)

		tx, err := store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(non-empty dir): %v", err)
		}

		err = tx.Create("refs/ne-create/foo", commitID)
		if err != nil {
			t.Fatalf("Create(non-empty dir) queue: %v", err)
		}

		err = tx.Commit()
		if err == nil {
			t.Fatal("Commit(non-empty dir) unexpectedly succeeded")
		}

		// A loose file with garbage content must also block creation.
		testRepo.WriteFileAll(t, "refs/broken/foo", []byte("gobbledigook\n"), 0o755, 0o644)

		tx, err = store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(broken ref): %v", err)
		}

		err = tx.Create("refs/broken/foo", commitID)
		if err != nil {
			t.Fatalf("Create(broken ref) queue: %v", err)
		}

		err = tx.Commit()
		if err == nil {
			t.Fatal("Commit(broken ref) unexpectedly succeeded")
		}
	})
}
+
// TestFilesTransactionIndirectCreateMatchesGit checks that a Create
// through a symbolic reference fails when the symref's referent is
// blocked — either by a non-empty directory at the referent's loose
// path, or by a referent file with unparsable content.
func TestFilesTransactionIndirectCreateMatchesGit(t *testing.T) {
	t.Parallel()

	testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper
		t.Run("non-empty directory blocks", func(t *testing.T) {
			t.Parallel()

			repo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, RefFormat: "files"})
			_, _, innerID := repo.MakeCommit(t, "inner")
			prefix := "refs/ne-indirect-create"

			// symref -> foo, and foo's loose path is occupied by a
			// directory containing a file.
			repo.SymbolicRef(t, prefix+"/symref", prefix+"/foo")
			repo.WriteFileAll(t, ".git/"+prefix+"/foo/bar/baz.lock", []byte{}, 0o755, 0o644)
			store := openFilesStore(t, repo, algo)

			tx, err := store.BeginTransaction()
			if err != nil {
				t.Fatalf("BeginTransaction(non-empty): %v", err)
			}

			err = tx.Create(prefix+"/symref", innerID)
			if err != nil {
				t.Fatalf("Create(non-empty) queue: %v", err)
			}

			err = tx.Commit()
			if err == nil {
				t.Fatal("Commit(non-empty) unexpectedly succeeded")
			}
		})

		t.Run("broken referent blocks", func(t *testing.T) {
			t.Parallel()

			repo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, RefFormat: "files"})
			_, _, commitID := repo.MakeCommit(t, "broken")
			prefix := "refs/broken-indirect-create"

			// symref -> foo, where foo holds garbage content.
			repo.SymbolicRef(t, prefix+"/symref", prefix+"/foo")
			repo.WriteFileAll(t, ".git/"+prefix+"/foo", []byte("gobbledigook\n"), 0o755, 0o644)
			store := openFilesStore(t, repo, algo)

			tx, err := store.BeginTransaction()
			if err != nil {
				t.Fatalf("BeginTransaction(broken): %v", err)
			}

			err = tx.Create(prefix+"/symref", commitID)
			if err != nil {
				t.Fatalf("Create(broken) queue: %v", err)
			}

			err = tx.Commit()
			if err == nil {
				t.Fatal("Commit(broken) unexpectedly succeeded")
			}
		})
	})
}
--- /dev/null
+++ b/refstore/files/transaction_lock.go
@@ -1,0 +1,84 @@
+package files
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path"
+ "strings"
+ "time"
+)
+
+func (tx *Transaction) createLock(name refPath) error {+ root := tx.store.rootFor(name.root)
+ dir := path.Dir(name.path)
+
+ if dir != "." {+ err := root.MkdirAll(dir, 0o755)
+ if err != nil {+ return err
+ }
+ }
+
+ file, err := root.OpenFile(name.path+".lock", os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0o644)
+ if err != nil {+ return err
+ }
+
+ return file.Close()
+}
+
// createPackedLock takes packed-refs.lock in the common root,
// retrying with growing, jittered backoff until the store's
// configured timeout expires.
//
// Timeout semantics: zero fails immediately on contention, a positive
// value retries until the deadline, and a negative value retries
// indefinitely (the deadline check is skipped for negative timeouts).
func (tx *Transaction) createPackedLock() error {
	const (
		initialBackoffMs = 1
		backoffMaxMultiplier = 1000
	)

	timeout := tx.store.packedRefsTimeout
	deadline := time.Now().Add(timeout)
	multiplier := 1
	n := 1

	for {
		// O_EXCL creation is the atomic lock-acquisition step.
		file, err := tx.store.commonRoot.OpenFile("packed-refs.lock", os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0o644)
		if err == nil {
			return file.Close()
		}

		// Anything other than "already exists" is a real failure, not
		// lock contention.
		if !errors.Is(err, os.ErrExist) {
			return err
		}

		if timeout == 0 || (timeout > 0 && time.Now().After(deadline)) {
			return err
		}

		// Sleep 75%-125% of the current backoff value (jitter avoids
		// lockstep retries between competing processes).
		backoffMs := multiplier * initialBackoffMs
		waitMs := (750 + tx.store.lockRand.Intn(500)) * backoffMs / 1000
		time.Sleep(time.Duration(waitMs) * time.Millisecond)

		// Adding successive odd numbers walks multiplier through the
		// perfect squares (1, 4, 9, 16, ...), capped at the maximum.
		multiplier += 2*n + 1
		if multiplier > backoffMaxMultiplier {
			multiplier = backoffMaxMultiplier
		} else {
			n++
		}
	}
}
+
+func (tx *Transaction) targetKey(name refPath) string {+ return fmt.Sprintf("%d:%s", name.root, name.path)+}
+
+func refPathFromKey(key string) refPath {+ rootValue, pathValue, ok := strings.Cut(key, ":")
+ if !ok || rootValue == "" {+ return refPath{root: rootCommon, path: key}+ }
+
+ if rootValue == "0" {+ return refPath{root: rootGit, path: pathValue}+ }
+
+ return refPath{root: rootCommon, path: pathValue}+}
--- /dev/null
+++ b/refstore/files/transaction_names_test.go
@@ -1,0 +1,188 @@
+package files_test
+
+import (
+ "testing"
+
+ "codeberg.org/lindenii/furgit/internal/testgit"
+ "codeberg.org/lindenii/furgit/objectid"
+ "codeberg.org/lindenii/furgit/ref"
+ "codeberg.org/lindenii/furgit/refstore"
+)
+
// TestFilesTransactionValidateUpdateNames checks which reference
// names are accepted at queue time for Create (strict naming) versus
// Verify (delete-style naming). Each case runs in its own transaction
// that is aborted afterwards, so no state leaks between cases.
func TestFilesTransactionValidateUpdateNames(t *testing.T) {
	t.Parallel()

	testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper
		testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true})
		_, _, commitID := testRepo.MakeCommit(t, "base")

		store := openFilesStore(t, testRepo, algo)

		tests := []struct {
			name    string
			queue   func(refstore.Transaction) error
			wantErr bool
		}{
			{
				name: "create refs/heads/main",
				queue: func(tx refstore.Transaction) error {
					return tx.Create("refs/heads/main", commitID)
				},
			},
			{
				name: "create foo/bar",
				queue: func(tx refstore.Transaction) error {
					return tx.Create("foo/bar", commitID)
				},
			},
			{
				name: "create FETCH_HEAD",
				queue: func(tx refstore.Transaction) error {
					return tx.Create("FETCH_HEAD", commitID)
				},
				wantErr: true,
			},
			{
				name: "create MERGE_HEAD",
				queue: func(tx refstore.Transaction) error {
					return tx.Create("MERGE_HEAD", commitID)
				},
				wantErr: true,
			},
			{
				name: "create bad refname",
				queue: func(tx refstore.Transaction) error {
					return tx.Create("refs/heads/.bad", commitID)
				},
				wantErr: true,
			},
			{
				name: "verify unsafe delete-style name",
				queue: func(tx refstore.Transaction) error {
					return tx.Verify("foo/bar", commitID)
				},
				wantErr: true,
			},
			{
				name: "verify pseudoref-style name",
				queue: func(tx refstore.Transaction) error {
					return tx.Verify("PSEUDOREF", commitID)
				},
				wantErr: false,
			},
		}

		for _, tt := range tests {
			t.Run(tt.name, func(t *testing.T) {
				tx, err := store.BeginTransaction()
				if err != nil {
					t.Fatalf("BeginTransaction: %v", err)
				}

				err = tt.queue(tx)
				if (err != nil) != tt.wantErr {
					t.Fatalf("queue err=%v, wantErr=%v", err, tt.wantErr)
				}

				_ = tx.Abort()
			})
		}
	})
}
+
// TestFilesTransactionSymbolicTargetRules checks queue-time target
// validation for CreateSymbolic: HEAD only accepts refs/heads/*
// targets, while other references accept top-level and worktree
// targets but reject malformed names. It then commits one symbolic
// creation and confirms the stored target round-trips via Resolve.
func TestFilesTransactionSymbolicTargetRules(t *testing.T) {
	t.Parallel()

	testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper
		testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true})
		_, _, mainID := testRepo.MakeCommit(t, "main")
		testRepo.UpdateRef(t, "refs/heads/main", mainID)
		testRepo.UpdateRef(t, "ORIG_HEAD", mainID)

		store := openFilesStore(t, testRepo, algo)

		tests := []struct {
			name    string
			queue   func(refstore.Transaction) error
			wantErr bool
		}{
			{
				name: "head requires branch target",
				queue: func(tx refstore.Transaction) error {
					return tx.CreateSymbolic("HEAD", "foo")
				},
				wantErr: true,
			},
			{
				name: "head accepts refs/heads target",
				queue: func(tx refstore.Transaction) error {
					return tx.CreateSymbolic("HEAD", "refs/heads/main")
				},
			},
			{
				name: "non-head allows top-level target",
				queue: func(tx refstore.Transaction) error {
					return tx.CreateSymbolic("refs/heads/top-level", "ORIG_HEAD")
				},
			},
			{
				name: "non-head rejects invalid target",
				queue: func(tx refstore.Transaction) error {
					return tx.CreateSymbolic("refs/heads/invalid", "foo..bar")
				},
				wantErr: true,
			},
			{
				name: "non-head allows worktree target",
				queue: func(tx refstore.Transaction) error {
					return tx.CreateSymbolic("refs/heads/worktree-target", "worktrees/wt1/HEAD")
				},
			},
		}

		for _, tt := range tests {
			t.Run(tt.name, func(t *testing.T) {
				tx, err := store.BeginTransaction()
				if err != nil {
					t.Fatalf("BeginTransaction: %v", err)
				}

				err = tt.queue(tx)
				if (err != nil) != tt.wantErr {
					t.Fatalf("queue err=%v, wantErr=%v", err, tt.wantErr)
				}

				_ = tx.Abort()
			})
		}

		// Actually commit one symbolic creation and read it back.
		tx, err := store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(final symbolic): %v", err)
		}

		err = tx.CreateSymbolic("refs/heads/top-level", "ORIG_HEAD")
		if err != nil {
			t.Fatalf("CreateSymbolic(top-level): %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(CreateSymbolic top-level): %v", err)
		}

		got, err := store.Resolve("refs/heads/top-level")
		if err != nil {
			t.Fatalf("Resolve(top-level): %v", err)
		}

		sym, ok := got.(ref.Symbolic)
		if !ok {
			t.Fatalf("Resolve(top-level) type = %T, want ref.Symbolic", got)
		}

		if sym.Target != "ORIG_HEAD" {
			t.Fatalf("top-level target = %q, want %q", sym.Target, "ORIG_HEAD")
		}
	})
}
--- /dev/null
+++ b/refstore/files/transaction_prepare.go
@@ -1,0 +1,292 @@
+package files
+
+import (
+ "errors"
+ "fmt"
+ "slices"
+ "strings"
+
+ "codeberg.org/lindenii/furgit/ref"
+ "codeberg.org/lindenii/furgit/ref/refname"
+ "codeberg.org/lindenii/furgit/refstore"
+)
+
// prepare turns the queued operations into a locked, verified plan.
//
// It resolves each operation to a concrete write target, rejects
// duplicate targets, partitions operations into writes and deletes,
// checks every written name against existing references and other
// queued writes, then takes one lock file per target (in sorted key
// order) plus the packed-refs lock when deletes are present, and
// finally re-reads and verifies each target's current state under
// lock. On any error the deferred cleanup removes locks already taken.
func (tx *Transaction) prepare() (prepared []preparedTxOp, err error) {
	prepared = make([]preparedTxOp, 0, len(tx.ops))

	defer func() {
		if err != nil {
			_ = tx.cleanup(prepared)
		}
	}()

	targets := make(map[string]struct{}, len(tx.ops))

	for _, op := range tx.ops {
		target, err := tx.resolveTarget(op)
		if err != nil {
			return prepared, err
		}

		// Two operations landing on the same resolved location would
		// race against each other within the transaction.
		targetKey := tx.targetKey(target.loc)
		if _, exists := targets[targetKey]; exists {
			return prepared, fmt.Errorf("refstore/files: duplicate transaction operation for %q", target.name)
		}

		targets[targetKey] = struct{}{}

		prepared = append(prepared, preparedTxOp{
			op:     op,
			target: target,
		})
	}

	deleted := make(map[string]struct{})
	written := make([]string, 0, len(prepared))

	for _, item := range prepared {
		switch item.op.kind {
		case txDelete, txDeleteSymbolic:
			deleted[item.target.name] = struct{}{}
		case txCreate, txUpdate, txCreateSymbolic, txUpdateSymbolic:
			written = append(written, item.target.name)
		case txVerify, txVerifySymbolic:
		}
	}

	existing, err := tx.visibleNames()
	if err != nil {
		return prepared, err
	}

	// Directory/file conflicts: a write must not collide with any
	// existing reference (unless it is being deleted in this same
	// transaction) nor with another queued write.
	for _, name := range written {
		err = verifyRefnameAvailable(name, existing, written, deleted)
		if err != nil {
			return prepared, err
		}
	}

	lockNames := make([]string, 0, len(prepared))
	for _, item := range prepared {
		lockNames = append(lockNames, tx.targetKey(item.target.loc))
	}

	// Sorted acquisition gives a consistent lock order across
	// concurrent transactions.
	slices.Sort(lockNames)

	for _, lockKey := range lockNames {
		err = tx.createLock(refPathFromKey(lockKey))
		if err != nil {
			return prepared, err
		}
	}

	hasDeletes := len(deleted) > 0
	if hasDeletes {
		err = tx.createPackedLock()
		if err != nil {
			return prepared, err
		}
	}

	// With locks held, re-read each target and check its current
	// value against the operation's expectations.
	for i := range prepared {
		item := &prepared[i]

		refState, err := tx.directRead(item.target.name)
		if err != nil {
			return prepared, err
		}

		item.target.ref = refState

		err = tx.verifyCurrent(*item)
		if err != nil {
			return prepared, err
		}
	}

	return prepared, nil
}
+
+func (tx *Transaction) resolveTarget(op txOp) (resolvedWriteTarget, error) {+ switch op.kind {+ case txCreate:
+ return tx.resolveOrdinaryTarget(op.name, true)
+ case txUpdate, txDelete, txVerify:
+ return tx.resolveOrdinaryTarget(op.name, false)
+ case txCreateSymbolic, txUpdateSymbolic, txDeleteSymbolic, txVerifySymbolic:
+ refState, err := tx.directRead(op.name)
+ if err != nil {+ return resolvedWriteTarget{}, err+ }
+
+ return resolvedWriteTarget{name: op.name, loc: tx.store.loosePath(op.name), ref: refState}, nil+ default:
+ return resolvedWriteTarget{}, fmt.Errorf("refstore/files: unsupported transaction operation %d", op.kind)+ }
+}
+
// resolveOrdinaryTarget follows symbolic references starting at name
// until it reaches a detached or missing reference — the place a
// direct (object-ID) operation will actually write. allowMissing
// permits the chain to end at a nonexistent reference, which creation
// needs. Cycles are detected via the seen set.
func (tx *Transaction) resolveOrdinaryTarget(name string, allowMissing bool) (resolvedWriteTarget, error) {
	cur := name
	seen := make(map[string]struct{})

	for {
		if _, ok := seen[cur]; ok {
			return resolvedWriteTarget{}, fmt.Errorf("refstore/files: symbolic reference cycle at %q", cur)
		}

		seen[cur] = struct{}{}

		refState, err := tx.directRead(cur)
		if err != nil {
			return resolvedWriteTarget{}, err
		}

		switch refState.kind {
		case directMissing:
			if !allowMissing {
				return resolvedWriteTarget{}, refstore.ErrReferenceNotFound
			}

			return resolvedWriteTarget{name: cur, loc: tx.store.loosePath(cur), ref: refState}, nil
		case directDetached:
			return resolvedWriteTarget{name: cur, loc: tx.store.loosePath(cur), ref: refState}, nil
		case directSymbolic:
			target := strings.TrimSpace(refState.target)
			if target == "" {
				return resolvedWriteTarget{}, fmt.Errorf("refstore/files: symbolic reference %q has empty target", cur)
			}

			// Keep following the chain.
			cur = target
		default:
			return resolvedWriteTarget{}, fmt.Errorf("refstore/files: unsupported direct reference state %d", refState.kind)
		}
	}
}
+
// directRead reads reference name without following symbolic
// references. A loose file takes precedence over a packed-refs entry;
// packed-refs is only consulted for shared (common-root) references.
// The returned isLoose/isPacked flags record where the reference was
// found, so Commit can later remove the right copies.
func (tx *Transaction) directRead(name string) (directRef, error) {
	loc := tx.store.loosePath(name)
	hasPacked := false

	// Only shared references living in the common root can appear in
	// packed-refs.
	if loc.root == rootCommon && refname.ParseWorktree(name).Type == refname.WorktreeShared {
		packed, packedErr := tx.store.readPackedRefs()
		if packedErr != nil {
			return directRef{}, packedErr
		}

		_, hasPacked = packed.byName[name]
	}

	loose, err := tx.store.readLooseRef(name)
	if err == nil {
		switch loose := loose.(type) {
		case ref.Detached:
			return directRef{
				kind:     directDetached,
				name:     name,
				id:       loose.ID,
				isLoose:  true,
				isPacked: hasPacked,
			}, nil
		case ref.Symbolic:
			return directRef{
				kind:     directSymbolic,
				name:     name,
				target:   loose.Target,
				isLoose:  true,
				isPacked: hasPacked,
			}, nil
		default:
			return directRef{}, fmt.Errorf("refstore/files: unsupported reference type %T", loose)
		}
	}

	// A read error other than not-found is tolerated only when the
	// loose path is a directory (the empty-directory case); any other
	// failure is propagated as-is.
	if !errors.Is(err, refstore.ErrReferenceNotFound) {
		info, statErr := tx.store.rootFor(loc.root).Stat(loc.path)
		if statErr != nil || !info.IsDir() {
			return directRef{}, err
		}
	}

	// No usable loose file: fall back to the packed entry if present.
	if hasPacked {
		packed, packedErr := tx.store.readPackedRefs()
		if packedErr != nil {
			return directRef{}, packedErr
		}

		detached := packed.byName[name]

		return directRef{
			kind:     directDetached,
			name:     name,
			id:       detached.ID,
			isPacked: true,
		}, nil
	}

	return directRef{
		kind: directMissing,
		name: name,
	}, nil
}
+
+func (tx *Transaction) visibleNames() (map[string]struct{}, error) {+ names := make(map[string]struct{})+
+ looseNames, err := tx.store.collectLooseRefNames()
+ if err != nil {+ return nil, err
+ }
+
+ for _, name := range looseNames {+ names[name] = struct{}{}+ }
+
+ packed, err := tx.store.readPackedRefs()
+ if err != nil {+ return nil, err
+ }
+
+ for name := range packed.byName {+ if _, exists := names[name]; exists {+ continue
+ }
+
+ names[name] = struct{}{}+ }
+
+ return names, nil
+}
+
// verifyRefnameAvailable checks that name can be written without a
// directory/file collision: no existing reference (except ones being
// deleted in this transaction, or name itself) and no other queued
// write may be a path prefix of name, or vice versa.
func verifyRefnameAvailable(name string, existing map[string]struct{}, writes []string, deleted map[string]struct{}) error {
	for existingName := range existing {
		if existingName == name {
			continue
		}

		// References deleted in this transaction vacate their slot.
		if _, gone := deleted[existingName]; gone {
			continue
		}

		if refnamesConflict(name, existingName) {
			return fmt.Errorf("refstore/files: reference name conflict between %q and %q", name, existingName)
		}
	}

	for _, queued := range writes {
		if queued != name && refnamesConflict(name, queued) {
			return fmt.Errorf("refstore/files: reference name conflict between %q and %q", name, queued)
		}
	}

	return nil
}

// refnamesConflict reports whether two reference names cannot coexist
// in a files backend: equal names, or one being a slash-separated
// path prefix of the other (a file cannot also be a directory).
func refnamesConflict(left, right string) bool {
	if left == right {
		return true
	}

	return strings.HasPrefix(left, right+"/") || strings.HasPrefix(right, left+"/")
}
--- /dev/null
+++ b/refstore/files/transaction_pseudoref_test.go
@@ -1,0 +1,106 @@
+package files_test
+
+import (
+ "errors"
+ "testing"
+
+ "codeberg.org/lindenii/furgit/internal/testgit"
+ "codeberg.org/lindenii/furgit/objectid"
+ "codeberg.org/lindenii/furgit/refstore"
+)
+
// TestFilesTransactionPseudorefLifecycle exercises create, update,
// stale (wrong oldID) update, and delete on a top-level pseudoref,
// checking the resolved value after each successful step.
func TestFilesTransactionPseudorefLifecycle(t *testing.T) {
	t.Parallel()

	testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper
		testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true})
		_, _, aID := testRepo.MakeCommit(t, "A")
		_, _, bID := testRepo.MakeCommit(t, "B")
		_, _, cID := testRepo.MakeCommit(t, "C")

		store := openFilesStore(t, testRepo, algo)

		// Create PSEUDOREF -> A.
		tx, err := store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(create): %v", err)
		}

		err = tx.Create("PSEUDOREF", aID)
		if err != nil {
			t.Fatalf("Create(PSEUDOREF): %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(create PSEUDOREF): %v", err)
		}

		got, err := store.ResolveFully("PSEUDOREF")
		if err != nil {
			t.Fatalf("ResolveFully(PSEUDOREF): %v", err)
		}

		if got.ID != aID {
			t.Fatalf("PSEUDOREF after create = %s, want %s", got.ID, aID)
		}

		// Update A -> B with the correct old value.
		tx, err = store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(update): %v", err)
		}

		err = tx.Update("PSEUDOREF", bID, aID)
		if err != nil {
			t.Fatalf("Update(PSEUDOREF): %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(update PSEUDOREF): %v", err)
		}

		got, err = store.ResolveFully("PSEUDOREF")
		if err != nil {
			t.Fatalf("ResolveFully(PSEUDOREF) after update: %v", err)
		}

		if got.ID != bID {
			t.Fatalf("PSEUDOREF after update = %s, want %s", got.ID, bID)
		}

		// Stale update: oldID is A, but the ref now holds B, so the
		// commit must fail.
		tx, err = store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(stale update): %v", err)
		}

		err = tx.Update("PSEUDOREF", cID, aID)
		if err != nil {
			t.Fatalf("queue stale update: %v", err)
		}

		err = tx.Commit()
		if err == nil {
			t.Fatal("stale pseudoref update unexpectedly succeeded")
		}

		// Delete with the correct current value B.
		tx, err = store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(delete): %v", err)
		}

		err = tx.Delete("PSEUDOREF", bID)
		if err != nil {
			t.Fatalf("Delete(PSEUDOREF): %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(delete PSEUDOREF): %v", err)
		}

		_, err = store.Resolve("PSEUDOREF")
		if !errors.Is(err, refstore.ErrReferenceNotFound) {
			t.Fatalf("Resolve(PSEUDOREF after delete) err=%v", err)
		}
	})
}
--- /dev/null
+++ b/refstore/files/transaction_symbolic_test.go
@@ -1,0 +1,154 @@
+package files_test
+
+import (
+ "errors"
+ "testing"
+
+ "codeberg.org/lindenii/furgit/internal/testgit"
+ "codeberg.org/lindenii/furgit/objectid"
+ "codeberg.org/lindenii/furgit/refstore"
+)
+
// TestFilesTransactionDirectSymbolicDeletes checks that DeleteSymbolic
// removes a symbolic reference itself without following it: after
// deleting SYMREF -> refs/heads/main, SYMREF is gone but main still
// resolves to its original commit.
func TestFilesTransactionDirectSymbolicDeletes(t *testing.T) {
	t.Parallel()

	testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper
		testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true})
		_, _, mainID := testRepo.MakeCommit(t, "main")
		testRepo.UpdateRef(t, "refs/heads/main", mainID)

		store := openFilesStore(t, testRepo, algo)

		tx, err := store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(create symref): %v", err)
		}

		err = tx.CreateSymbolic("SYMREF", "refs/heads/main")
		if err != nil {
			t.Fatalf("CreateSymbolic(SYMREF): %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(CreateSymbolic SYMREF): %v", err)
		}

		tx, err = store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(delete symref): %v", err)
		}

		err = tx.DeleteSymbolic("SYMREF", "refs/heads/main")
		if err != nil {
			t.Fatalf("DeleteSymbolic(SYMREF): %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(DeleteSymbolic SYMREF): %v", err)
		}

		// The symref is gone...
		_, err = store.Resolve("SYMREF")
		if !errors.Is(err, refstore.ErrReferenceNotFound) {
			t.Fatalf("Resolve(SYMREF after delete) err=%v", err)
		}

		// ...but its former target must be untouched.
		got, err := store.ResolveFully("refs/heads/main")
		if err != nil {
			t.Fatalf("ResolveFully(main): %v", err)
		}

		if got.ID != mainID {
			t.Fatalf("main after DeleteSymbolic = %s, want %s", got.ID, mainID)
		}
	})
}
+
// TestFilesTransactionSelfAndDanglingSymrefs checks degenerate
// symbolic references: a self-referential symref cannot be deleted
// via the direct Delete path (the resolution cycle must fail) but can
// be removed with DeleteSymbolic; a symref pointing at a nonexistent
// reference can likewise be created and deleted symbolically.
func TestFilesTransactionSelfAndDanglingSymrefs(t *testing.T) {
	t.Parallel()

	testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper
		testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true})
		_, _, mainID := testRepo.MakeCommit(t, "main")
		testRepo.UpdateRef(t, "refs/heads/main", mainID)

		store := openFilesStore(t, testRepo, algo)

		// Create self -> self.
		tx, err := store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(create self): %v", err)
		}

		err = tx.CreateSymbolic("refs/heads/self", "refs/heads/self")
		if err != nil {
			t.Fatalf("CreateSymbolic(self): %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(CreateSymbolic self): %v", err)
		}

		// A direct Delete must fail, either at queue time or at
		// commit time, because resolving the chain cycles.
		tx, err = store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(delete logical self): %v", err)
		}

		err = tx.Delete("refs/heads/self", mainID)
		if err == nil {
			err = tx.Commit()
		} else {
			_ = tx.Abort()
		}

		if err == nil {
			t.Fatal("Delete(self) unexpectedly succeeded")
		}

		// DeleteSymbolic acts on the symref itself and must succeed.
		tx, err = store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(delete symbolic self): %v", err)
		}

		err = tx.DeleteSymbolic("refs/heads/self", "refs/heads/self")
		if err != nil {
			t.Fatalf("DeleteSymbolic(self): %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(DeleteSymbolic self): %v", err)
		}

		// Dangling symref: target does not exist.
		tx, err = store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(create dangling): %v", err)
		}

		err = tx.CreateSymbolic("refs/heads/dangling", "refs/heads/missing")
		if err != nil {
			t.Fatalf("CreateSymbolic(dangling): %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(CreateSymbolic dangling): %v", err)
		}

		tx, err = store.BeginTransaction()
		if err != nil {
			t.Fatalf("BeginTransaction(delete dangling): %v", err)
		}

		err = tx.DeleteSymbolic("refs/heads/dangling", "refs/heads/missing")
		if err != nil {
			t.Fatalf("DeleteSymbolic(dangling): %v", err)
		}

		err = tx.Commit()
		if err != nil {
			t.Fatalf("Commit(DeleteSymbolic dangling): %v", err)
		}
	})
}
--- /dev/null
+++ b/refstore/files/transaction_update_test.go
@@ -1,0 +1,178 @@
+package files_test
+
+import (
+ "errors"
+ "strings"
+ "testing"
+
+ "codeberg.org/lindenii/furgit/internal/testgit"
+ "codeberg.org/lindenii/furgit/objectid"
+ "codeberg.org/lindenii/furgit/ref"
+ "codeberg.org/lindenii/furgit/refstore"
+)
+
+func TestFilesTransactionPackedUpdateCreatesLooseOverride(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true})+ _, _, oldID := testRepo.MakeCommit(t, "old packed")
+ _, _, newID := testRepo.MakeCommit(t, "new loose")
+ testRepo.UpdateRef(t, "refs/heads/main", oldID)
+ testRepo.PackRefs(t, "--all", "--prune")
+
+ store := openFilesStore(t, testRepo, algo)
+
+ tx, err := store.BeginTransaction()
+ if err != nil {+ t.Fatalf("BeginTransaction: %v", err)+ }
+
+ err = tx.Update("refs/heads/main", newID, oldID)+ if err != nil {+ t.Fatalf("Update queue: %v", err)+ }
+
+ err = tx.Commit()
+ if err != nil {+ t.Fatalf("Commit: %v", err)+ }
+
+ got, err := store.ResolveFully("refs/heads/main")+ if err != nil {+ t.Fatalf("ResolveFully(main): %v", err)+ }
+
+ if got.ID != newID {+ t.Fatalf("ResolveFully(main) = %s, want %s", got.ID, newID)+ }
+
+ packedRefs := string(testRepo.ReadFile(t, "packed-refs"))
+ if !strings.Contains(packedRefs, oldID.String()+" refs/heads/main\n") {+ t.Fatalf("packed-refs lost old packed main entry:\n%s", packedRefs)+ }
+
+ looseMain := string(testRepo.ReadFile(t, "refs/heads/main"))
+ if strings.TrimSpace(looseMain) != newID.String() {+ t.Fatalf("loose refs/heads/main = %q, want %q", strings.TrimSpace(looseMain), newID.String())+ }
+ })
+}
+
+func TestFilesTransactionDeletesPackedAndLooseRefs(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true})+ _, _, packedOnlyID := testRepo.MakeCommit(t, "packed only")
+ _, _, bothID := testRepo.MakeCommit(t, "both")
+ testRepo.UpdateRef(t, "refs/heads/packed", packedOnlyID)
+ testRepo.UpdateRef(t, "refs/heads/both", bothID)
+ testRepo.PackRefs(t, "--all", "--prune")
+ testRepo.UpdateRef(t, "refs/heads/both", bothID)
+
+ store := openFilesStore(t, testRepo, algo)
+
+ tx, err := store.BeginTransaction()
+ if err != nil {+ t.Fatalf("BeginTransaction: %v", err)+ }
+
+ err = tx.Delete("refs/heads/packed", packedOnlyID)+ if err != nil {+ t.Fatalf("Delete(packed): %v", err)+ }
+
+ err = tx.Delete("refs/heads/both", bothID)+ if err != nil {+ t.Fatalf("Delete(both): %v", err)+ }
+
+ err = tx.Commit()
+ if err != nil {+ t.Fatalf("Commit(delete): %v", err)+ }
+
+ _, err = store.Resolve("refs/heads/packed")+ if !errors.Is(err, refstore.ErrReferenceNotFound) {+ t.Fatalf("Resolve(packed after delete) error = %v", err)+ }
+
+ _, err = store.Resolve("refs/heads/both")+ if !errors.Is(err, refstore.ErrReferenceNotFound) {+ t.Fatalf("Resolve(both after delete) error = %v", err)+ }
+
+ packedRefs := string(testRepo.ReadFile(t, "packed-refs"))
+ if strings.Contains(packedRefs, "refs/heads/packed\n") || strings.Contains(packedRefs, "refs/heads/both\n") {+ t.Fatalf("packed-refs still contains deleted refs:\n%s", packedRefs)+ }
+ })
+}
+
+func TestFilesTransactionDerefAndDirectSymbolic(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, Bare: true})+ _, _, firstID := testRepo.MakeCommit(t, "first")
+ _, _, secondID := testRepo.MakeCommit(t, "second")
+ testRepo.UpdateRef(t, "refs/heads/main", firstID)
+ testRepo.SymbolicRef(t, "HEAD", "refs/heads/main")
+
+ store := openFilesStore(t, testRepo, algo)
+
+ tx, err := store.BeginTransaction()
+ if err != nil {+ t.Fatalf("BeginTransaction(update): %v", err)+ }
+
+ err = tx.Update("HEAD", secondID, firstID)+ if err != nil {+ t.Fatalf("Update(HEAD): %v", err)+ }
+
+ err = tx.Commit()
+ if err != nil {+ t.Fatalf("Commit(update HEAD): %v", err)+ }
+
+ mainRef, err := store.ResolveFully("refs/heads/main")+ if err != nil {+ t.Fatalf("ResolveFully(main): %v", err)+ }
+
+ if mainRef.ID != secondID {+ t.Fatalf("main after Update(HEAD) = %s, want %s", mainRef.ID, secondID)+ }
+
+ tx, err = store.BeginTransaction()
+ if err != nil {+ t.Fatalf("BeginTransaction(update symbolic): %v", err)+ }
+
+ err = tx.UpdateSymbolic("HEAD", "refs/heads/next", "refs/heads/main")+ if err != nil {+ t.Fatalf("UpdateSymbolic(HEAD): %v", err)+ }
+
+ err = tx.Commit()
+ if err != nil {+ t.Fatalf("Commit(update symbolic HEAD): %v", err)+ }
+
+ headRef, err := store.Resolve("HEAD")+ if err != nil {+ t.Fatalf("Resolve(HEAD): %v", err)+ }
+
+ headSym, ok := headRef.(ref.Symbolic)
+ if !ok {+ t.Fatalf("Resolve(HEAD) type = %T, want ref.Symbolic", headRef)+ }
+
+ if headSym.Target != "refs/heads/next" {+ t.Fatalf("HEAD target = %q, want %q", headSym.Target, "refs/heads/next")+ }
+ })
+}
--- /dev/null
+++ b/refstore/files/transaction_write.go
@@ -1,0 +1,199 @@
+package files
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path"
+ "strings"
+)
+
+func (tx *Transaction) verifyCurrent(item preparedTxOp) error {+ switch item.op.kind {+ case txCreate:
+ if item.target.ref.kind != directMissing {+ return fmt.Errorf("refstore/files: reference %q already exists", item.target.name)+ }
+
+ return nil
+ case txUpdate, txDelete, txVerify:
+ if item.target.ref.kind == directMissing {+ return fmt.Errorf("refstore/files: reference %q is missing", item.target.name)+ }
+
+ if item.target.ref.kind != directDetached {+ return fmt.Errorf("refstore/files: reference %q is not detached", item.target.name)+ }
+
+ if item.target.ref.id != item.op.oldID {+ return fmt.Errorf("refstore/files: reference %q is at %s but expected %s", item.target.name, item.target.ref.id, item.op.oldID)+ }
+
+ return nil
+ case txCreateSymbolic:
+ if item.target.ref.kind != directMissing {+ return fmt.Errorf("refstore/files: reference %q already exists", item.target.name)+ }
+
+ return nil
+ case txUpdateSymbolic, txDeleteSymbolic, txVerifySymbolic:
+ if item.target.ref.kind == directMissing {+ return fmt.Errorf("refstore/files: symbolic reference %q is missing", item.target.name)+ }
+
+ if item.target.ref.kind != directSymbolic {+ return fmt.Errorf("refstore/files: reference %q is not symbolic", item.target.name)+ }
+
+ if strings.TrimSpace(item.target.ref.target) != strings.TrimSpace(item.op.oldTarget) {+ return fmt.Errorf("refstore/files: reference %q points at %q, expected %q", item.target.name, item.target.ref.target, item.op.oldTarget)+ }
+
+ return nil
+ default:
+ return fmt.Errorf("refstore/files: unsupported transaction operation %d", item.op.kind)+ }
+}
+
// writeLoose materializes one queued operation as a loose ref file by writing
// the new content into the already-held <ref>.lock file and renaming it over
// the ref path.
//
// NOTE(review): the lock file is opened without O_CREATE, so it must have been
// created by an earlier locking phase of the transaction — confirm.
// NOTE(review): for txDelete/txVerify/txDeleteSymbolic/txVerifySymbolic the
// content stays empty yet the lock is still renamed over the ref path, which
// would leave an empty loose file — confirm callers never route deletions
// here, or that the empty file is an intentional intermediate state.
func (tx *Transaction) writeLoose(item preparedTxOp) error {
	// Pick the root (common dir vs. per-worktree dir) the ref location names.
	root := tx.store.rootFor(item.target.loc.root)
	lockName := item.target.loc.path + ".lock"

	// Truncate the pre-existing lock file; do not create it here.
	lock, err := root.OpenFile(lockName, os.O_WRONLY|os.O_TRUNC, 0o644)
	if err != nil {
		return err
	}

	var content string

	switch item.op.kind {
	case txCreate, txUpdate:
		// Detached ref: the object ID followed by a newline.
		content = item.op.newID.String() + "\n"
	case txCreateSymbolic, txUpdateSymbolic:
		// Symbolic ref: "ref: <target>\n".
		content = "ref: " + strings.TrimSpace(item.op.newTarget) + "\n"
	case txDelete, txVerify, txDeleteSymbolic, txVerifySymbolic:
		// Intentionally empty content (see NOTE above).
	default:
		_ = lock.Close()

		return fmt.Errorf("refstore/files: unsupported write operation %d", item.op.kind)
	}

	_, err = lock.WriteString(content)
	if err != nil {
		_ = lock.Close()

		return err
	}

	err = lock.Close()
	if err != nil {
		return err
	}

	// Ensure the parent directory of the final ref path exists.
	dir := path.Dir(item.target.loc.path)
	if dir != "." {
		err = root.MkdirAll(dir, 0o755)
		if err != nil {
			return err
		}
	}

	// NOTE(review): removeEmptyDirTree runs before the rename — presumably it
	// clears an empty directory occupying the ref's final path; verify it
	// cannot remove the directory that holds the lock file itself.
	err = tx.removeEmptyDirTree(item.target.loc)
	if err != nil {
		return err
	}

	// Atomically publish the new content by renaming the lock over the ref.
	return root.Rename(lockName, item.target.loc.path)
}
+
// applyPackedDeletes rewrites packed-refs without the entries deleted by this
// transaction. It is a no-op when packed-refs.lock is absent (presumably that
// means the transaction never acquired the packed-refs lock — confirm against
// the locking phase) or when no deleted ref actually has a packed entry.
func (tx *Transaction) applyPackedDeletes(prepared []preparedTxOp) error {
	// Only proceed when the packed-refs lock is held (exists on disk).
	_, err := tx.store.commonRoot.Stat("packed-refs.lock")
	if err != nil {
		if errors.Is(err, os.ErrNotExist) {
			return nil
		}

		return err
	}

	packed, err := tx.store.readPackedRefs()
	if err != nil {
		return err
	}

	// Collect the names of every deleted ref; only trigger a rewrite if at
	// least one of them is actually present in packed-refs.
	deleted := make(map[string]struct{})
	needed := false

	for _, item := range prepared {
		if item.op.kind != txDelete && item.op.kind != txDeleteSymbolic {
			continue
		}

		deleted[item.target.name] = struct{}{}
		if item.target.ref.isPacked {
			needed = true
		}
	}

	if !needed {
		return nil
	}

	// Write the filtered file to a temp name, then rename it into place.
	// NOTE(review): O_EXCL means a stale packed-refs.new left by a crashed
	// run will make this fail until cleaned up — confirm that is acceptable.
	lock, err := tx.store.commonRoot.OpenFile("packed-refs.new", os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0o644)
	if err != nil {
		return err
	}

	createdTemp := true

	// Remove the temp file on any failure path; cleared after the rename.
	defer func() {
		if !createdTemp {
			return
		}

		_ = tx.store.commonRoot.Remove("packed-refs.new")
	}()

	_, err = lock.WriteString("# pack-refs with: peeled fully-peeled sorted\n")
	if err != nil {
		_ = lock.Close()

		return err
	}

	// Copy every surviving entry, preserving the original order and any
	// peeled ("^<id>") follow-up lines.
	for _, entry := range packed.ordered {
		if _, skip := deleted[entry.Name()]; skip {
			continue
		}

		_, err = lock.WriteString(entry.ID.String() + " " + entry.Name() + "\n")
		if err != nil {
			_ = lock.Close()

			return err
		}

		if entry.Peeled != nil {
			_, err = lock.WriteString("^" + entry.Peeled.String() + "\n")
			if err != nil {
				_ = lock.Close()

				return err
			}
		}
	}

	err = lock.Close()
	if err != nil {
		return err
	}

	// Atomically replace packed-refs with the rewritten file.
	err = tx.store.commonRoot.Rename("packed-refs.new", "packed-refs")
	if err != nil {
		return err
	}

	createdTemp = false

	return nil
}
--- /dev/null
+++ b/refstore/files/worktree_test.go
@@ -1,0 +1,206 @@
+package files_test
+
+import (
+ "errors"
+ "slices"
+ "testing"
+
+ "codeberg.org/lindenii/furgit/internal/testgit"
+ "codeberg.org/lindenii/furgit/objectid"
+ "codeberg.org/lindenii/furgit/refstore"
+)
+
+func TestFilesWorktreeRefsMatchGit(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, RefFormat: "files"})+
+ testRepo.Run(t, "commit", "--allow-empty", "-m", "initial")
+
+ initialID, err := objectid.ParseHex(algo, testRepo.Run(t, "rev-parse", "HEAD"))
+ if err != nil {+ t.Fatalf("ParseHex(initial HEAD): %v", err)+ }
+
+ testRepo.Run(t, "branch", "wt1", initialID.String())
+ testRepo.Run(t, "branch", "wt2", initialID.String())
+ testRepo.Run(t, "worktree", "add", "wt1", "wt1")
+ testRepo.Run(t, "worktree", "add", "wt2", "wt2")
+
+ testRepo.Run(t, "-C", "wt1", "commit", "--allow-empty", "-m", "wt1")
+ testRepo.Run(t, "-C", "wt2", "commit", "--allow-empty", "-m", "wt2")
+
+ wt1ID, err := objectid.ParseHex(algo, testRepo.Run(t, "-C", "wt1", "rev-parse", "HEAD"))
+ if err != nil {+ t.Fatalf("ParseHex(wt1 HEAD): %v", err)+ }
+
+ wt2ID, err := objectid.ParseHex(algo, testRepo.Run(t, "-C", "wt2", "rev-parse", "HEAD"))
+ if err != nil {+ t.Fatalf("ParseHex(wt2 HEAD): %v", err)+ }
+
+ testRepo.UpdateRef(t, "refs/worktree/foo", initialID)
+ testRepo.Run(t, "-C", "wt1", "update-ref", "refs/worktree/foo", wt1ID.String())
+ testRepo.Run(t, "-C", "wt2", "update-ref", "refs/worktree/foo", wt2ID.String())
+
+ mainStore := openFilesStore(t, testRepo, algo)
+ repoRoot := testRepo.OpenRoot(t)
+ wt1Store := openFilesStoreAt(t, openGitRootUnder(t, repoRoot, "wt1"), algo)
+ wt2Store := openFilesStoreAt(t, openGitRootUnder(t, repoRoot, "wt2"), algo)
+
+ got, err := mainStore.ResolveFully("refs/worktree/foo")+ if err != nil {+ t.Fatalf("ResolveFully(main refs/worktree/foo): %v", err)+ }
+
+ if got.ID != initialID {+ t.Fatalf("ResolveFully(main refs/worktree/foo) = %s, want %s", got.ID, initialID)+ }
+
+ got, err = wt1Store.ResolveFully("refs/worktree/foo")+ if err != nil {+ t.Fatalf("ResolveFully(wt1 refs/worktree/foo): %v", err)+ }
+
+ if got.ID != wt1ID {+ t.Fatalf("ResolveFully(wt1 refs/worktree/foo) = %s, want %s", got.ID, wt1ID)+ }
+
+ got, err = wt2Store.ResolveFully("refs/worktree/foo")+ if err != nil {+ t.Fatalf("ResolveFully(wt2 refs/worktree/foo): %v", err)+ }
+
+ if got.ID != wt2ID {+ t.Fatalf("ResolveFully(wt2 refs/worktree/foo) = %s, want %s", got.ID, wt2ID)+ }
+
+ got, err = wt1Store.ResolveFully("main-worktree/HEAD")+ if err != nil {+ t.Fatalf("ResolveFully(wt1 main-worktree/HEAD): %v", err)+ }
+
+ if got.ID != initialID {+ t.Fatalf("ResolveFully(wt1 main-worktree/HEAD) = %s, want %s", got.ID, initialID)+ }
+
+ got, err = mainStore.ResolveFully("worktrees/wt1/HEAD")+ if err != nil {+ t.Fatalf("ResolveFully(main worktrees/wt1/HEAD): %v", err)+ }
+
+ if got.ID != wt1ID {+ t.Fatalf("ResolveFully(main worktrees/wt1/HEAD) = %s, want %s", got.ID, wt1ID)+ }
+
+ got, err = wt2Store.ResolveFully("worktrees/wt1/HEAD")+ if err != nil {+ t.Fatalf("ResolveFully(wt2 worktrees/wt1/HEAD): %v", err)+ }
+
+ if got.ID != wt1ID {+ t.Fatalf("ResolveFully(wt2 worktrees/wt1/HEAD) = %s, want %s", got.ID, wt1ID)+ }
+
+ assertListMatchesGitForEachRef(t, testRepo.Run(t, "for-each-ref", "--format=%(refname)"), mainStore)
+ assertListMatchesGitForEachRef(t, testRepo.Run(t, "-C", "wt1", "for-each-ref", "--format=%(refname)"), wt1Store)
+ })
+}
+
+func TestFilesTransactionPerWorktreeRefsMatchGit(t *testing.T) {+ t.Parallel()
+
+ testgit.ForEachAlgorithm(t, func(t *testing.T, algo objectid.Algorithm) { //nolint:thelper+ testRepo := testgit.NewRepo(t, testgit.RepoOptions{ObjectFormat: algo, RefFormat: "files"})+ testRepo.Run(t, "commit", "--allow-empty", "-m", "initial")
+ testRepo.Run(t, "branch", "wt1", "HEAD")
+ testRepo.Run(t, "worktree", "add", "wt1", "wt1")
+
+ mainID, err := objectid.ParseHex(algo, testRepo.Run(t, "rev-parse", "HEAD"))
+ if err != nil {+ t.Fatalf("ParseHex(main HEAD): %v", err)+ }
+
+ testRepo.Run(t, "-C", "wt1", "commit", "--allow-empty", "-m", "wt1")
+
+ wt1ID, err := objectid.ParseHex(algo, testRepo.Run(t, "-C", "wt1", "rev-parse", "HEAD"))
+ if err != nil {+ t.Fatalf("ParseHex(wt1 HEAD): %v", err)+ }
+
+ mainStore := openFilesStore(t, testRepo, algo)
+ repoRoot := testRepo.OpenRoot(t)
+ wt1Store := openFilesStoreAt(t, openGitRootUnder(t, repoRoot, "wt1"), algo)
+
+ mainTx, err := mainStore.BeginTransaction()
+ if err != nil {+ t.Fatalf("BeginTransaction(main): %v", err)+ }
+
+ err = mainTx.Create("refs/bisect/main-only", mainID)+ if err != nil {+ t.Fatalf("Create(main-only) queue: %v", err)+ }
+
+ err = mainTx.Commit()
+ if err != nil {+ t.Fatalf("Commit(main-only): %v", err)+ }
+
+ wtTx, err := wt1Store.BeginTransaction()
+ if err != nil {+ t.Fatalf("BeginTransaction(wt1): %v", err)+ }
+
+ err = wtTx.Create("refs/bisect/wt-only", wt1ID)+ if err != nil {+ t.Fatalf("Create(wt-only) queue: %v", err)+ }
+
+ err = wtTx.Commit()
+ if err != nil {+ t.Fatalf("Commit(wt-only): %v", err)+ }
+
+ got, err := mainStore.ResolveFully("refs/bisect/main-only")+ if err != nil {+ t.Fatalf("ResolveFully(main-only): %v", err)+ }
+
+ if got.ID != mainID {+ t.Fatalf("ResolveFully(main-only) = %s, want %s", got.ID, mainID)+ }
+
+ got, err = wt1Store.ResolveFully("refs/bisect/wt-only")+ if err != nil {+ t.Fatalf("ResolveFully(wt-only): %v", err)+ }
+
+ if got.ID != wt1ID {+ t.Fatalf("ResolveFully(wt-only) = %s, want %s", got.ID, wt1ID)+ }
+
+ _, err = mainStore.Resolve("refs/bisect/wt-only")+ if !errors.Is(err, refstore.ErrReferenceNotFound) {+ t.Fatalf("Resolve(main sees wt-only) error = %v, want ErrReferenceNotFound", err)+ }
+
+ _, err = wt1Store.Resolve("refs/bisect/main-only")+ if !errors.Is(err, refstore.ErrReferenceNotFound) {+ t.Fatalf("Resolve(wt sees main-only) error = %v, want ErrReferenceNotFound", err)+ }
+
+ mainRefs := forEachRefLines(testRepo.Run(t, "for-each-ref", "--format=%(refname)", "refs/bisect"))
+
+ wtRefs := forEachRefLines(testRepo.Run(t, "-C", "wt1", "for-each-ref", "--format=%(refname)", "refs/bisect"))
+ if !slices.Equal(mainRefs, []string{"refs/bisect/main-only"}) {+ t.Fatalf("main for-each-ref refs/bisect = %v", mainRefs)+ }
+
+ if !slices.Equal(wtRefs, []string{"refs/bisect/wt-only"}) {+ t.Fatalf("wt1 for-each-ref refs/bisect = %v", wtRefs)+ }
+ })
+}
--- a/repository/refs.go
+++ b/repository/refs.go
@@ -1,50 +1,29 @@
package repository
import (
- "errors"
"fmt"
"os"
"codeberg.org/lindenii/furgit/objectid"
"codeberg.org/lindenii/furgit/refstore"
- refchain "codeberg.org/lindenii/furgit/refstore/chain"
- refloose "codeberg.org/lindenii/furgit/refstore/loose"
- refpacked "codeberg.org/lindenii/furgit/refstore/packed"
+ reffiles "codeberg.org/lindenii/furgit/refstore/files"
)
//nolint:ireturn
func openRefStore(root *os.Root, algo objectid.Algorithm) (out refstore.ReadingStore, err error) {- looseRoot, err := root.OpenRoot(".")+ refRoot, err := root.OpenRoot(".") if err != nil {- return nil, fmt.Errorf("repository: open root for loose refs: %w", err)+ return nil, fmt.Errorf("repository: open root for refs: %w", err)}
- looseStore, err := refloose.New(looseRoot, algo)
+ store, err := reffiles.New(refRoot, algo)
if err != nil {- _ = looseRoot.Close()
+ _ = refRoot.Close()
return nil, err
}
- backends := []refstore.ReadingStore{looseStore}-
- _, err = root.Stat("packed-refs")- if err == nil {- packedStore, packedErr := refpacked.New(root, algo)
- if packedErr != nil {- _ = looseStore.Close()
-
- return nil, packedErr
- }
-
- backends = append(backends, packedStore)
- } else if !errors.Is(err, os.ErrNotExist) {- _ = looseStore.Close()
-
- return nil, fmt.Errorf("repository: stat packed-refs: %w", err)- }
-
- return refchain.New(backends...), nil
+ return store, nil
}
// Refs returns the configured ref store.
--
⑨