From ff4ec22d3b789d79dffd3a0191b464eb0388380a Mon Sep 17 00:00:00 2001 From: perber Date: Tue, 6 Jan 2026 17:32:34 +0100 Subject: [PATCH 01/11] feat: write frontmatter (#563) --- go.mod | 1 + go.sum | 9 + internal/core/tree/errors.go | 1 + internal/core/tree/frontmatter.go | 145 +++++++++ internal/core/tree/frontmatter_test.go | 399 ++++++++++++++++++++++++ internal/core/tree/page_store.go | 35 ++- internal/core/tree/page_store_test.go | 8 +- internal/core/tree/tree_service_test.go | 6 +- 8 files changed, 590 insertions(+), 14 deletions(-) create mode 100644 internal/core/tree/frontmatter.go create mode 100644 internal/core/tree/frontmatter_test.go diff --git a/go.mod b/go.mod index 27af2754..84504c29 100644 --- a/go.mod +++ b/go.mod @@ -33,6 +33,7 @@ require ( github.com/gosimple/unidecode v1.0.1 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/kr/text v0.2.0 // indirect github.com/leodido/go-urn v1.4.0 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect diff --git a/go.sum b/go.sum index 0b866873..0dfcad92 100644 --- a/go.sum +++ b/go.sum @@ -6,6 +6,7 @@ github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZw github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -52,6 +53,10 @@ github.com/json-iterator/go v1.1.12 
h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnr github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= @@ -75,6 +80,8 @@ github.com/quic-go/quic-go v0.57.0 h1:AsSSrrMs4qI/hLrKlTH/TGQeTMY0ib1pAOX7vA3Adq github.com/quic-go/quic-go v0.57.0/go.mod h1:ly4QBAjHA2VhdnxhojRsCUOeJwKYg+taDlos92xb1+s= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= +github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -120,6 +127,8 @@ golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc google.golang.org/protobuf v1.36.9 h1:w2gp2mA27hUeUzj9Ex9FBjsBm40zfaDtEWow293U7Iw= google.golang.org/protobuf v1.36.9/go.mod 
h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/core/tree/errors.go b/internal/core/tree/errors.go index 5572d618..a0189925 100644 --- a/internal/core/tree/errors.go +++ b/internal/core/tree/errors.go @@ -10,3 +10,4 @@ var ErrPageAlreadyExists = errors.New("page already exists") var ErrMovePageCircularReference = errors.New("circular reference detected") var ErrPageCannotBeMovedToItself = errors.New("page cannot be moved to itself") var ErrInvalidSortOrder = errors.New("invalid sort order") +var ErrFrontmatterParse = errors.New("frontmatter parse error") diff --git a/internal/core/tree/frontmatter.go b/internal/core/tree/frontmatter.go new file mode 100644 index 00000000..defee8b5 --- /dev/null +++ b/internal/core/tree/frontmatter.go @@ -0,0 +1,145 @@ +package tree + +import ( + "bytes" + "errors" + "strings" + + yaml "gopkg.in/yaml.v3" +) + +type Frontmatter struct { + LeafWikiID string `yaml:"leafwiki_id,omitempty" json:"id,omitempty"` + LeafWikiTitle string `yaml:"leafwiki_title,omitempty" json:"title,omitempty"` +} + +func SplitFrontmatter(md string) (yamlPart string, body string, has bool) { + // BOM-safe + normalize newlines + s := strings.TrimPrefix(md, "\ufeff") + s = strings.ReplaceAll(s, "\r\n", "\n") + s = strings.ReplaceAll(s, "\r", "\n") + + // Must start with '---' on the very first line + if !(s == "---" || strings.HasPrefix(s, "---\n")) { + return "", md, false 
+ } + + // Find end of first line + firstNL := strings.IndexByte(s, '\n') + if firstNL == -1 { + // it's exactly "---" (or a single-line file) + return "", md, false + } + if strings.TrimSpace(s[:firstNL]) != "---" { + return "", md, false + } + + // Find closing delimiter on its own line: "\n---\n" or "\n---" at EOF + // We'll scan line-by-line using indices. + pos := firstNL + 1 + yamlStart := pos + + endDelimLineStart := -1 + endDelimLineEnd := -1 + + looksLikeYAML := false + + for pos <= len(s) { + // find end of current line + nextNL := strings.IndexByte(s[pos:], '\n') + var line string + var lineEnd int + if nextNL == -1 { + // last line + lineEnd = len(s) + line = s[pos:lineEnd] + } else { + lineEnd = pos + nextNL + line = s[pos:lineEnd] + } + + trim := strings.TrimSpace(line) + if trim == "---" { + endDelimLineStart = pos + endDelimLineEnd = lineEnd + break + } + + // Heuristic: at least one "key:" line => treat as YAML frontmatter + // Skip blanks/comments + if trim != "" && !strings.HasPrefix(trim, "#") { + if idx := strings.IndexByte(trim, ':'); idx > 0 { + key := strings.TrimSpace(trim[:idx]) + if key != "" && strings.IndexFunc(key, func(r rune) bool { + return !(r >= 'a' && r <= 'z' || + r >= 'A' && r <= 'Z' || + r >= '0' && r <= '9' || + r == '_' || r == '-') + }) == -1 { + looksLikeYAML = true + } + } + } + + // advance to next line + if nextNL == -1 { + pos = len(s) + 1 + } else { + pos = lineEnd + 1 + } + } + + // No closing delimiter found => treat as no frontmatter + if endDelimLineStart == -1 { + return "", md, false + } + + // If it doesn't look like YAML, treat as plain markdown (separator use-case) + if !looksLikeYAML { + return "", md, false + } + + // YAML is between yamlStart and the start of the closing delimiter line + yamlPart = s[yamlStart:endDelimLineStart] + yamlPart = strings.TrimSuffix(yamlPart, "\n") // nice-to-have + + // Body starts after the closing delimiter line (+ its trailing newline if present) + bodyStart := 
endDelimLineEnd + if bodyStart < len(s) && s[bodyStart:bodyStart+1] == "\n" { + bodyStart++ + } + body = s[bodyStart:] + + return yamlPart, body, true +} + +func ParseFrontmatter(md string) (fm Frontmatter, body string, has bool, err error) { + yamlPart, body, has := SplitFrontmatter(md) + if !has { + return Frontmatter{}, md, false, nil + } + + if err := yaml.Unmarshal([]byte(yamlPart), &fm); err != nil { + return Frontmatter{}, md, true, errors.Join(ErrFrontmatterParse, err) + } + return fm, body, true, nil +} + +func BuildMarkdownWithFrontmatter(fm Frontmatter, body string) (string, error) { + // Avoid emitting empty frontmatter like "{}" + if strings.TrimSpace(fm.LeafWikiID) == "" { + return body, nil + } + + b, err := yaml.Marshal(fm) + if err != nil { + return "", err + } + + var out bytes.Buffer + out.WriteString("---\n") + out.Write(b) // yaml.v3 usually ends with \n, which is fine + out.WriteString("---\n") + out.WriteString(body) + return out.String(), nil +} diff --git a/internal/core/tree/frontmatter_test.go b/internal/core/tree/frontmatter_test.go new file mode 100644 index 00000000..8791f509 --- /dev/null +++ b/internal/core/tree/frontmatter_test.go @@ -0,0 +1,399 @@ +package tree + +import ( + "errors" + "testing" +) + +func TestSplitFrontmatter(t *testing.T) { + tests := []struct { + name string + input string + wantFM string + wantBody string + wantHas bool + }{ + { + name: "no frontmatter", + input: "# Hello\nWorld\n", + wantFM: "", + wantBody: "# Hello\nWorld\n", + wantHas: false, + }, + { + name: "simple frontmatter", + input: "---\nleafwiki_id: abc123\n---\n# Title\n", + wantFM: "leafwiki_id: abc123", + wantBody: "# Title\n", + wantHas: true, + }, + { + name: "frontmatter with blank line", + input: "---\nleafwiki_id: abc123\n\n---\nBody\n", + wantFM: "leafwiki_id: abc123\n", + wantBody: "Body\n", + wantHas: true, + }, + { + name: "frontmatter with comments", + input: "---\n# comment\nleafwiki_id: abc123\n---\nBody\n", + wantFM: "# 
comment\nleafwiki_id: abc123", + wantBody: "Body\n", + wantHas: true, + }, + { + name: "only separator at top (no YAML)", + input: "---\nHello\nWorld\n---\nBody\n", + wantFM: "", + wantBody: "---\nHello\nWorld\n---\nBody\n", + wantHas: false, + }, + { + name: "horizontal rule later in document", + input: "# Title\n\n---\n\nText\n", + wantFM: "", + wantBody: "# Title\n\n---\n\nText\n", + wantHas: false, + }, + { + name: "unclosed frontmatter", + input: "---\nleafwiki_id: abc123\nBody\n", + wantFM: "", + wantBody: "---\nleafwiki_id: abc123\nBody\n", + wantHas: false, + }, + { + name: "empty frontmatter block", + input: "---\n---\nBody\n", + wantFM: "", + wantBody: "---\n---\nBody\n", + wantHas: false, + }, + { + name: "frontmatter with windows line endings", + input: "---\r\nleafwiki_id: abc123\r\n---\r\nBody\r\n", + wantFM: "leafwiki_id: abc123", + wantBody: "Body\n", + wantHas: true, + }, + { + name: "frontmatter with BOM", + input: "\ufeff---\nleafwiki_id: abc123\n---\nBody\n", + wantFM: "leafwiki_id: abc123", + wantBody: "Body\n", + wantHas: true, + }, + { + name: "yaml but no key colon (treated as no frontmatter)", + input: "---\n- item1\n- item2\n---\nBody\n", + wantFM: "", + wantBody: "---\n- item1\n- item2\n---\nBody\n", + wantHas: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fm, body, has := SplitFrontmatter(tt.input) + + if has != tt.wantHas { + t.Fatalf("has = %v, want %v", has, tt.wantHas) + } + if fm != tt.wantFM { + t.Fatalf("frontmatter = %q, want %q", fm, tt.wantFM) + } + if body != tt.wantBody { + t.Fatalf("body = %q, want %q", body, tt.wantBody) + } + }) + } +} + +func TestParseFrontmatter(t *testing.T) { + tests := []struct { + name string + input string + wantFM Frontmatter + wantBody string + wantHas bool + wantErr bool + wantErrType error + }{ + { + name: "no frontmatter", + input: "# Hello\nWorld\n", + wantFM: Frontmatter{}, + wantBody: "# Hello\nWorld\n", + wantHas: false, + wantErr: false, + }, + { + 
name: "valid frontmatter with ID only", + input: "---\nleafwiki_id: abc123\n---\n# Title\nContent", + wantFM: Frontmatter{ + LeafWikiID: "abc123", + }, + wantBody: "# Title\nContent", + wantHas: true, + wantErr: false, + }, + { + name: "valid frontmatter with title only", + input: "---\nleafwiki_title: My Title\n---\n# Title\nContent", + wantFM: Frontmatter{ + LeafWikiTitle: "My Title", + }, + wantBody: "# Title\nContent", + wantHas: true, + wantErr: false, + }, + { + name: "valid frontmatter with both ID and title", + input: "---\nleafwiki_id: abc123\nleafwiki_title: My Title\n---\n# Title\nContent", + wantFM: Frontmatter{ + LeafWikiID: "abc123", + LeafWikiTitle: "My Title", + }, + wantBody: "# Title\nContent", + wantHas: true, + wantErr: false, + }, + { + name: "empty YAML frontmatter", + input: "---\nkey: value\n---\nBody", + wantFM: Frontmatter{}, + wantBody: "Body", + wantHas: true, + wantErr: false, + }, + { + name: "invalid YAML in frontmatter", + input: "---\nleafwiki_id: [invalid: yaml: structure\n---\nBody", + wantFM: Frontmatter{}, + wantBody: "---\nleafwiki_id: [invalid: yaml: structure\n---\nBody", + wantHas: true, + wantErr: true, + wantErrType: ErrFrontmatterParse, + }, + { + name: "malformed YAML - unclosed brackets", + input: "---\nleafwiki_id: {unclosed\n---\nBody", + wantFM: Frontmatter{}, + wantBody: "---\nleafwiki_id: {unclosed\n---\nBody", + wantHas: true, + wantErr: true, + wantErrType: ErrFrontmatterParse, + }, + { + name: "frontmatter with extra fields (ignored)", + input: "---\nleafwiki_id: abc123\nextra_field: ignored\n---\nBody", + wantFM: Frontmatter{ + LeafWikiID: "abc123", + }, + wantBody: "Body", + wantHas: true, + wantErr: false, + }, + { + name: "frontmatter with whitespace in values", + input: "---\nleafwiki_id: \" abc123 \"\nleafwiki_title: \" My Title \"\n---\nBody", + wantFM: Frontmatter{ + LeafWikiID: " abc123 ", + LeafWikiTitle: " My Title ", + }, + wantBody: "Body", + wantHas: true, + wantErr: false, + }, + } + + for _, tt 
:= range tests { + t.Run(tt.name, func(t *testing.T) { + fm, body, has, err := ParseFrontmatter(tt.input) + + if (err != nil) != tt.wantErr { + t.Fatalf("ParseFrontmatter() error = %v, wantErr %v", err, tt.wantErr) + } + + if tt.wantErr && tt.wantErrType != nil { + if !errors.Is(err, tt.wantErrType) { + t.Fatalf("ParseFrontmatter() error = %v, want error type %v", err, tt.wantErrType) + } + } + + if has != tt.wantHas { + t.Fatalf("has = %v, want %v", has, tt.wantHas) + } + + if fm != tt.wantFM { + t.Fatalf("frontmatter = %+v, want %+v", fm, tt.wantFM) + } + + if body != tt.wantBody { + t.Fatalf("body = %q, want %q", body, tt.wantBody) + } + }) + } +} + +func TestBuildMarkdownWithFrontmatter(t *testing.T) { + tests := []struct { + name string + fm Frontmatter + body string + want string + wantErr bool + }{ + { + name: "empty frontmatter struct", + fm: Frontmatter{}, + body: "# Title\nContent", + want: "# Title\nContent", + }, + { + name: "frontmatter with empty ID", + fm: Frontmatter{ + LeafWikiID: "", + }, + body: "# Title\nContent", + want: "# Title\nContent", + }, + { + name: "frontmatter with whitespace-only ID", + fm: Frontmatter{ + LeafWikiID: " ", + }, + body: "# Title\nContent", + want: "# Title\nContent", + }, + { + name: "frontmatter with ID only", + fm: Frontmatter{ + LeafWikiID: "abc123", + }, + body: "# Title\nContent", + want: "---\nleafwiki_id: abc123\n---\n# Title\nContent", + }, + { + name: "frontmatter with title only", + fm: Frontmatter{ + LeafWikiTitle: "My Title", + }, + body: "# Title\nContent", + want: "# Title\nContent", + }, + { + name: "frontmatter with both ID and title", + fm: Frontmatter{ + LeafWikiID: "abc123", + LeafWikiTitle: "My Title", + }, + body: "# Title\nContent", + want: "---\nleafwiki_id: abc123\nleafwiki_title: My Title\n---\n# Title\nContent", + }, + { + name: "empty body", + fm: Frontmatter{ + LeafWikiID: "abc123", + }, + body: "", + want: "---\nleafwiki_id: abc123\n---\n", + }, + { + name: "body with newlines", + fm: 
Frontmatter{ + LeafWikiID: "abc123", + }, + body: "# Title\n\nParagraph 1\n\nParagraph 2\n", + want: "---\nleafwiki_id: abc123\n---\n# Title\n\nParagraph 1\n\nParagraph 2\n", + }, + { + name: "frontmatter with special characters in values", + fm: Frontmatter{ + LeafWikiID: "abc-123_xyz", + LeafWikiTitle: "Title: With Special & Characters", + }, + body: "Content", + want: "---\nleafwiki_id: abc-123_xyz\nleafwiki_title: 'Title: With Special & Characters'\n---\nContent", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := BuildMarkdownWithFrontmatter(tt.fm, tt.body) + + if (err != nil) != tt.wantErr { + t.Fatalf("BuildMarkdownWithFrontmatter() error = %v, wantErr %v", err, tt.wantErr) + } + + if got != tt.want { + t.Fatalf("BuildMarkdownWithFrontmatter() =\n%q\nwant:\n%q", got, tt.want) + } + }) + } +} + +func TestParseFrontmatterAndBuildRoundtrip(t *testing.T) { + tests := []struct { + name string + input string + wantBody string + }{ + { + name: "no frontmatter", + input: "# Title\nContent", + wantBody: "# Title\nContent", + }, + { + name: "with ID only", + input: "---\nleafwiki_id: abc123\n---\n# Title\nContent", + wantBody: "# Title\nContent", + }, + { + name: "with ID and title", + input: "---\nleafwiki_id: abc123\nleafwiki_title: My Title\n---\n# Title\nContent", + wantBody: "# Title\nContent", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Parse the original markdown + fm, body, has, err := ParseFrontmatter(tt.input) + if err != nil { + t.Fatalf("ParseFrontmatter() error = %v", err) + } + + if body != tt.wantBody { + t.Fatalf("body after parse = %q, want %q", body, tt.wantBody) + } + + // Rebuild markdown with frontmatter + rebuilt, err := BuildMarkdownWithFrontmatter(fm, body) + if err != nil { + t.Fatalf("BuildMarkdownWithFrontmatter() error = %v", err) + } + + // Parse again to verify + fm2, body2, has2, err := ParseFrontmatter(rebuilt) + if err != nil { + 
t.Fatalf("ParseFrontmatter() second parse error = %v", err) + } + + // Check that has flag is consistent + if has != has2 { + t.Fatalf("has flag changed: first=%v, second=%v", has, has2) + } + + // Check frontmatter is preserved + if fm != fm2 { + t.Fatalf("frontmatter changed: first=%+v, second=%+v", fm, fm2) + } + + // Check body is preserved + if body != body2 { + t.Fatalf("body changed: first=%q, second=%q", body, body2) + } + }) + } +} diff --git a/internal/core/tree/page_store.go b/internal/core/tree/page_store.go index c412e065..95aeb409 100644 --- a/internal/core/tree/page_store.go +++ b/internal/core/tree/page_store.go @@ -108,7 +108,12 @@ func (f *PageStore) CreatePage(parentEntry *PageNode, newEntry *PageNode) error } // Create an empty index.md file / Fallback! indexPath := path.Join(parentPath, "index.md") - if err := shared.WriteFileAtomic(indexPath, []byte(""), 0o644); err != nil { + fm := Frontmatter{LeafWikiID: parentEntry.ID, LeafWikiTitle: parentEntry.Title} + content, err := BuildMarkdownWithFrontmatter(fm, "") + if err != nil { + return fmt.Errorf("could not build markdown with frontmatter: %v", err) + } + if err := shared.WriteFileAtomic(indexPath, []byte(content), 0o644); err != nil { return fmt.Errorf("could not create index file: %v", err) } } @@ -121,11 +126,14 @@ func (f *PageStore) CreatePage(parentEntry *PageNode, newEntry *PageNode) error } // Create the file - content := []byte("# " + newEntry.Title + "\n") - if err := shared.WriteFileAtomic(newFilename, content, 0o644); err != nil { + fm := Frontmatter{LeafWikiID: newEntry.ID} + content, err := BuildMarkdownWithFrontmatter(fm, "# "+newEntry.Title+"\n") + if err != nil { + return fmt.Errorf("could not build markdown with frontmatter: %v", err) + } + if err := shared.WriteFileAtomic(newFilename, []byte(content), 0o644); err != nil { return fmt.Errorf("could not create file: %v", err) } - return nil } @@ -179,7 +187,12 @@ func (f *PageStore) UpdatePage(entry *PageNode, slug string, 
content string) err mode := file.Mode() // Update the file content - if err := shared.WriteFileAtomic(filePath, []byte(content), mode); err != nil { + fm := Frontmatter{LeafWikiID: entry.ID, LeafWikiTitle: entry.Title} + contentWithFM, err := BuildMarkdownWithFrontmatter(fm, content) + if err != nil { + return fmt.Errorf("could not build markdown with frontmatter: %v", err) + } + if err := shared.WriteFileAtomic(filePath, []byte(contentWithFM), mode); err != nil { return fmt.Errorf("could not write to file atomically: %v", err) } @@ -269,18 +282,24 @@ func (f *PageStore) ReadPageContent(entry *PageNode) (string, error) { return "", fmt.Errorf("file not found: %v", err) } - // Read the file content + // Read the file file, err := os.Open(filePath) if err != nil { return "", fmt.Errorf("could not open file: %v", err) } defer file.Close() - content, err := io.ReadAll(file) + raw, err := io.ReadAll(file) if err != nil { return "", fmt.Errorf("could not read file: %v", err) } - return string(content), nil + + _, content, _, err := ParseFrontmatter(string(raw)) + if err != nil { + return string(raw), err + } + + return content, nil } func (f *PageStore) getFilePath(entry *PageNode) (string, error) { diff --git a/internal/core/tree/page_store_test.go b/internal/core/tree/page_store_test.go index 90f9129b..216aea99 100644 --- a/internal/core/tree/page_store_test.go +++ b/internal/core/tree/page_store_test.go @@ -41,7 +41,7 @@ func TestPageStore_CreatePage(t *testing.T) { t.Fatalf("Failed to read file: %v", err) } - expected := "# Hello World\n" + expected := "---\nleafwiki_id: page-1\n---\n# Hello World\n" if string(content) != expected { t.Errorf("Unexpected file content. 
Got: %q, Expected: %q", string(content), expected) } @@ -253,8 +253,10 @@ func TestPageStore_UpdatePage_ContentOnly(t *testing.T) { t.Fatalf("Could not read updated file: %v", err) } - if string(data) != newContent { - t.Errorf("Expected content %q, got %q", newContent, string(data)) + expectedNewContent := "---\nleafwiki_id: p1\nleafwiki_title: My Page\n---\n# New Content" + + if string(data) != expectedNewContent { + t.Errorf("Expected content %q, got %q", expectedNewContent, string(data)) } } diff --git a/internal/core/tree/tree_service_test.go b/internal/core/tree/tree_service_test.go index 9a63ab64..542ae2ca 100644 --- a/internal/core/tree/tree_service_test.go +++ b/internal/core/tree/tree_service_test.go @@ -189,7 +189,7 @@ func TestTreeService_UpdatePage_ContentAndSlug(t *testing.T) { if err != nil { t.Fatalf("Failed to read file: %v", err) } - if string(data) != newContent { + if !strings.Contains(string(data), newContent) { t.Errorf("Expected content %q, got %q", newContent, string(data)) } } @@ -199,7 +199,7 @@ func TestTreeService_UpdatePage_FileNotFound(t *testing.T) { service := NewTreeService(tmpDir) _ = service.LoadTree() - // Seite im Baum erzeugen, aber Datei nicht schreiben + // Create a page in the tree but do not create the corresponding file id := "ghost" page := &PageNode{ ID: id, @@ -274,7 +274,7 @@ func TestTreeService_DeletePage_HasChildrenWithoutRecursive(t *testing.T) { t.Fatalf("CreatePage (child) failed: %v", err) } - // Versuch ohne Rekursion + // Try deleting parent without recursive err = service.DeletePage("system", parent.ID, false) if err == nil { t.Error("Expected error when deleting parent with children without recursive flag") From 03d12bd73f2bcc6d6acf1a425e8d69a30ae2cc09 Mon Sep 17 00:00:00 2001 From: perber Date: Wed, 7 Jan 2026 13:03:55 +0100 Subject: [PATCH 02/11] feat: add migration to add frontmatter to markdown files (#565) --- cmd/leafwiki/main.go | 16 + internal/core/tree/errors.go | 1 + 
internal/core/tree/page_store.go | 15 +- internal/core/tree/schema.go | 2 +- internal/core/tree/tree_service.go | 157 ++++++++-- internal/core/tree/tree_service_test.go | 386 ++++++++++++++++++++++++ 6 files changed, 557 insertions(+), 20 deletions(-) diff --git a/cmd/leafwiki/main.go b/cmd/leafwiki/main.go index 3dff3f43..92e32284 100644 --- a/cmd/leafwiki/main.go +++ b/cmd/leafwiki/main.go @@ -4,6 +4,7 @@ import ( "flag" "fmt" "log" + "log/slog" "os" "strings" "time" @@ -53,7 +54,22 @@ func printUsage() { `) } +func setupLogger() { + level := slog.LevelInfo + if os.Getenv("LOG_LEVEL") == "debug" { + level = slog.LevelDebug + } + + handler := slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{ + Level: level, + AddSource: true, + }) + + slog.SetDefault(slog.New(handler)) +} + func main() { + setupLogger() // flags hostFlag := flag.String("host", "", "host/IP address to bind the server to (e.g. 127.0.0.1 or 0.0.0.0)") diff --git a/internal/core/tree/errors.go b/internal/core/tree/errors.go index a0189925..fd004fd7 100644 --- a/internal/core/tree/errors.go +++ b/internal/core/tree/errors.go @@ -11,3 +11,4 @@ var ErrMovePageCircularReference = errors.New("circular reference detected") var ErrPageCannotBeMovedToItself = errors.New("page cannot be moved to itself") var ErrInvalidSortOrder = errors.New("invalid sort order") var ErrFrontmatterParse = errors.New("frontmatter parse error") +var ErrFileNotFound = errors.New("file not found") diff --git a/internal/core/tree/page_store.go b/internal/core/tree/page_store.go index 95aeb409..5df35a92 100644 --- a/internal/core/tree/page_store.go +++ b/internal/core/tree/page_store.go @@ -266,6 +266,19 @@ func (f *PageStore) MovePage(entry *PageNode, parentEntry *PageNode) error { return nil } +// ReadPageRaw returns the raw content of a page including frontmatter +func (f *PageStore) ReadPageRaw(entry *PageNode) (string, error) { + filePath, err := f.getFilePath(entry) + if err != nil { + return "", err + } + raw, err := 
os.ReadFile(filePath) + if err != nil { + return "", err + } + return string(raw), nil +} + // ReadPageContent returns the content of a page func (f *PageStore) ReadPageContent(entry *PageNode) (string, error) { if entry == nil { @@ -320,5 +333,5 @@ func (f *PageStore) getFilePath(entry *PageNode) (string, error) { return path.Join(entryPath, "index.md"), nil } - return "", errors.New("file not found") + return "", ErrFileNotFound } diff --git a/internal/core/tree/schema.go b/internal/core/tree/schema.go index ebc7e59d..42fdd8d3 100644 --- a/internal/core/tree/schema.go +++ b/internal/core/tree/schema.go @@ -7,7 +7,7 @@ import ( "path/filepath" ) -const CurrentSchemaVersion = 1 +const CurrentSchemaVersion = 2 type SchemaInfo struct { Version int `json:"version"` diff --git a/internal/core/tree/tree_service.go b/internal/core/tree/tree_service.go index 222a20f4..732e60da 100644 --- a/internal/core/tree/tree_service.go +++ b/internal/core/tree/tree_service.go @@ -1,8 +1,9 @@ package tree import ( + "errors" "fmt" - "log" + "log/slog" "os" "sort" "strings" @@ -19,6 +20,7 @@ type TreeService struct { treeFilename string tree *PageNode store *PageStore + log *slog.Logger mu sync.RWMutex } @@ -30,6 +32,7 @@ func NewTreeService(storageDir string) *TreeService { treeFilename: "tree.json", tree: nil, store: NewPageStore(storageDir), + log: slog.Default().With("component", "TreeService"), } } @@ -47,42 +50,56 @@ func (t *TreeService) LoadTree() error { } // Load the schema version - log.Printf("Checking schema version...") + t.log.Info("Checking schema version...") schema, err := loadSchema(t.storageDir) if err != nil { - log.Printf("Error loading schema: %v", err) + t.log.Error("Error loading schema", "error", err) return err } if schema.Version < CurrentSchemaVersion { - log.Printf("Migrating schema from version %d to %d...", schema.Version, CurrentSchemaVersion) + t.log.Info("Migrating schema", "fromVersion", schema.Version, "toVersion", CurrentSchemaVersion) if err := 
t.migrate(schema.Version); err != nil { - log.Printf("Error migrating schema: %v", err) + t.log.Error("Error migrating schema", "error", err) return err } - - // migration was successful, update schema version - if err := saveSchema(t.storageDir, CurrentSchemaVersion); err != nil { - log.Printf("Error saving schema: %v", err) - return err - } - - return t.saveTreeLocked() } return err } func (t *TreeService) migrate(fromVersion int) error { - if fromVersion < 1 { - if err := t.migrateTreeToV1Schema(); err != nil { + + for v := fromVersion; v < CurrentSchemaVersion; v++ { + switch v { + case 0: + if err := t.migrateToV1(); err != nil { + t.log.Error("Error migrating to v1", "error", err) + return err + } + case 1: + if err := t.migrateToV2(); err != nil { + t.log.Error("Error migrating to v2", "error", err) + return err + } + } + + // Save the tree after each migration step + if err := t.saveTreeLocked(); err != nil { + t.log.Error("Error saving tree after migration", "version", v+1, "error", err) + return err + } + + // Update the schema version file + if err := saveSchema(t.storageDir, v+1); err != nil { + t.log.Error("Error saving schema", "version", v+1, "error", err) return err } } return nil } -func (t *TreeService) migrateTreeToV1Schema() error { +func (t *TreeService) migrateToV1() error { // Backfill metadata for all pages var backfillMetadata func(node *PageNode) error backfillMetadata = func(node *PageNode) error { @@ -98,7 +115,7 @@ func (t *TreeService) migrateTreeToV1Schema() error { // Log the error and continue // We still want to backfill metadata for other nodes // but we cannot do it for this node - log.Printf("could not get file path for node %s: %v", node.ID, err) + t.log.Error("Could not get file path for node", "nodeID", node.ID, "error", err) return nil } @@ -111,7 +128,7 @@ func (t *TreeService) migrateTreeToV1Schema() error { createdAt = info.ModTime().UTC() updatedAt = info.ModTime().UTC() } else if !os.IsNotExist(err) { - 
log.Printf("could not stat file for node %s at path %s: %v", node.ID, filePath, err) + t.log.Error("Could not stat file for node", "nodeID", node.ID, "filePath", filePath, "error", err) } node.Metadata = PageMetadata{ @@ -129,9 +146,113 @@ func (t *TreeService) migrateTreeToV1Schema() error { return nil } + if t.tree == nil { + return ErrTreeNotLoaded + } + return backfillMetadata(t.tree) } +// migrateToV2 migrates the tree to the v2 schema +// Adds frontmatter to all existing pages if missing +func (t *TreeService) migrateToV2() error { + // Traverse all pages and add frontmatter if missing + var addFrontmatter func(node *PageNode) error + addFrontmatter = func(node *PageNode) error { + // Read the content of the page + content, err := t.store.ReadPageRaw(node) + if err != nil { + if errors.Is(err, os.ErrNotExist) || errors.Is(err, ErrFileNotFound) { + t.log.Warn("Page file does not exist, skipping frontmatter addition", "nodeID", node.ID) + // Recurse into children + for _, child := range node.Children { + if err := addFrontmatter(child); err != nil { + t.log.Error("Error adding frontmatter to child node", "nodeID", child.ID, "error", err) + return err + } + } + return nil + } + t.log.Error("Could not read page content for node", "nodeID", node.ID, "error", err) + return fmt.Errorf("could not read page content for node %s: %v", node.ID, err) + } + + // Parse the frontmatter + fm, body, has, err := ParseFrontmatter(content) + if err != nil { + t.log.Error("Could not parse frontmatter for node", "nodeID", node.ID, "error", err) + return fmt.Errorf("could not parse frontmatter for node %s: %v", node.ID, err) + } + + // Decide if we need to change anything + changed := false + + // If there is no frontmatter, start with a new one + if !has { + fm = Frontmatter{} + changed = true + } + + // Ensure required fields exist + if strings.TrimSpace(fm.LeafWikiID) == "" { + fm.LeafWikiID = node.ID + changed = true + } + // Optional but nice: keep title in sync *at least once* 
+ // (you might choose to NOT overwrite existing title) + if strings.TrimSpace(fm.LeafWikiTitle) == "" { + fm.LeafWikiTitle = node.Title + changed = true + } + + // Only write if changed + if changed { + newContent, err := BuildMarkdownWithFrontmatter(fm, body) + if err != nil { + t.log.Error("could not build markdown with frontmatter", "nodeID", node.ID, "error", err) + return fmt.Errorf("could not build markdown with frontmatter for node %s: %w", node.ID, err) + } + + filePath, err := t.store.getFilePath(node) + if err != nil { + t.log.Error("could not get file path", "nodeID", node.ID, "error", err) + return fmt.Errorf("could not get file path for node %s: %w", node.ID, err) + } + + if err := writeFileAtomic(filePath, []byte(newContent), 0o644); err != nil { + t.log.Error("could not write updated page content", "nodeID", node.ID, "filePath", filePath, "error", err) + return fmt.Errorf("could not write updated page content for node %s: %w", node.ID, err) + } + + t.log.Info("frontmatter backfilled", "nodeID", node.ID, "path", filePath) + } + + // Recurse into children + for _, child := range node.Children { + if err := addFrontmatter(child); err != nil { + t.log.Error("Error adding frontmatter to child node", "nodeID", child.ID, "error", err) + return err + } + } + + return nil + } + + if t.tree == nil { + return ErrTreeNotLoaded + } + + // start the recursion from the children of the root + for _, child := range t.tree.Children { + if err := addFrontmatter(child); err != nil { + t.log.Error("Error adding frontmatter to child node", "nodeID", child.ID, "error", err) + return err + } + } + + return nil +} + // SaveTree saves the tree to the storage directory func (t *TreeService) SaveTree() error { t.mu.Lock() diff --git a/internal/core/tree/tree_service_test.go b/internal/core/tree/tree_service_test.go index 542ae2ca..752bb267 100644 --- a/internal/core/tree/tree_service_test.go +++ b/internal/core/tree/tree_service_test.go @@ -803,3 +803,389 @@ func 
TestTreeService_EnsurePagePath_PathStartingWithSlash(t *testing.T) { t.Errorf("expected nil result for invalid path") } } + +func TestTreeService_MigrateToV2_PagesWithoutFrontmatter(t *testing.T) { + tmpDir := t.TempDir() + service := NewTreeService(tmpDir) + _ = service.LoadTree() + + // Create pages without frontmatter + _, err := service.CreatePage("system", nil, "Page1", "page1") + if err != nil { + t.Fatalf("CreatePage failed: %v", err) + } + page1 := service.GetTree().Children[0] + + _, err = service.CreatePage("system", &page1.ID, "Page2", "page2") + if err != nil { + t.Fatalf("CreatePage failed: %v", err) + } + page2 := page1.Children[0] + + // Write content without frontmatter + page1Path := filepath.Join(tmpDir, "root", "page1.md") + page2Path := filepath.Join(tmpDir, "root", "page1", "page2.md") + + err = os.WriteFile(page1Path, []byte("# Page 1 Content\nHello World"), 0644) + if err != nil { + t.Fatalf("Failed to write page1: %v", err) + } + + err = os.WriteFile(page2Path, []byte("# Page 2 Content\nNested content"), 0644) + if err != nil { + t.Fatalf("Failed to write page2: %v", err) + } + + // Run migration + err = service.migrateToV2() + if err != nil { + t.Fatalf("migrateToV2 failed: %v", err) + } + + // Verify frontmatter was added to page1 + content1, err := os.ReadFile(page1Path) + if err != nil { + t.Fatalf("Failed to read page1 after migration: %v", err) + } + fm1, body1, has1, err := ParseFrontmatter(string(content1)) + if err != nil { + t.Fatalf("Failed to parse frontmatter for page1: %v", err) + } + if !has1 { + t.Error("Expected page1 to have frontmatter after migration") + } + if fm1.LeafWikiID != page1.ID { + t.Errorf("Expected page1 frontmatter ID to be %s, got %s", page1.ID, fm1.LeafWikiID) + } + if fm1.LeafWikiTitle != "Page1" { + t.Errorf("Expected page1 frontmatter title to be 'Page1', got %s", fm1.LeafWikiTitle) + } + if !strings.Contains(body1, "# Page 1 Content") { + t.Error("Expected page1 body to be preserved") + } + + // Verify 
frontmatter was added to page2 + content2, err := os.ReadFile(page2Path) + if err != nil { + t.Fatalf("Failed to read page2 after migration: %v", err) + } + fm2, body2, has2, err := ParseFrontmatter(string(content2)) + if err != nil { + t.Fatalf("Failed to parse frontmatter for page2: %v", err) + } + if !has2 { + t.Error("Expected page2 to have frontmatter after migration") + } + if fm2.LeafWikiID != page2.ID { + t.Errorf("Expected page2 frontmatter ID to be %s, got %s", page2.ID, fm2.LeafWikiID) + } + if !strings.Contains(body2, "# Page 2 Content") { + t.Error("Expected page2 body to be preserved") + } +} + +func TestTreeService_MigrateToV2_PagesWithExistingFrontmatter(t *testing.T) { + tmpDir := t.TempDir() + service := NewTreeService(tmpDir) + _ = service.LoadTree() + + // Create page + _, err := service.CreatePage("system", nil, "Page1", "page1") + if err != nil { + t.Fatalf("CreatePage failed: %v", err) + } + page1 := service.GetTree().Children[0] + + // Write content with existing frontmatter + page1Path := filepath.Join(tmpDir, "root", "page1.md") + existingContent := "---\nleafwiki_id: " + page1.ID + "\nleafwiki_title: Custom Title\n---\n# Page 1 Content" + err = os.WriteFile(page1Path, []byte(existingContent), 0644) + if err != nil { + t.Fatalf("Failed to write page1: %v", err) + } + + // Run migration + err = service.migrateToV2() + if err != nil { + t.Fatalf("migrateToV2 failed: %v", err) + } + + // Verify frontmatter was not modified (should be unchanged) + content1, err := os.ReadFile(page1Path) + if err != nil { + t.Fatalf("Failed to read page1 after migration: %v", err) + } + fm1, body1, has1, err := ParseFrontmatter(string(content1)) + if err != nil { + t.Fatalf("Failed to parse frontmatter for page1: %v", err) + } + if !has1 { + t.Error("Expected page1 to have frontmatter after migration") + } + if fm1.LeafWikiID != page1.ID { + t.Errorf("Expected page1 frontmatter ID to be %s, got %s", page1.ID, fm1.LeafWikiID) + } + if fm1.LeafWikiTitle != 
"Custom Title" { + t.Errorf("Expected page1 frontmatter title to be 'Custom Title', got %s", fm1.LeafWikiTitle) + } + if !strings.Contains(body1, "# Page 1 Content") { + t.Error("Expected page1 body to be preserved") + } +} + +func TestTreeService_MigrateToV2_MissingFiles(t *testing.T) { + tmpDir := t.TempDir() + service := NewTreeService(tmpDir) + _ = service.LoadTree() + + // Create a page and its file + _, err := service.CreatePage("system", nil, "Page1", "page1") + if err != nil { + t.Fatalf("CreatePage failed: %v", err) + } + + // Write content to page1 + page1Path := filepath.Join(tmpDir, "root", "page1.md") + err = os.WriteFile(page1Path, []byte("# Page 1 Content"), 0644) + if err != nil { + t.Fatalf("Failed to write page1: %v", err) + } + + // Create a page with a child + _, err = service.CreatePage("system", nil, "Parent", "parent") + if err != nil { + t.Fatalf("CreatePage failed: %v", err) + } + parent := service.GetTree().Children[1] + + _, err = service.CreatePage("system", &parent.ID, "Child", "child") + if err != nil { + t.Fatalf("CreatePage failed: %v", err) + } + + // Write content to child without frontmatter + childPath := filepath.Join(tmpDir, "root", "parent", "child.md") + err = os.WriteFile(childPath, []byte("# Child Content"), 0644) + if err != nil { + t.Fatalf("Failed to write child: %v", err) + } + + // Remove the parent index.md file (parent has children so it's in a folder) + parentIndexPath := filepath.Join(tmpDir, "root", "parent", "index.md") + if _, err := os.Stat(parentIndexPath); err == nil { + os.Remove(parentIndexPath) + } + + // Run migration - should handle missing parent file gracefully and still migrate child + err = service.migrateToV2() + if err != nil { + t.Fatalf("migrateToV2 should handle missing files gracefully, got error: %v", err) + } + + // Verify page1 was migrated + content1, err := os.ReadFile(page1Path) + if err != nil { + t.Fatalf("Failed to read page1 after migration: %v", err) + } + _, _, has1, err := 
ParseFrontmatter(string(content1)) + if err != nil { + t.Fatalf("Failed to parse frontmatter for page1: %v", err) + } + if !has1 { + t.Error("Expected page1 to have frontmatter after migration") + } + + // Verify child was still migrated even though parent file is missing + childContent, err := os.ReadFile(childPath) + if err != nil { + t.Fatalf("Failed to read child after migration: %v", err) + } + _, _, hasChild, err := ParseFrontmatter(string(childContent)) + if err != nil { + t.Fatalf("Failed to parse frontmatter for child: %v", err) + } + if !hasChild { + t.Error("Expected child to have frontmatter after migration even if parent file is missing") + } +} + +func TestTreeService_MigrateToV2_SkipsNonExistentFiles(t *testing.T) { + tmpDir := t.TempDir() + service := NewTreeService(tmpDir) + _ = service.LoadTree() + + // Create a simple page + _, err := service.CreatePage("system", nil, "Page1", "page1") + if err != nil { + t.Fatalf("CreatePage failed: %v", err) + } + + page1 := service.GetTree().Children[0] + // Write content without frontmatter + page1Path := filepath.Join(tmpDir, "root", "page1.md") + err = os.WriteFile(page1Path, []byte("# Page 1 Content"), 0644) + if err != nil { + t.Fatalf("Failed to write page1: %v", err) + } + + // Manually add a node to the tree without creating its file + // This simulates a corrupted tree structure + ghostNode := &PageNode{ + ID: "ghost-node", + Title: "Ghost", + Slug: "ghost", + Parent: service.tree, + } + service.tree.Children = append(service.tree.Children, ghostNode) + + err = service.migrateToV2() + if err != nil { + t.Fatalf("Expected migration to skip missing files gracefully, got error: %v", err) + } + + // page1 should have frontmatter now + content1, err := os.ReadFile(page1Path) + if err != nil { + t.Fatalf("Failed to read page1 after migration: %v", err) + } + fm1, body1, has1, err := ParseFrontmatter(string(content1)) + if err != nil { + t.Fatalf("Failed to parse frontmatter for page1: %v", err) + } + if 
!has1 { + t.Fatal("Expected page1 to have frontmatter after migration") + } + if fm1.LeafWikiID != page1.ID { + t.Fatalf("Expected leafwiki_id %q, got %q", page1.ID, fm1.LeafWikiID) + } + if !strings.Contains(body1, "# Page 1 Content") { + t.Fatalf("Expected body to be preserved") + } +} + +func TestTreeService_MigrateToV2_TreeNotLoaded(t *testing.T) { + tmpDir := t.TempDir() + service := NewTreeService(tmpDir) + // Do NOT load tree + + // Run migration should fail + err := service.migrateToV2() + if err == nil { + t.Error("Expected error when tree is not loaded") + } + if !errors.Is(err, ErrTreeNotLoaded) { + t.Errorf("Expected ErrTreeNotLoaded, got: %v", err) + } +} + +func TestTreeService_MigrateToV2_PartialFrontmatter(t *testing.T) { + tmpDir := t.TempDir() + service := NewTreeService(tmpDir) + _ = service.LoadTree() + + // Create page + _, err := service.CreatePage("system", nil, "Page1", "page1") + if err != nil { + t.Fatalf("CreatePage failed: %v", err) + } + page1 := service.GetTree().Children[0] + + // Write content with partial frontmatter (missing ID) + page1Path := filepath.Join(tmpDir, "root", "page1.md") + partialContent := "---\nleafwiki_title: Existing Title\n---\n# Page 1 Content" + err = os.WriteFile(page1Path, []byte(partialContent), 0644) + if err != nil { + t.Fatalf("Failed to write page1: %v", err) + } + + // Run migration + err = service.migrateToV2() + if err != nil { + t.Fatalf("migrateToV2 failed: %v", err) + } + + // Verify ID was added but title was preserved + content1, err := os.ReadFile(page1Path) + if err != nil { + t.Fatalf("Failed to read page1 after migration: %v", err) + } + fm1, _, _, err := ParseFrontmatter(string(content1)) + if err != nil { + t.Fatalf("Failed to parse frontmatter for page1: %v", err) + } + if fm1.LeafWikiID != page1.ID { + t.Errorf("Expected page1 frontmatter ID to be added: %s, got %s", page1.ID, fm1.LeafWikiID) + } + if fm1.LeafWikiTitle != "Existing Title" { + t.Errorf("Expected page1 frontmatter title to 
be preserved: 'Existing Title', got %s", fm1.LeafWikiTitle) + } +} + +func TestTreeService_MigrateToV2_EmptyTree(t *testing.T) { + tmpDir := t.TempDir() + service := NewTreeService(tmpDir) + _ = service.LoadTree() + + // Run migration on empty tree (only root, no children) + err := service.migrateToV2() + if err != nil { + t.Fatalf("migrateToV2 should succeed on empty tree, got error: %v", err) + } +} + +func TestTreeService_MigrateToV2_PreservesBodyContent(t *testing.T) { + tmpDir := t.TempDir() + service := NewTreeService(tmpDir) + _ = service.LoadTree() + + // Create page + _, err := service.CreatePage("system", nil, "Page1", "page1") + if err != nil { + t.Fatalf("CreatePage failed: %v", err) + } + + // Write complex content without frontmatter + page1Path := filepath.Join(tmpDir, "root", "page1.md") + complexContent := `# Title + +This is a paragraph. + +## Section 1 + +- Item 1 +- Item 2 + +` + "```go\nfunc main() {\n\tfmt.Println(\"Hello\")\n}\n```" + ` + +### Subsection + +More content here. + +--- + +Horizontal rule above. 
+` + err = os.WriteFile(page1Path, []byte(complexContent), 0644) + if err != nil { + t.Fatalf("Failed to write page1: %v", err) + } + + // Run migration + err = service.migrateToV2() + if err != nil { + t.Fatalf("migrateToV2 failed: %v", err) + } + + // Verify body content is exactly preserved + content1, err := os.ReadFile(page1Path) + if err != nil { + t.Fatalf("Failed to read page1 after migration: %v", err) + } + _, body1, _, err := ParseFrontmatter(string(content1)) + if err != nil { + t.Fatalf("Failed to parse frontmatter for page1: %v", err) + } + if body1 != complexContent { + t.Errorf("Expected body to be exactly preserved.\nGot:\n%s\n\nWant:\n%s", body1, complexContent) + } +} From 21d014edf6eac8a053e2a5e665161023462ce79e Mon Sep 17 00:00:00 2001 From: perber Date: Sat, 10 Jan 2026 20:35:33 +0100 Subject: [PATCH 03/11] feat: add backend part to support sections (#577) --- internal/core/tree/errors.go | 71 +- internal/core/tree/node_store.go | 835 ++++++++++++++ internal/core/tree/node_store_test.go | 636 +++++++++++ internal/core/tree/page_node.go | 8 + internal/core/tree/page_store.go | 337 ------ internal/core/tree/page_store_test.go | 659 ----------- internal/core/tree/tree_service.go | 639 ++++++----- internal/core/tree/tree_service_test.go | 1343 ++++++++--------------- internal/http/api/create_page.go | 4 +- internal/http/api/ensure_page.go | 4 +- internal/http/api/update_page.go | 10 +- internal/http/router_test.go | 52 +- internal/links/link_service_test.go | 154 +-- internal/search/bootstrap_test.go | 5 +- internal/wiki/wiki.go | 78 +- internal/wiki/wiki_test.go | 184 ++-- 16 files changed, 2659 insertions(+), 2360 deletions(-) create mode 100644 internal/core/tree/node_store.go create mode 100644 internal/core/tree/node_store_test.go delete mode 100644 internal/core/tree/page_store.go delete mode 100644 internal/core/tree/page_store_test.go diff --git a/internal/core/tree/errors.go b/internal/core/tree/errors.go index fd004fd7..bf3a7e5c 100644 
--- a/internal/core/tree/errors.go +++ b/internal/core/tree/errors.go @@ -1,6 +1,9 @@ package tree -import "errors" +import ( + "errors" + "fmt" +) var ErrPageNotFound = errors.New("page not found") var ErrParentNotFound = errors.New("parent not found") @@ -12,3 +15,69 @@ var ErrPageCannotBeMovedToItself = errors.New("page cannot be moved to itself") var ErrInvalidSortOrder = errors.New("invalid sort order") var ErrFrontmatterParse = errors.New("frontmatter parse error") var ErrFileNotFound = errors.New("file not found") +var ErrDrift = errors.New("drift detected") +var ErrInvalidOperation = errors.New("invalid operation") +var ErrConvertNotAllowed = errors.New("convert not allowed") + +// DriftError represents a drift error with detailed information. +type DriftError struct { + NodeID string + Kind NodeKind + Path string + Reason string +} + +func (e *DriftError) Error() string { + return "drift detected: nodeID=" + e.NodeID + ", kind=" + string(e.Kind) + ", path=" + e.Path + ", reason=" + e.Reason +} + +func (e *DriftError) Unwrap() error { + return ErrDrift +} + +// InvalidOpError represents an invalid operation error with details. +type InvalidOpError struct { + Op string + Reason string +} + +func (e *InvalidOpError) Error() string { return fmt.Sprintf("%s: %s", e.Op, e.Reason) } +func (e *InvalidOpError) Unwrap() error { return ErrInvalidOperation } + +// PageAlreadyExistsError: conflict on Create/Move/Rename +type PageAlreadyExistsError struct { + Path string +} + +func (e *PageAlreadyExistsError) Error() string { return fmt.Sprintf("already exists: %s", e.Path) } +func (e *PageAlreadyExistsError) Unwrap() error { return ErrPageAlreadyExists } + +// NotFoundError represents a not found error with details.
+type NotFoundError struct { + Resource string + ID string + Path string +} + +func (e *NotFoundError) Error() string { + return fmt.Sprintf("%s not found: %s", e.Resource, e.ID) +} + +func (e *NotFoundError) Unwrap() error { + return ErrPageNotFound +} + +// ConvertNotAllowedError represents a convert not allowed error with details. +type ConvertNotAllowedError struct { + From NodeKind + To NodeKind + Reason string +} + +func (e *ConvertNotAllowedError) Error() string { + return fmt.Sprintf("cannot convert from %s to %s: %s", e.From, e.To, e.Reason) +} + +func (e *ConvertNotAllowedError) Unwrap() error { + return ErrConvertNotAllowed +} diff --git a/internal/core/tree/node_store.go b/internal/core/tree/node_store.go new file mode 100644 index 00000000..09e2308c --- /dev/null +++ b/internal/core/tree/node_store.go @@ -0,0 +1,835 @@ +package tree + +import ( + "encoding/json" + "errors" + "fmt" + "io" + "log/slog" + "os" + "path/filepath" + "runtime" + "strings" +) + +func fileExists(p string) bool { + _, err := os.Stat(p) + return err == nil +} + +// writeFileAtomic writes data to filename atomically by writing to a temp file +// in the same directory and then renaming it over the target. 
+func writeFileAtomic(filename string, data []byte, perm os.FileMode) error { + dir := filepath.Dir(filename) + + tmpFile, err := os.CreateTemp(dir, ".tmp-*") + if err != nil { + return fmt.Errorf("create temp file: %w", err) + } + + tmpName := tmpFile.Name() + // Ensure the temp file is removed in case of an error + defer func() { + _ = os.Remove(tmpName) + }() + + if perm != 0 { + if err := tmpFile.Chmod(perm); err != nil { + tmpFile.Close() + return fmt.Errorf("chmod temp file: %w", err) + } + } + + if _, err := tmpFile.Write(data); err != nil { + tmpFile.Close() + return fmt.Errorf("write temp file: %w", err) + } + + if err := tmpFile.Sync(); err != nil { + tmpFile.Close() + return fmt.Errorf("sync temp file: %w", err) + } + + if err := tmpFile.Close(); err != nil { + return fmt.Errorf("close temp file: %w", err) + } + + if err := atomicReplace(tmpName, filename); err != nil { + return fmt.Errorf("replace temp file: %w", err) + } + + return nil +} + +func atomicReplace(src, dst string) error { + // On Windows, os.Rename fails if dst already exists. + // On Unix, Rename is atomic and replaces dst. + if runtime.GOOS == "windows" { + if err := os.Remove(dst); err != nil && !os.IsNotExist(err) { + return fmt.Errorf("remove existing file: %w", err) + } + } + return os.Rename(src, dst) +} + +type ResolvedNode struct { + Kind NodeKind + DirPath string // if the node is a folder + FilePath string // if the node is a file (or folder/index.md) + HasContent bool // for folders: does index.md exist?
+} + +type NodeStore struct { + storageDir string + log *slog.Logger +} + +func NewNodeStore(storageDir string) *NodeStore { + return &NodeStore{ + storageDir: storageDir, + log: slog.Default().With("component", "NodeStore"), + } +} + +func (f *NodeStore) LoadTree(filename string) (*PageNode, error) { + fullPath := filepath.Join(f.storageDir, filename) + + // check if file exists + if _, err := os.Stat(fullPath); os.IsNotExist(err) { + return &PageNode{ + ID: "root", + Slug: "root", + Title: "root", + Parent: nil, + Position: 0, + Children: []*PageNode{}, + Kind: NodeKindSection, + }, nil + } + + file, err := os.Open(fullPath) + if err != nil { + return nil, fmt.Errorf("open tree file %s: %w", fullPath, err) + } + defer file.Close() + data, err := io.ReadAll(file) + + if err != nil { + return nil, fmt.Errorf("read tree file %s: %w", fullPath, err) + } + + tree := &PageNode{} + if err := json.Unmarshal(data, tree); err != nil { + return nil, fmt.Errorf("unmarshal tree data %s: %w", fullPath, err) + } + + if tree.ID == "root" && tree.Kind == "" { + tree.Kind = NodeKindSection + } + + // assigns parent to children + f.assignParentToChildren(tree) + + return tree, nil +} + +func (f *NodeStore) assignParentToChildren(parent *PageNode) { + for _, child := range parent.Children { + child.Parent = parent + f.assignParentToChildren(child) + } +} + +func (f *NodeStore) SaveTree(filename string, tree *PageNode) error { + if tree == nil { + return errors.New("a tree is required") + } + + fullPath := filepath.Join(f.storageDir, filename) + + data, err := json.Marshal(tree) + if err != nil { + return fmt.Errorf("could not marshal tree: %w", err) + } + + if err := writeFileAtomic(fullPath, data, 0o644); err != nil { + return fmt.Errorf("could not atomically write tree file: %w", err) + } + + return nil +} + +// CreatePage creates a new page file under the given parent entry +func (f *NodeStore) CreatePage(parentEntry *PageNode, newEntry *PageNode) error { + if parentEntry == nil 
{ + return &InvalidOpError{Op: "CreatePage", Reason: "a parent entry is required"} + } + if newEntry == nil { + return &InvalidOpError{Op: "CreatePage", Reason: "a new entry is required"} + } + if newEntry.ID == "root" { + return &InvalidOpError{Op: "CreatePage", Reason: "cannot create root"} + } + + // Pages can only be created under sections (Option A) + if parentEntry.Kind != NodeKindSection { + return &InvalidOpError{Op: "CreatePage", Reason: "parent entry must be a section"} + } + if newEntry.Kind != NodeKindPage { + return &InvalidOpError{Op: "CreatePage", Reason: "new entry must be a page"} + } + + // Parent directory is determined by the tree path + parentDir, err := f.dirPathForNode(parentEntry) + if err != nil { + return err + } + + // Ensure the parent directory exists (idempotent) + if err := os.MkdirAll(parentDir, 0o755); err != nil { + return fmt.Errorf("could not ensure parent directory exists: %w", err) + } + + // Destination paths + destBase := filepath.Join(parentDir, newEntry.Slug) + destFile := destBase + ".md" + destDir := destBase + + // Reject if either a file OR a directory with same slug exists + if fileExists(destFile) || fileExists(destDir) { + return &PageAlreadyExistsError{Path: destBase} + } + + // Build and write file + fm := Frontmatter{LeafWikiID: newEntry.ID} + md, err := BuildMarkdownWithFrontmatter(fm, "# "+newEntry.Title+"\n") + if err != nil { + return fmt.Errorf("could not build markdown with frontmatter: %w", err) + } + + if err := writeFileAtomic(destFile, []byte(md), 0o644); err != nil { + return fmt.Errorf("could not create file: %w", err) + } + + return nil +} + +// CreateSection creates a new section (folder) under the given parent entry. 
+func (f *NodeStore) CreateSection(parentEntry *PageNode, newEntry *PageNode) error { + if parentEntry == nil { + return &InvalidOpError{Op: "CreateSection", Reason: "a parent entry is required"} + } + if newEntry == nil { + return &InvalidOpError{Op: "CreateSection", Reason: "a new entry is required"} + } + if newEntry.ID == "root" { + return &InvalidOpError{Op: "CreateSection", Reason: "cannot create root"} + } + + // Sections can only be created under sections (Option A) + if parentEntry.Kind != NodeKindSection { + return &InvalidOpError{Op: "CreateSection", Reason: "parent entry must be a section"} + } + if newEntry.Kind != NodeKindSection { + return &InvalidOpError{Op: "CreateSection", Reason: "new entry must be a section"} + } + + // Parent directory from tree path + parentDir, err := f.dirPathForNode(parentEntry) + if err != nil { + return err + } + + // Ensure parent directory exists (idempotent) + if err := os.MkdirAll(parentDir, 0o755); err != nil { + return fmt.Errorf("could not ensure parent directory exists: %w", err) + } + + // Destination base paths + destBase := filepath.Join(parentDir, newEntry.Slug) + destFile := destBase + ".md" + destDir := destBase + + // Reject if either a file OR a directory with same slug exists + if fileExists(destFile) || fileExists(destDir) { + return &PageAlreadyExistsError{Path: destBase} + } + + // Create the folder for the section (no index.md by default) + if err := os.MkdirAll(destDir, 0o755); err != nil { + return fmt.Errorf("could not create section folder: %w", err) + } + + return nil +} + +// UpsertContent updates the content of a page file on disk +// It creates the file if it does not exist also for sections (index.md) +func (f *NodeStore) UpsertContent(entry *PageNode, content string) error { + if entry == nil { + return &InvalidOpError{Op: "UpsertContent", Reason: "an entry is required"} + } + + // Determine expected write path + filePath, err := f.contentPathForNodeWrite(entry) + if err != nil { + return 
err + } + + mode := os.FileMode(0o644) + if st, err := os.Stat(filePath); err == nil { + mode = st.Mode() + } + + // Update the file content + fm := Frontmatter{LeafWikiID: strings.TrimSpace(entry.ID), LeafWikiTitle: strings.TrimSpace(entry.Title)} + contentWithFM, err := BuildMarkdownWithFrontmatter(fm, content) + if err != nil { + return fmt.Errorf("could not build markdown with frontmatter: %w", err) + } + if err := writeFileAtomic(filePath, []byte(contentWithFM), mode); err != nil { + return fmt.Errorf("could not write to file atomically: %w", err) + } + + return nil +} + +// MoveNode moves a page to another node +func (f *NodeStore) MoveNode(entry *PageNode, parentEntry *PageNode) error { + if entry == nil { + return &InvalidOpError{Op: "MoveNode", Reason: "an entry is required"} + } + if parentEntry == nil { + return &InvalidOpError{Op: "MoveNode", Reason: "a parent entry is required"} + } + if entry.ID == "root" { + return &InvalidOpError{Op: "MoveNode", Reason: "cannot move root"} + } + + // Option A: children only under sections (defensive guard) + if parentEntry.Kind != NodeKindSection { + return &InvalidOpError{Op: "MoveNode", Reason: fmt.Sprintf("parent entry must be a section, got %q", parentEntry.Kind)} + } + + // Parent directory path from tree + parentDir, err := f.dirPathForNode(parentEntry) + if err != nil { + return err + } + + if err := os.MkdirAll(parentDir, 0o755); err != nil { + return fmt.Errorf("could not ensure parent directory exists: %w", err) + } + + // Current base path from tree (still at old location; TreeService updates Parent after success) + oldBase, err := f.dirPathForNode(entry) + if err != nil { + return err + } + oldFile := oldBase + ".md" + oldDir := oldBase + + // Destination base path (same slug, under new parent) + destBase := filepath.Join(parentDir, entry.Slug) + destFile := destBase + ".md" + destDir := destBase + + // Collision checks: refuse if destination already exists as file OR dir + if fileExists(destFile) || 
fileExists(destDir) { + return &PageAlreadyExistsError{Path: destBase} + } + + // STRICT: follow tree.Kind exactly (no disk fallbacks) + switch entry.Kind { + case NodeKindSection: + // src must be a directory + info, err := os.Stat(oldDir) + if err != nil { + if os.IsNotExist(err) { + f.log.Warn("move drift: expected folder missing", "nodeID", entry.ID, "expectedDir", oldDir) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: oldDir, Reason: "expected folder missing"} + } + return fmt.Errorf("stat source dir: %w", err) + } + if !info.IsDir() { + f.log.Warn("move drift: expected folder but found file", "nodeID", entry.ID, "expectedDir", oldDir) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: oldDir, Reason: "expected folder but found file"} + } + + if err := os.Rename(oldDir, destDir); err != nil { + return fmt.Errorf("could not move folder: %w", err) + } + + case NodeKindPage: + // src must be a file + info, err := os.Stat(oldFile) + if err != nil { + if os.IsNotExist(err) { + f.log.Warn("move drift: expected file missing", "nodeID", entry.ID, "expectedFile", oldFile) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: oldFile, Reason: "expected file missing"} + } + return fmt.Errorf("stat source file: %w", err) + } + if info.IsDir() { + f.log.Warn("move drift: expected file but found folder", "nodeID", entry.ID, "expectedFile", oldFile) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: oldFile, Reason: "expected file but found folder"} + } + + if err := os.Rename(oldFile, destFile); err != nil { + return fmt.Errorf("could not move file: %w", err) + } + + default: + return &InvalidOpError{Op: "MoveNode", Reason: fmt.Sprintf("unknown node kind: %q", entry.Kind)} + } + + return nil +} + +// DeletePage deletes a page file from disk +func (f *NodeStore) DeletePage(entry *PageNode) error { + if entry == nil { + return &InvalidOpError{Op: "DeletePage", Reason: "an entry is required"} + } + if entry.ID == "root" { + 
return &InvalidOpError{Op: "DeletePage", Reason: "cannot delete root"} + } + if entry.Kind != NodeKindPage && entry.Kind != "" { + return &InvalidOpError{Op: "DeletePage", Reason: "entry must be a page"} + } + + base, err := f.dirPathForNode(entry) + if err != nil { + return err + } + file := base + ".md" + + info, err := os.Stat(file) + if err != nil { + if os.IsNotExist(err) { + f.log.Warn("delete drift: expected page file missing", "nodeID", entry.ID, "expectedFile", file) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: file, Reason: "expected file missing"} + } + return fmt.Errorf("stat file: %w", err) + } + if info.IsDir() { + f.log.Warn("delete drift: expected file but found folder", "nodeID", entry.ID, "expectedFile", file) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: file, Reason: "expected file but found folder"} + } + + if err := os.Remove(file); err != nil { + return fmt.Errorf("could not delete file: %w", err) + } + + return nil +} + +// DeleteSection deletes a section folder from disk +func (f *NodeStore) DeleteSection(entry *PageNode) error { + if entry == nil { + return &InvalidOpError{Op: "DeleteSection", Reason: "an entry is required"} + } + if entry.ID == "root" { + return &InvalidOpError{Op: "DeleteSection", Reason: "cannot delete root"} + } + if entry.Kind != NodeKindSection { + return &InvalidOpError{Op: "DeleteSection", Reason: "entry must be a section"} + } + + dir, err := f.dirPathForNode(entry) + if err != nil { + return err + } + + info, err := os.Stat(dir) + if err != nil { + if os.IsNotExist(err) { + f.log.Warn("delete drift: expected section folder missing", "nodeID", entry.ID, "expectedDir", dir) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: dir, Reason: "expected folder missing"} + } + return fmt.Errorf("stat dir: %w", err) + } + if !info.IsDir() { + f.log.Warn("delete drift: expected folder but found file", "nodeID", entry.ID, "expectedDir", dir) + return &DriftError{NodeID: entry.ID, 
Kind: entry.Kind, Path: dir, Reason: "expected folder but found file"} + } + + if err := os.RemoveAll(dir); err != nil { + return fmt.Errorf("could not delete folder: %w", err) + } + + return nil +} + +// RenameNode renames a node's slug on disk +func (f *NodeStore) RenameNode(entry *PageNode, newSlug string) error { + if entry == nil { + return &InvalidOpError{Op: "RenameNode", Reason: "an entry is required"} + } + if strings.TrimSpace(newSlug) == "" { + return &InvalidOpError{Op: "RenameNode", Reason: "new slug must not be empty"} + } + if entry.Slug == newSlug { + return nil + } + if entry.ID == "root" { + return &InvalidOpError{Op: "RenameNode", Reason: "cannot rename root"} + } + + // old base path computed from current entry (still has old slug) + oldBase, err := f.dirPathForNode(entry) + if err != nil { + return err + } + + // new base path: same parent dir, last segment replaced + newBase := filepath.Join(filepath.Dir(oldBase), newSlug) + + // destination collision checks + if fileExists(newBase+".md") || fileExists(newBase) { + return &PageAlreadyExistsError{Path: newBase} + } + // perform rename based on kind + switch entry.Kind { + case NodeKindSection: + srcDir := oldBase + dstDir := newBase + + // strict: source dir must exist and be dir + info, err := os.Stat(srcDir) + if err != nil { + if os.IsNotExist(err) { + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: srcDir, Reason: "expected folder missing"} + } + return fmt.Errorf("stat source dir: %w", err) + } + if !info.IsDir() { + // drift: tree says section but disk is not a folder + f.log.Warn("drift: tree says section but disk is not a folder", "srcDir", srcDir) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: srcDir, Reason: "expected folder but found file"} + } + + if err := os.Rename(srcDir, dstDir); err != nil { + return fmt.Errorf("could not rename folder: %w", err) + } + return nil + case NodeKindPage: + srcFile := oldBase + ".md" + dstFile := newBase + ".md" + + // 
strict: source file must exist + info, err := os.Stat(srcFile) + if err != nil { + if os.IsNotExist(err) { + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: srcFile, Reason: "expected file missing"} + } + return fmt.Errorf("stat source file: %w", err) + } + if info.IsDir() { + // drift: tree says page but disk is a dir + f.log.Warn("drift: tree says page but disk is a dir", "srcFile", srcFile) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: srcFile, Reason: "expected file but found folder"} + } + + if err := os.Rename(srcFile, dstFile); err != nil { + return fmt.Errorf("could not rename file: %w", err) + } + return nil + + default: + return &InvalidOpError{Op: "RenameNode", Reason: fmt.Sprintf("unknown node kind: %q", entry.Kind)} + } +} + +// ReadPageRaw returns the raw content of a page including frontmatter +func (f *NodeStore) ReadPageRaw(entry *PageNode) (string, error) { + filePath, err := f.contentPathForNodeRead(entry) + if err != nil { + return "", err + } + + // Sections may legitimately have no content (missing index.md) + if entry.Kind == NodeKindSection { + if !fileExists(filePath) { + return "", nil + } + } else { + // Pages must have a content file + if !fileExists(filePath) { + return "", &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: filePath, Reason: "expected page file missing"} + } + } + + raw, err := os.ReadFile(filePath) + if err != nil { + return "", err + } + return string(raw), nil +} + +// ReadPageContent returns the content of a page +func (f *NodeStore) ReadPageContent(entry *PageNode) (string, error) { + raw, err := f.ReadPageRaw(entry) + if err != nil { + return "", err + } + _, content, _, err := ParseFrontmatter(string(raw)) + if err != nil { + return string(raw), err + } + return content, nil +} + +// SyncFrontmatterIfExists updates the frontmatter of a page file on disk if it exists +func (f *NodeStore) SyncFrontmatterIfExists(entry *PageNode) error { + if entry == nil { + return 
&InvalidOpError{Op: "SyncFrontmatterIfExists", Reason: "an entry is required"} + } + + // keine side effects: write-path NICHT verwenden (würde mkdir + bei Section implizit index.md Pfad liefern) + // aber read-path reicht, weil wir nur syncen, wenn Datei existiert + filePath, err := f.contentPathForNodeRead(entry) + if err != nil { + return err + } + + // Datei existiert? + if !fileExists(filePath) { + // Page: muss existieren + if entry.Kind == NodeKindPage || entry.Kind == "" { + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: filePath, Reason: "expected page file missing"} + } + // Section: kein index.md -> NICHT erzeugen + return nil + } + + raw, err := os.ReadFile(filePath) + if err != nil { + return fmt.Errorf("read content file: %w", err) + } + + fm, body, has, err := ParseFrontmatter(string(raw)) + if err != nil { + return fmt.Errorf("parse frontmatter: %w", err) + } + if !has { + fm = Frontmatter{} + } + + // Tree-SoT invariants + fm.LeafWikiID = strings.TrimSpace(entry.ID) + fm.LeafWikiTitle = strings.TrimSpace(entry.Title) + + out, err := BuildMarkdownWithFrontmatter(fm, body) + if err != nil { + return fmt.Errorf("build markdown: %w", err) + } + + mode := os.FileMode(0o644) + if st, err := os.Stat(filePath); err == nil { + mode = st.Mode() + } + + if err := writeFileAtomic(filePath, []byte(out), mode); err != nil { + return fmt.Errorf("write file atomically: %w", err) + } + return nil +} + +func (f *NodeStore) dirPathForNode(entry *PageNode) (string, error) { + if entry == nil { + return "", &InvalidOpError{Op: "dirPathForNode", Reason: "an entry is required"} + } + return filepath.Join(f.storageDir, GeneratePathFromPageNode(entry)), nil +} + +// contentPathForNodeRead returns the expected content file path for a node +// based purely on the tree Kind (NO side effects, NO mkdir): +// - page => .md +// - section => /index.md +func (f *NodeStore) contentPathForNodeRead(entry *PageNode) (string, error) { + if entry == nil { + return "", 
&InvalidOpError{Op: "contentPathForNodeRead", Reason: "an entry is required"} + } + + base, err := f.dirPathForNode(entry) + if err != nil { + return "", err + } + switch entry.Kind { + case NodeKindSection: + return filepath.Join(base, "index.md"), nil + case NodeKindPage: + return base + ".md", nil + default: + return "", &InvalidOpError{Op: "contentPathForNodeRead", Reason: fmt.Sprintf("unknown node kind: %q", entry.Kind)} + } +} + +// contentPathForNodeWrite returns the expected content file path for a node +// based purely on the tree Kind (MAY create dirs for sections): +// - page => .md +// - section => /index.md (ensures directory exists) +func (f *NodeStore) contentPathForNodeWrite(entry *PageNode) (string, error) { + if entry == nil { + return "", &InvalidOpError{Op: "contentPathForNodeWrite", Reason: "an entry is required"} + } + + base, err := f.dirPathForNode(entry) + if err != nil { + return "", err + } + switch entry.Kind { + case NodeKindSection: + if err := os.MkdirAll(base, 0o755); err != nil { + return "", fmt.Errorf("could not ensure folder: %w", err) + } + return filepath.Join(base, "index.md"), nil + + case NodeKindPage: + return base + ".md", nil + + default: + return "", &InvalidOpError{Op: "contentPathForNodeWrite", Reason: fmt.Sprintf("unknown node kind: %q", entry.Kind)} + } +} + +// resolveNode inspects the filesystem to determine if the given PageNode +// corresponds to a file or folder, returning a ResolvedNode with details. +// This function is only used for migration. Other parts of the system should rely on contentPathForNodeRead or contentPathForNodeWrite. +// If this function is used outside of migration, it may lead to inconsistencies between the tree and the actual filesystem state. +func (f *NodeStore) resolveNode(entry *PageNode) (*ResolvedNode, error) { + basePath, err := f.dirPathForNode(entry) + if err != nil { + return nil, err + } + + // 1) File? 
+ if _, err := os.Stat(basePath + ".md"); err == nil { + f.log.Debug("resolved as file node", "filePath", basePath+".md") + return &ResolvedNode{ + Kind: NodeKindPage, + FilePath: basePath + ".md", + HasContent: true, + }, nil + } + + // 2) Folder? + if info, err := os.Stat(basePath); err == nil && info.IsDir() { + index := filepath.Join(basePath, "index.md") + if _, err := os.Stat(index); err == nil { + f.log.Debug("resolved as section node with content", "dirPath", basePath, "filePath", index) + return &ResolvedNode{ + Kind: NodeKindSection, + DirPath: basePath, + FilePath: index, + HasContent: true, + }, nil + } + f.log.Debug("resolved as section node without content", "dirPath", basePath) + return &ResolvedNode{ + Kind: NodeKindSection, + DirPath: basePath, + FilePath: "", // no index.md present + HasContent: false, + }, nil + } + + return nil, &NotFoundError{Resource: "node", Path: basePath, ID: entry.ID} +} + +// ConvertNode converts the on-disk representation between page <-> folder. +// NOTE: TreeService must ensure folder->page is allowed (no children). 
+func (f *NodeStore) ConvertNode(entry *PageNode, target NodeKind) error { + if entry == nil { + return &InvalidOpError{Op: "ConvertNode", Reason: "an entry is required"} + } + + base, err := f.dirPathForNode(entry) + if err != nil { + return err + } + filePath := base + ".md" + folderPath := base + indexPath := filepath.Join(folderPath, "index.md") + + switch target { + case NodeKindSection: + // page -> folder + if _, err := os.Stat(filePath); err == nil { + if err := os.MkdirAll(folderPath, 0o755); err != nil { + return fmt.Errorf("could not create folder: %w", err) + } + // keep content: .md -> /index.md + if err := os.Rename(filePath, indexPath); err != nil { + return fmt.Errorf("could not move page into folder: %w", err) + } + return nil + } + // already folder (or missing) -> ensure dir exists + if err := os.MkdirAll(folderPath, 0o755); err != nil { + return fmt.Errorf("could not ensure folder exists: %w", err) + } + return nil + + case NodeKindPage: + // folder -> page (strict, safe order) + info, err := os.Stat(folderPath) + if err != nil { + if os.IsNotExist(err) { + // nothing to do if folder doesn't exist + return nil + } + return err + } + if !info.IsDir() { + return &DriftError{NodeID: entry.ID, Kind: NodeKindSection, Path: folderPath, Reason: "expected folder but found file"} + } + + entries, err := os.ReadDir(folderPath) + if err != nil { + return err + } + + // allow only: + // - empty folder + // - folder with only index.md + allowed := true + for _, e := range entries { + name := e.Name() + if name == "index.md" { + continue + } + allowed = false + break + } + if !allowed { + return &ConvertNotAllowedError{From: NodeKindSection, To: NodeKindPage, Reason: "folder not empty"} + } + + // now do the move/create + if fileExists(indexPath) { + if err := os.Rename(indexPath, filePath); err != nil { + return fmt.Errorf("could not move index to page: %w", err) + } + } else { + fm := Frontmatter{LeafWikiID: entry.ID, LeafWikiTitle: entry.Title} + md, err 
:= BuildMarkdownWithFrontmatter(fm, "") + if err != nil { + return err + } + if err := writeFileAtomic(filePath, []byte(md), 0o644); err != nil { + return fmt.Errorf("could not write page file: %w", err) + } + } + + // remove folder (must be empty now) + if err := os.Remove(folderPath); err != nil { + return err + } + return nil + + default: + return &InvalidOpError{Op: "ConvertNode", Reason: fmt.Sprintf("unknown target kind: %q", target)} + } +} diff --git a/internal/core/tree/node_store_test.go b/internal/core/tree/node_store_test.go new file mode 100644 index 00000000..b4b045da --- /dev/null +++ b/internal/core/tree/node_store_test.go @@ -0,0 +1,636 @@ +package tree + +import ( + "errors" + "os" + "path/filepath" + "runtime" + "strings" + "testing" +) + +func mustWriteFile(t *testing.T, path string, data string, perm os.FileMode) { + t.Helper() + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + if err := os.WriteFile(path, []byte(data), perm); err != nil { + t.Fatalf("write file: %v", err) + } +} + +func mustMkdir(t *testing.T, path string) { + t.Helper() + if err := os.MkdirAll(path, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } +} + +func TestNodeStore_LoadTree_MissingFile_ReturnsDefaultRoot(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + tree, err := store.LoadTree("missing.json") + if err != nil { + t.Fatalf("LoadTree: %v", err) + } + if tree == nil { + t.Fatalf("expected tree, got nil") + } + if tree.ID != "root" || tree.Slug != "root" || tree.Title != "root" { + t.Fatalf("unexpected default root: %#v", tree) + } + if tree.Kind != NodeKindSection { + t.Fatalf("expected root kind %q, got %q", NodeKindSection, tree.Kind) + } + if tree.Parent != nil { + t.Fatalf("expected root parent nil") + } + if len(tree.Children) != 0 { + t.Fatalf("expected no children") + } +} + +func TestNodeStore_SaveTree_ThenLoadTree_AssignsParents(t *testing.T) { + tmp := t.TempDir() + store := 
NewNodeStore(tmp) + + tree := &PageNode{ + ID: "root", + Slug: "root", + Title: "root", + Kind: NodeKindSection, + Children: []*PageNode{ + { + ID: "s1", + Slug: "sec", + Title: "Section", + Kind: NodeKindSection, + Children: []*PageNode{ + { + ID: "p1", + Slug: "page", + Title: "Page", + Kind: NodeKindPage, + }, + }, + }, + }, + } + + if err := store.SaveTree("tree.json", tree); err != nil { + t.Fatalf("SaveTree: %v", err) + } + + loaded, err := store.LoadTree("tree.json") + if err != nil { + t.Fatalf("LoadTree: %v", err) + } + + sec := loaded.Children[0] + p := sec.Children[0] + + if sec.Parent == nil || sec.Parent.ID != "root" { + t.Fatalf("expected section parent root, got %#v", sec.Parent) + } + if p.Parent == nil || p.Parent.ID != "s1" { + t.Fatalf("expected page parent s1, got %#v", p.Parent) + } +} + +func TestNodeStore_SaveTree_NilTree_Error(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + if err := store.SaveTree("tree.json", nil); err == nil { + t.Fatalf("expected error, got nil") + } +} + +func TestNodeStore_CreateSection_CreatesFolder_NoIndexByDefault(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + sec := &PageNode{ID: "sec1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + if err := store.CreateSection(root, sec); err != nil { + t.Fatalf("CreateSection: %v", err) + } + + // expected folder: /root/docs + dir := filepath.Join(tmp, "root", "docs") + if st, err := os.Stat(dir); err != nil || !st.IsDir() { + t.Fatalf("expected section folder at %s", dir) + } + + // no index.md by default + index := filepath.Join(dir, "index.md") + if _, err := os.Stat(index); err == nil { + t.Fatalf("did not expect index.md to exist by default: %s", index) + } +} + +func TestNodeStore_CreateSection_KindGuards(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + rootPageWrong := &PageNode{ID: "root", Slug: 
"root", Title: "root", Kind: NodeKindPage} + sec := &PageNode{ID: "sec1", Slug: "docs", Title: "Docs", Kind: NodeKindSection} + + if err := store.CreateSection(rootPageWrong, sec); err == nil { + t.Fatalf("expected error when parent is not a section") + } + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + pageWrong := &PageNode{ID: "x", Slug: "x", Title: "X", Kind: NodeKindPage} + if err := store.CreateSection(root, pageWrong); err == nil { + t.Fatalf("expected error when new entry is not a section") + } +} + +func TestNodeStore_CreatePage_CreatesMarkdownWithFrontmatter(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + page := &PageNode{ID: "p1", Slug: "hello", Title: "Hello World", Kind: NodeKindPage, Parent: root} + + if err := store.CreatePage(root, page); err != nil { + t.Fatalf("CreatePage: %v", err) + } + + p := filepath.Join(tmp, "root", "hello.md") + raw, err := os.ReadFile(p) + if err != nil { + t.Fatalf("read created page: %v", err) + } + + fm, body, has, err := ParseFrontmatter(string(raw)) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) + } + if !has { + t.Fatalf("expected frontmatter") + } + if strings.TrimSpace(fm.LeafWikiID) != "p1" { + t.Fatalf("expected leafwiki_id p1, got %q", fm.LeafWikiID) + } + // CreatePage setzt nur ID im FM, Title kommt in den Body als H1 + if !strings.Contains(body, "# Hello World") { + t.Fatalf("expected H1 title in body, got: %q", body) + } +} + +func TestNodeStore_CreatePage_RejectsCollision_FileOrDir(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + + // collision as file + mustWriteFile(t, filepath.Join(tmp, "root", "dup.md"), "x", 0o644) + page := &PageNode{ID: "p1", Slug: "dup", Title: "Dup", Kind: NodeKindPage, Parent: root} + if err := store.CreatePage(root, page); 
err == nil { + t.Fatalf("expected PageAlreadyExistsError for existing file") + } + + // collision as dir + mustMkdir(t, filepath.Join(tmp, "root", "dupdir")) + page2 := &PageNode{ID: "p2", Slug: "dupdir", Title: "DupDir", Kind: NodeKindPage, Parent: root} + if err := store.CreatePage(root, page2); err == nil { + t.Fatalf("expected PageAlreadyExistsError for existing dir") + } +} + +func TestNodeStore_UpsertContent_Page_CreatesOrUpdates_PreservesMode(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + page := &PageNode{ID: "p1", Slug: "p", Title: "My Page", Kind: NodeKindPage, Parent: root} + + // create with custom mode + path := filepath.Join(tmp, "root", "p.md") + mustWriteFile(t, path, "# old", 0o600) + + if err := store.UpsertContent(page, "# new"); err != nil { + t.Fatalf("UpsertContent: %v", err) + } + + st, err := os.Stat(path) + if err != nil { + t.Fatalf("stat: %v", err) + } + // permissions should stay (best-effort; Windows behaves differently sometimes) + if runtime.GOOS != "windows" { + if st.Mode().Perm() != 0o600 { + t.Fatalf("expected perm 0600, got %o", st.Mode().Perm()) + } + } + + raw, _ := os.ReadFile(path) + fm, body, has, err := ParseFrontmatter(string(raw)) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) + } + if !has { + t.Fatalf("expected FM to exist") + } + if fm.LeafWikiID != "p1" { + t.Fatalf("expected id p1, got %q", fm.LeafWikiID) + } + if fm.LeafWikiTitle != "My Page" { + t.Fatalf("expected title 'My Page', got %q", fm.LeafWikiTitle) + } + if strings.TrimSpace(body) != "# new" { + t.Fatalf("expected body '# new', got %q", body) + } +} + +func TestNodeStore_UpsertContent_Section_WritesIndexAndCreatesDir(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + sec := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: 
NodeKindSection, Parent: root} + + if err := store.UpsertContent(sec, "# docs"); err != nil { + t.Fatalf("UpsertContent: %v", err) + } + + index := filepath.Join(tmp, "root", "docs", "index.md") + if _, err := os.Stat(index); err != nil { + t.Fatalf("expected index.md to exist: %v", err) + } +} + +func TestNodeStore_MoveNode_Page_MovesFileStrict(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + secA := &PageNode{ID: "a", Slug: "a", Title: "A", Kind: NodeKindSection, Parent: root} + secB := &PageNode{ID: "b", Slug: "b", Title: "B", Kind: NodeKindSection, Parent: root} + page := &PageNode{ID: "p1", Slug: "p", Title: "P", Kind: NodeKindPage, Parent: secA} + + // create source file at old location (tree-based path) + src := filepath.Join(tmp, "root", "a", "p.md") + mustWriteFile(t, src, "# hi", 0o644) + + if err := store.MoveNode(page, secB); err != nil { + t.Fatalf("MoveNode: %v", err) + } + + dst := filepath.Join(tmp, "root", "b", "p.md") + if _, err := os.Stat(dst); err != nil { + t.Fatalf("expected dest file: %v", err) + } + if _, err := os.Stat(src); !os.IsNotExist(err) { + t.Fatalf("expected src removed") + } +} + +func TestNodeStore_MoveNode_DriftWhenMissingSource(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + sec := &PageNode{ID: "s", Slug: "s", Title: "S", Kind: NodeKindSection, Parent: root} + page := &PageNode{ID: "p1", Slug: "p", Title: "P", Kind: NodeKindPage, Parent: sec} + + err := store.MoveNode(page, root) + if err == nil { + t.Fatalf("expected DriftError, got nil") + } + var de *DriftError + if !errors.As(err, &de) { + t.Fatalf("expected DriftError, got %T: %v", err, err) + } +} + +func TestNodeStore_DeletePage_RemovesFile_OrDriftIfMissing(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: 
"root", Title: "root", Kind: NodeKindSection} + page := &PageNode{ID: "p1", Slug: "p", Title: "P", Kind: NodeKindPage, Parent: root} + + path := filepath.Join(tmp, "root", "p.md") + mustWriteFile(t, path, "# x", 0o644) + + if err := store.DeletePage(page); err != nil { + t.Fatalf("DeletePage: %v", err) + } + if _, err := os.Stat(path); !os.IsNotExist(err) { + t.Fatalf("expected file deleted") + } + + // delete again -> drift + err := store.DeletePage(page) + if err == nil { + t.Fatalf("expected DriftError") + } +} + +func TestNodeStore_DeleteSection_RemovesFolderRecursive_OrDriftIfMissing(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + sec := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + dir := filepath.Join(tmp, "root", "docs") + mustMkdir(t, dir) + mustWriteFile(t, filepath.Join(dir, "index.md"), "# hi", 0o644) + mustWriteFile(t, filepath.Join(dir, "nested.txt"), "x", 0o644) + + if err := store.DeleteSection(sec); err != nil { + t.Fatalf("DeleteSection: %v", err) + } + if _, err := os.Stat(dir); !os.IsNotExist(err) { + t.Fatalf("expected folder deleted") + } + + err := store.DeleteSection(sec) + if err == nil { + t.Fatalf("expected DriftError") + } +} + +func TestNodeStore_RenameNode_PageAndSection(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + + // page rename + page := &PageNode{ID: "p1", Slug: "old", Title: "P", Kind: NodeKindPage, Parent: root} + oldFile := filepath.Join(tmp, "root", "old.md") + mustWriteFile(t, oldFile, "# x", 0o644) + + if err := store.RenameNode(page, "new"); err != nil { + t.Fatalf("RenameNode(page): %v", err) + } + if _, err := os.Stat(filepath.Join(tmp, "root", "new.md")); err != nil { + t.Fatalf("expected new page file") + } + + // section rename + sec := &PageNode{ID: "s1", Slug: 
"docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + secDir := filepath.Join(tmp, "root", "docs") + mustMkdir(t, secDir) + mustWriteFile(t, filepath.Join(secDir, "index.md"), "# y", 0o644) + + if err := store.RenameNode(sec, "docs2"); err != nil { + t.Fatalf("RenameNode(section): %v", err) + } + if st, err := os.Stat(filepath.Join(tmp, "root", "docs2")); err != nil || !st.IsDir() { + t.Fatalf("expected renamed section dir") + } +} + +func TestNodeStore_ReadPageRaw_Section_NoIndex_ReturnsEmptyNil(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + sec := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + // folder exists, but no index.md + mustMkdir(t, filepath.Join(tmp, "root", "docs")) + + raw, err := store.ReadPageRaw(sec) + if err != nil { + t.Fatalf("ReadPageRaw: %v", err) + } + if raw != "" { + t.Fatalf("expected empty raw for section without index, got %q", raw) + } +} + +func TestNodeStore_ReadPageRaw_Page_Missing_IsDrift(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + page := &PageNode{ID: "p1", Slug: "p", Title: "P", Kind: NodeKindPage, Parent: root} + + _, err := store.ReadPageRaw(page) + if err == nil { + t.Fatalf("expected DriftError") + } +} + +func TestNodeStore_SyncFrontmatterIfExists_Page_UpdatesOrAddsFM(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + page := &PageNode{ID: "p1", Slug: "p", Title: "Title A", Kind: NodeKindPage, Parent: root} + + path := filepath.Join(tmp, "root", "p.md") + + // file without FM + mustWriteFile(t, path, "# Body\nHello", 0o644) + + if err := store.SyncFrontmatterIfExists(page); err != nil { + t.Fatalf("SyncFrontmatterIfExists: %v", err) + } + + raw := 
string(mustRead(t, path)) + fm, body, has, err := ParseFrontmatter(raw) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) + } + if !has { + t.Fatalf("expected fm after sync") + } + if fm.LeafWikiID != "p1" || fm.LeafWikiTitle != "Title A" { + t.Fatalf("unexpected fm: %#v", fm) + } + if strings.TrimSpace(body) != "# Body\nHello" { + t.Fatalf("body changed unexpectedly: %q", body) + } + + // update title and id + page.Title = "Title B" + page.ID = "p1b" + if err := store.SyncFrontmatterIfExists(page); err != nil { + t.Fatalf("SyncFrontmatterIfExists(update): %v", err) + } + raw2 := string(mustRead(t, path)) + fm2, body2, has2, err := ParseFrontmatter(raw2) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) + } + if !has2 || fm2.LeafWikiID != "p1b" || fm2.LeafWikiTitle != "Title B" { + t.Fatalf("expected updated fm, got %#v", fm2) + } + if strings.TrimSpace(body2) != "# Body\nHello" { + t.Fatalf("body changed unexpectedly on update: %q", body2) + } +} + +func TestNodeStore_SyncFrontmatterIfExists_Section_NoIndex_NoSideEffects(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + sec := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + // Do NOT create folder: sync must not mkdir via write-path; should return nil. 
+ if err := store.SyncFrontmatterIfExists(sec); err != nil { + t.Fatalf("SyncFrontmatterIfExists(section): %v", err) + } + // Ensure no folder created implicitly + if _, err := os.Stat(filepath.Join(tmp, "root", "docs")); err == nil { + t.Fatalf("expected no side effects (folder created), but folder exists") + } +} + +func TestNodeStore_resolveNode_FileVsFolder(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + + page := &PageNode{ID: "p1", Slug: "p", Title: "P", Kind: NodeKindPage, Parent: root} + mustWriteFile(t, filepath.Join(tmp, "root", "p.md"), "# x", 0o644) + + r1, err := store.resolveNode(page) + if err != nil { + t.Fatalf("resolveNode(page): %v", err) + } + if r1.Kind != NodeKindPage || !r1.HasContent || !strings.HasSuffix(r1.FilePath, "p.md") { + t.Fatalf("unexpected resolved: %#v", r1) + } + + sec := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + secDir := filepath.Join(tmp, "root", "docs") + mustMkdir(t, secDir) + + r2, err := store.resolveNode(sec) + if err != nil { + t.Fatalf("resolveNode(sec without index): %v", err) + } + if r2.Kind != NodeKindSection || r2.HasContent { + t.Fatalf("expected section without content: %#v", r2) + } + + mustWriteFile(t, filepath.Join(secDir, "index.md"), "# idx", 0o644) + r3, err := store.resolveNode(sec) + if err != nil { + t.Fatalf("resolveNode(sec with index): %v", err) + } + if r3.Kind != NodeKindSection || !r3.HasContent || !strings.HasSuffix(r3.FilePath, "index.md") { + t.Fatalf("unexpected resolved: %#v", r3) + } +} + +func TestNodeStore_ConvertNode_PageToSection_MovesToIndex(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + entry := &PageNode{ID: "p1", Slug: "p", Title: "P", Kind: NodeKindPage, Parent: root} + + file := filepath.Join(tmp, "root", "p.md") + 
mustWriteFile(t, file, "# hi", 0o644) + + if err := store.ConvertNode(entry, NodeKindSection); err != nil { + t.Fatalf("ConvertNode(page->section): %v", err) + } + + index := filepath.Join(tmp, "root", "p", "index.md") + if _, err := os.Stat(index); err != nil { + t.Fatalf("expected index at %s", index) + } + if _, err := os.Stat(file); !os.IsNotExist(err) { + t.Fatalf("expected old file removed") + } +} + +func TestNodeStore_ConvertNode_SectionToPage_RejectsNonEmptyFolder(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + entry := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + dir := filepath.Join(tmp, "root", "docs") + mustMkdir(t, dir) + mustWriteFile(t, filepath.Join(dir, "index.md"), "# idx", 0o644) + mustWriteFile(t, filepath.Join(dir, "other.txt"), "nope", 0o644) + + err := store.ConvertNode(entry, NodeKindPage) + if err == nil { + t.Fatalf("expected ConvertNotAllowedError") + } + var cna *ConvertNotAllowedError + if !errors.As(err, &cna) { + t.Fatalf("expected ConvertNotAllowedError, got %T: %v", err, err) + } +} + +func TestNodeStore_ConvertNode_SectionToPage_WithIndex_MovesAndRemovesFolder(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + entry := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + dir := filepath.Join(tmp, "root", "docs") + mustMkdir(t, dir) + mustWriteFile(t, filepath.Join(dir, "index.md"), "# idx", 0o644) + + if err := store.ConvertNode(entry, NodeKindPage); err != nil { + t.Fatalf("ConvertNode(section->page): %v", err) + } + + pageFile := filepath.Join(tmp, "root", "docs.md") + if _, err := os.Stat(pageFile); err != nil { + t.Fatalf("expected page file: %v", err) + } + if _, err := os.Stat(dir); !os.IsNotExist(err) { + t.Fatalf("expected folder 
removed") + } +} + +func TestNodeStore_ConvertNode_SectionToPage_NoIndex_CreatesEmptyPageWithFM(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + entry := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + dir := filepath.Join(tmp, "root", "docs") + mustMkdir(t, dir) + // empty folder, no index.md + + if err := store.ConvertNode(entry, NodeKindPage); err != nil { + t.Fatalf("ConvertNode(section->page no index): %v", err) + } + + pageFile := filepath.Join(tmp, "root", "docs.md") + raw := string(mustRead(t, pageFile)) + fm, _, has, err := ParseFrontmatter(raw) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) + } + if !has || fm.LeafWikiID != "s1" || fm.LeafWikiTitle != "Docs" { + t.Fatalf("unexpected fm: %#v", fm) + } + if _, err := os.Stat(dir); !os.IsNotExist(err) { + t.Fatalf("expected folder removed") + } +} + +func mustRead(t *testing.T, path string) []byte { + t.Helper() + b, err := os.ReadFile(path) + if err != nil { + t.Fatalf("read %s: %v", path, err) + } + return b +} diff --git a/internal/core/tree/page_node.go b/internal/core/tree/page_node.go index 20bf0d1d..8ee28633 100644 --- a/internal/core/tree/page_node.go +++ b/internal/core/tree/page_node.go @@ -10,6 +10,13 @@ type PageMetadata struct { LastAuthorID string `json:"lastAuthorId"` } +type NodeKind string + +const ( + NodeKindPage NodeKind = "page" + NodeKindSection NodeKind = "section" +) + // PageNode represents a single node in the tree // It has an ID, a parent, a path, and children // The ID is a unique identifier for the entry @@ -21,6 +28,7 @@ type PageNode struct { Position int `json:"position"` // Position is the position of the entry Parent *PageNode `json:"-"` + Kind NodeKind `json:"kind"` // Kind is the kind of the node (page or folder) Metadata PageMetadata `json:"metadata"` // Metadata holds metadata about the page } diff --git 
a/internal/core/tree/page_store.go b/internal/core/tree/page_store.go deleted file mode 100644 index 5df35a92..00000000 --- a/internal/core/tree/page_store.go +++ /dev/null @@ -1,337 +0,0 @@ -package tree - -import ( - "encoding/json" - "errors" - "fmt" - "io" - "os" - "path" - "strings" - - "github.com/perber/wiki/internal/core/shared" -) - -type PageStore struct { - storageDir string -} - -func NewPageStore(storageDir string) *PageStore { - return &PageStore{ - storageDir: storageDir, - } -} - -func (f *PageStore) LoadTree(filename string) (*PageNode, error) { - fullPath := path.Join(f.storageDir, filename) - - // check if file exists - if _, err := os.Stat(fullPath); os.IsNotExist(err) { - return &PageNode{ - ID: "root", - Slug: "root", - Title: "root", - Parent: nil, - Position: 0, - Children: []*PageNode{}, - }, nil - } - - file, err := os.Open(fullPath) - if err != nil { - return nil, fmt.Errorf("could not open tree file") - } - defer file.Close() - data, err := io.ReadAll(file) - - if err != nil { - return nil, fmt.Errorf("could not read tree file") - } - - tree := &PageNode{} - if err := json.Unmarshal(data, tree); err != nil { - return nil, fmt.Errorf("could not unmarshal tree data") - } - - // assigns parent to children - f.assignParentToChildren(tree) - - return tree, nil -} - -func (f *PageStore) assignParentToChildren(parent *PageNode) { - for _, child := range parent.Children { - child.Parent = parent - f.assignParentToChildren(child) - } -} - -func (f *PageStore) SaveTree(filename string, tree *PageNode) error { - if tree == nil { - return errors.New("a tree is required") - } - - fullPath := path.Join(f.storageDir, filename) - - data, err := json.Marshal(tree) - if err != nil { - return fmt.Errorf("could not marshal tree: %v", err) - } - - if err := shared.WriteFileAtomic(fullPath, data, 0o644); err != nil { - return fmt.Errorf("could not atomically write tree file: %v", err) - } - - return nil -} - -func (f *PageStore) CreatePage(parentEntry 
*PageNode, newEntry *PageNode) error { - if parentEntry == nil { - return errors.New("a parent entry is required") - } - - if newEntry == nil { - return errors.New("a new entry is required") - } - - // Retrieving the path of the parent entry - parentPath := path.Join(f.storageDir, GeneratePathFromPageNode(parentEntry)) - - if err := EnsurePageIsFolder(f.storageDir, GeneratePathFromPageNode(parentEntry)); err != nil { - return fmt.Errorf("could not prepare parent folder: %w", err) - } - - // Check if the folder exists - if _, err := os.Stat(parentPath); os.IsNotExist(err) { - if err := os.MkdirAll(parentPath, 0755); err != nil { - return fmt.Errorf("could not create folder: %v", err) - } - // Create an empty index.md file / Fallback! - indexPath := path.Join(parentPath, "index.md") - fm := Frontmatter{LeafWikiID: parentEntry.ID, LeafWikiTitle: parentEntry.Title} - content, err := BuildMarkdownWithFrontmatter(fm, "") - if err != nil { - return fmt.Errorf("could not build markdown with frontmatter: %v", err) - } - if err := shared.WriteFileAtomic(indexPath, []byte(content), 0o644); err != nil { - return fmt.Errorf("could not create index file: %v", err) - } - } - - // Now we can create the new entry as a file in the parent folder - newFilename := path.Join(parentPath, newEntry.Slug+".md") - if _, err := os.Stat(newFilename); err == nil { - // The file already exists - return fmt.Errorf("file already exists: %v", err) - } - - // Create the file - fm := Frontmatter{LeafWikiID: newEntry.ID} - content, err := BuildMarkdownWithFrontmatter(fm, "# "+newEntry.Title+"\n") - if err != nil { - return fmt.Errorf("could not build markdown with frontmatter: %v", err) - } - if err := shared.WriteFileAtomic(newFilename, []byte(content), 0o644); err != nil { - return fmt.Errorf("could not create file: %v", err) - } - return nil -} - -func (f *PageStore) DeletePage(entry *PageNode) error { - if entry == nil { - return errors.New("an entry is required") - } - - // Retrieving the path of 
the entry - entryPath := path.Join(f.storageDir, GeneratePathFromPageNode(entry)) - - // Check if the entry is a folder - if info, err := os.Stat(entryPath); err == nil && info.IsDir() { - // Delete the folder - if err := os.RemoveAll(entryPath); err != nil { - return fmt.Errorf("could not delete folder: %v", err) - } - } - - // Check if the entry is a file - if _, err := os.Stat(entryPath + ".md"); err == nil { - // Delete the file - if err := os.Remove(entryPath + ".md"); err != nil { - return fmt.Errorf("could not delete file: %v", err) - } - } - - if entry.Parent != nil { - _ = FoldPageFolderIfEmpty(f.storageDir, GeneratePathFromPageNode(entry.Parent)) - } - - return nil -} - -func (f *PageStore) UpdatePage(entry *PageNode, slug string, content string) error { - if entry == nil { - return errors.New("an entry is required") - } - - filePath, err := f.getFilePath(entry) - if err != nil { - return fmt.Errorf("could not get file path: %v", err) - } - - // Check if the file exists - file, err := os.Stat(filePath) - if err != nil { - return fmt.Errorf("file not found: %v", err) - } - - mode := file.Mode() - - // Update the file content - fm := Frontmatter{LeafWikiID: entry.ID, LeafWikiTitle: entry.Title} - contentWithFM, err := BuildMarkdownWithFrontmatter(fm, content) - if err != nil { - return fmt.Errorf("could not build markdown with frontmatter: %v", err) - } - if err := shared.WriteFileAtomic(filePath, []byte(contentWithFM), mode); err != nil { - return fmt.Errorf("could not write to file atomically: %v", err) - } - - // We need to check if the slug has changed - if entry.Slug != slug { - // Get the old path - oldPath := path.Join(f.storageDir, GeneratePathFromPageNode(entry)) - // Split the path - parts := strings.Split(oldPath, "/") - // Create the new path - newPath := strings.Join(parts[:len(parts)-1], "/") + "/" + slug - // Check if the old path is a directory - // If it is a directory, we need to rename the directory - // If it is a file, we need to rename 
the file - if _, err := os.Stat(oldPath); err == nil { - // Rename the directory - if err := os.Rename(oldPath, newPath); err != nil { - return fmt.Errorf("could not rename directory: %v", err) - } - - return nil - } - // Rename the file - if err := os.Rename(oldPath+".md", newPath+".md"); err != nil { - return fmt.Errorf("could not rename file: %v", err) - } - } - - return nil -} - -// MovePage moves a page to a other node -func (f *PageStore) MovePage(entry *PageNode, parentEntry *PageNode) error { - if entry == nil { - return errors.New("an entry is required") - } - - if parentEntry == nil { - return errors.New("a parent entry is required") - } - - // Retrieving the path of the entry - parentPath := path.Join(f.storageDir, GeneratePathFromPageNode(parentEntry)) - - if err := EnsurePageIsFolder(f.storageDir, GeneratePathFromPageNode(parentEntry)); err != nil { - return fmt.Errorf("could not convert parent to folder: %w", err) - } - - // now we have created the folder, we can move the entry to the new parent - currentPath := path.Join(f.storageDir, GeneratePathFromPageNode(entry)) - - // Check if the entry is a file - var src, dest string - if _, err := os.Stat(currentPath + ".md"); err == nil { - src = currentPath + ".md" - dest = path.Join(parentPath, entry.Slug+".md") - } else { - src = currentPath - dest = path.Join(parentPath, entry.Slug) - } - - // Move the file to the parentPath - if err := os.Rename(src, dest); err != nil { - return fmt.Errorf("could not move file: %v", err) - } - - if entry.Parent != nil { - _ = FoldPageFolderIfEmpty(f.storageDir, GeneratePathFromPageNode(entry.Parent)) - } - - return nil -} - -// ReadPageRaw returns the raw content of a page including frontmatter -func (f *PageStore) ReadPageRaw(entry *PageNode) (string, error) { - filePath, err := f.getFilePath(entry) - if err != nil { - return "", err - } - raw, err := os.ReadFile(filePath) - if err != nil { - return "", err - } - return string(raw), nil -} - -// ReadPageContent 
returns the content of a page -func (f *PageStore) ReadPageContent(entry *PageNode) (string, error) { - if entry == nil { - return "", errors.New("an entry is required") - } - - filePath, err := f.getFilePath(entry) - if err != nil { - return "", fmt.Errorf("could not get file path: %v", err) - } - - // Check if the file exists - if _, err := os.Stat(filePath); err != nil { - return "", fmt.Errorf("file not found: %v", err) - } - - // Read the file - file, err := os.Open(filePath) - if err != nil { - return "", fmt.Errorf("could not open file: %v", err) - } - defer file.Close() - - raw, err := io.ReadAll(file) - if err != nil { - return "", fmt.Errorf("could not read file: %v", err) - } - - _, content, _, err := ParseFrontmatter(string(raw)) - if err != nil { - return string(raw), err - } - - return content, nil -} - -func (f *PageStore) getFilePath(entry *PageNode) (string, error) { - if entry == nil { - return "", errors.New("an entry is required") - } - - // Retrieving the path of the entry - entryPath := path.Join(f.storageDir, GeneratePathFromPageNode(entry)) - - // Check if the entry is a file - if _, err := os.Stat(entryPath + ".md"); err == nil { - return entryPath + ".md", nil - } - - // Check if the entry is a folder - if info, err := os.Stat(entryPath); err == nil && info.IsDir() { - return path.Join(entryPath, "index.md"), nil - } - - return "", ErrFileNotFound -} diff --git a/internal/core/tree/page_store_test.go b/internal/core/tree/page_store_test.go deleted file mode 100644 index 216aea99..00000000 --- a/internal/core/tree/page_store_test.go +++ /dev/null @@ -1,659 +0,0 @@ -package tree - -import ( - "os" - "path/filepath" - "testing" -) - -func TestPageStore_CreatePage(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - root := &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{}, - } - - page := &PageNode{ - ID: "page-1", - Title: "Hello World", - Slug: "hello-world", - Parent: root, - } - - 
err := store.CreatePage(root, page) - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - - // Prüfen, ob Datei existiert - expectedFile := filepath.Join(tmpDir, "root", "hello-world.md") - if _, err := os.Stat(expectedFile); os.IsNotExist(err) { - t.Errorf("Expected file was not created: %v", expectedFile) - } - - // Optional: Inhalt checken - content, err := os.ReadFile(expectedFile) - if err != nil { - t.Fatalf("Failed to read file: %v", err) - } - - expected := "---\nleafwiki_id: page-1\n---\n# Hello World\n" - if string(content) != expected { - t.Errorf("Unexpected file content. Got: %q, Expected: %q", string(content), expected) - } -} - -func TestPageStore_CreatePage_WithFallbackCreatesIndex(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - // Simuliere vorhandene root.md-Datei (die in Folder + index.md migriert werden soll) - rootFile := filepath.Join(tmpDir, "root.md") - if err := os.WriteFile(rootFile, []byte("# Root File"), 0644); err != nil { - t.Fatalf("Failed to create root.md: %v", err) - } - - root := &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{}, - } - - page := &PageNode{ - ID: "page-2", - Title: "Subpage", - Slug: "subpage", - Parent: root, - } - - err := store.CreatePage(root, page) - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - - // Erwartet: root/index.md existiert - indexPath := filepath.Join(tmpDir, "root", "index.md") - if _, err := os.Stat(indexPath); os.IsNotExist(err) { - t.Errorf("Expected fallback index.md file not found: %v", indexPath) - } -} - -func TestPageStore_CreatePage_DeepHierarchy(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - // Baue tiefe Baumstruktur: root → arch → project1 - root := &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{}, - } - arch := &PageNode{ - ID: "arch", - Title: "Architecture", - Slug: "architecture", - Parent: root, - Children: []*PageNode{}, - } - 
project := &PageNode{ - ID: "project1", - Title: "Project One", - Slug: "project-one", - Parent: arch, - Children: []*PageNode{}, - } - page := &PageNode{ - ID: "final", - Title: "Deep Content", - Slug: "deep-content", - Parent: project, - } - - // Füge Struktur hinzu (simulate parent nodes) - root.Children = []*PageNode{arch} - arch.Children = []*PageNode{project} - project.Children = []*PageNode{} - - // Versuche, Page in tiefem Pfad anzulegen - err := store.CreatePage(project, page) - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - - // Prüfe, ob Datei wirklich existiert - expectedPath := filepath.Join(tmpDir, "root", "architecture", "project-one", "deep-content.md") - if _, err := os.Stat(expectedPath); os.IsNotExist(err) { - t.Errorf("Expected file not found at deep path: %s", expectedPath) - } -} - -func TestPageStore_CreatePage_NilChecks(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - validParent := &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{}, - } - - // Fall 1: Parent ist nil - err := store.CreatePage(nil, &PageNode{ID: "1", Title: "Page", Slug: "page"}) - if err == nil { - t.Error("Expected error when parent is nil, got nil") - } - - // Fall 2: Page ist nil - err = store.CreatePage(validParent, nil) - if err == nil { - t.Error("Expected error when page is nil, got nil") - } -} - -func TestPageStore_DeletePage_File(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "p1", - Title: "Page", - Slug: "page", - } - - // Erstelle Datei manuell - filePath := filepath.Join(tmpDir, "page.md") - if err := os.WriteFile(filePath, []byte("# Page"), 0644); err != nil { - t.Fatalf("Failed to create page file: %v", err) - } - - // DeletePage aufrufen - if err := store.DeletePage(page); err != nil { - t.Fatalf("DeletePage failed: %v", err) - } - - // Prüfen, ob Datei weg ist - if _, err := os.Stat(filePath); !os.IsNotExist(err) { - 
t.Errorf("Expected file to be deleted: %v", filePath) - } -} - -func TestPageStore_DeletePage_Directory(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - // Seite mit Ordnerstruktur - page := &PageNode{ - ID: "p2", - Title: "Folder Page", - Slug: "folder-page", - } - - dirPath := filepath.Join(tmpDir, "folder-page") - if err := os.MkdirAll(dirPath, 0755); err != nil { - t.Fatalf("Failed to create folder: %v", err) - } - - // Simuliere index.md - indexFile := filepath.Join(dirPath, "index.md") - if err := os.WriteFile(indexFile, []byte("# Index"), 0644); err != nil { - t.Fatalf("Failed to create index.md: %v", err) - } - - // DeletePage aufrufen - if err := store.DeletePage(page); err != nil { - t.Fatalf("DeletePage failed: %v", err) - } - - // Ordner darf nicht mehr existieren - if _, err := os.Stat(dirPath); !os.IsNotExist(err) { - t.Errorf("Expected folder to be deleted: %v", dirPath) - } -} - -func TestPageStore_DeletePage_NilEntry(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - err := store.DeletePage(nil) - if err == nil { - t.Errorf("Expected error when passing nil entry, got none") - } -} - -func TestPageStore_UpdatePage_ContentOnly(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "p1", - Title: "My Page", - Slug: "my-page", - } - - filePath := filepath.Join(tmpDir, "my-page.md") - if err := os.WriteFile(filePath, []byte("# Old Content"), 0644); err != nil { - t.Fatalf("Failed to create page file: %v", err) - } - - newContent := "# New Content" - err := store.UpdatePage(page, "my-page", newContent) - if err != nil { - t.Fatalf("UpdatePage failed: %v", err) - } - - data, err := os.ReadFile(filePath) - if err != nil { - t.Fatalf("Could not read updated file: %v", err) - } - - expectedNewContent := "---\nleafwiki_id: p1\nleafwiki_title: My Page\n---\n# New Content" - - if string(data) != expectedNewContent { - t.Errorf("Expected content %q, got %q", 
expectedNewContent, string(data)) - } -} - -func TestPageStore_UpdatePage_WithSlugChange_File(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "p2", - Title: "Old Page", - Slug: "old-page", - } - - oldPath := filepath.Join(tmpDir, "old-page.md") - if err := os.WriteFile(oldPath, []byte("# Old Page"), 0644); err != nil { - t.Fatalf("Failed to create old page: %v", err) - } - - newSlug := "new-page" - err := store.UpdatePage(page, newSlug, "# Updated Content") - if err != nil { - t.Fatalf("UpdatePage failed: %v", err) - } - - newPath := filepath.Join(tmpDir, "new-page.md") - if _, err := os.Stat(newPath); os.IsNotExist(err) { - t.Errorf("Expected renamed file at: %v", newPath) - } -} - -func TestPageStore_UpdatePage_WithSlugChange_Directory(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "p3", - Title: "Old Dir", - Slug: "old-dir", - } - - oldDir := filepath.Join(tmpDir, "old-dir") - if err := os.MkdirAll(oldDir, 0755); err != nil { - t.Fatalf("Failed to create old directory: %v", err) - } - - indexFile := filepath.Join(oldDir, "index.md") - if err := os.WriteFile(indexFile, []byte("# Index"), 0644); err != nil { - t.Fatalf("Failed to create index.md: %v", err) - } - - newSlug := "new-dir" - err := store.UpdatePage(page, newSlug, "# New Index") - if err != nil { - t.Fatalf("UpdatePage failed: %v", err) - } - - newDir := filepath.Join(tmpDir, "new-dir") - if _, err := os.Stat(newDir); os.IsNotExist(err) { - t.Errorf("Expected renamed directory: %v", newDir) - } -} - -func TestPageStore_UpdatePage_InvalidEntry(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - err := store.UpdatePage(nil, "slug", "content") - if err == nil { - t.Errorf("Expected error when updating nil entry, got none") - } -} - -func TestPageStore_UpdatePage_FileNotFound(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: 
"p4", - Title: "Ghost Page", - Slug: "ghost", - } - - err := store.UpdatePage(page, "ghost", "# Nothing here") - if err == nil { - t.Errorf("Expected error when updating non-existent file, got none") - } -} - -func TestPageStore_MovePage_FileToFolder(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ID: "1", Title: "Page A", Slug: "a"} - pagePath := filepath.Join(tmpDir, "a.md") - if err := os.WriteFile(pagePath, []byte("# Page A"), 0644); err != nil { - t.Fatalf("Setup failed: %v", err) - } - - parent := &PageNode{ID: "root", Title: "Root", Slug: "root"} - parentFile := filepath.Join(tmpDir, "root.md") - if err := os.WriteFile(parentFile, []byte("# Root Page"), 0644); err != nil { - t.Fatalf("Failed to create root.md: %v", err) - } - - err := store.MovePage(page, parent) - if err != nil { - t.Fatalf("MovePage failed: %v", err) - } - - newPath := filepath.Join(tmpDir, "root", "a.md") - if _, err := os.Stat(newPath); os.IsNotExist(err) { - t.Errorf("Expected file to be moved to: %v", newPath) - } -} - -func TestPageStore_MovePage_FolderToFolder(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - // Ordnerstruktur erstellen - page := &PageNode{ID: "2", Title: "Docs", Slug: "docs"} - pagePath := filepath.Join(tmpDir, "docs") - if err := os.MkdirAll(pagePath, 0755); err != nil { - t.Fatalf("Failed to create source folder: %v", err) - } - - // Zielordner - target := &PageNode{ID: "root", Title: "Root", Slug: "root"} - targetPath := filepath.Join(tmpDir, "root") - if err := os.MkdirAll(targetPath, 0755); err != nil { - t.Fatalf("Failed to create target folder: %v", err) - } - - err := store.MovePage(page, target) - if err != nil { - t.Fatalf("MovePage failed: %v", err) - } - - newPath := filepath.Join(targetPath, "docs") - if _, err := os.Stat(newPath); os.IsNotExist(err) { - t.Errorf("Expected moved folder not found at: %v", newPath) - } -} - -func TestPageStore_MovePage_InvalidNilInput(t *testing.T) { 
- store := NewPageStore(t.TempDir()) - - err := store.MovePage(nil, nil) - if err == nil { - t.Errorf("Expected error on nil inputs, got none") - } -} - -func TestPageStore_MovePage_PreventCircularMove(t *testing.T) { - // Setup - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - // Erzeuge einfache Baumstruktur: root → parent → child - root := &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{}, - } - - parent := &PageNode{ - ID: "parent", - Title: "Parent", - Slug: "parent", - Parent: root, - Children: []*PageNode{}, - } - - child := &PageNode{ - ID: "child", - Title: "Child", - Slug: "child", - Parent: parent, - Children: []*PageNode{}, - } - - root.Children = []*PageNode{parent} - parent.Children = []*PageNode{child} - - // 🧪 Versuch: parent in child verschieben → sollte fehlschlagen (wenn später implementiert) - err := store.MovePage(parent, child) - - // Aktuell kein Check implementiert → nur Hinweis - if err == nil { - t.Log("[TODO] Expected failure when moving parent into child (circular), but got none.") - // Optionale manuelle Fehlerausgabe, damit es sichtbar bleibt - t.Fail() - } -} - -func TestPageStore_ReadPageContent_File(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "read1", - Title: "Read Me", - Slug: "read-me", - } - - filePath := filepath.Join(tmpDir, "read-me.md") - expected := "# Hello from file" - if err := os.WriteFile(filePath, []byte(expected), 0644); err != nil { - t.Fatalf("Failed to write test file: %v", err) - } - - content, err := store.ReadPageContent(page) - if err != nil { - t.Fatalf("ReadPageContent failed: %v", err) - } - - if content != expected { - t.Errorf("Expected content %q, got %q", expected, content) - } -} - -func TestPageStore_ReadPageContent_Index(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "read2", - Title: "Folder Page", - Slug: "folder-page", - } - - folder := 
filepath.Join(tmpDir, "folder-page") - if err := os.MkdirAll(folder, 0755); err != nil { - t.Fatalf("Failed to create folder: %v", err) - } - - indexPath := filepath.Join(folder, "index.md") - expected := "# Hello from index" - if err := os.WriteFile(indexPath, []byte(expected), 0644); err != nil { - t.Fatalf("Failed to write index file: %v", err) - } - - content, err := store.ReadPageContent(page) - if err != nil { - t.Fatalf("ReadPageContent failed: %v", err) - } - - if content != expected { - t.Errorf("Expected content %q, got %q", expected, content) - } -} - -func TestPageStore_ReadPageContent_NotFound(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "read3", - Title: "Missing Page", - Slug: "missing", - } - - _, err := store.ReadPageContent(page) - if err == nil { - t.Errorf("Expected error for missing file, got none") - } -} - -func TestPageStore_SaveAndLoadTree_AssignsParent(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - tree := &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{ - { - ID: "child-1", - Title: "Child 1", - Slug: "child-1", - Children: []*PageNode{ - { - ID: "grandchild-1", - Title: "Grandchild 1", - Slug: "grandchild-1", - }, - }, - }, - }, - } - - if err := store.SaveTree("tree.json", tree); err != nil { - t.Fatalf("SaveTree failed: %v", err) - } - - loaded, err := store.LoadTree("tree.json") - if err != nil { - t.Fatalf("LoadTree failed: %v", err) - } - - child := loaded.Children[0] - grandchild := child.Children[0] - - if child.Parent == nil || child.Parent.ID != loaded.ID { - t.Errorf("Child node's parent not assigned correctly") - } - - if grandchild.Parent == nil || grandchild.Parent.ID != child.ID { - t.Errorf("Grandchild node's parent not assigned correctly") - } -} - -func TestPageStore_LoadTree_MissingFile(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - tree, err := 
store.LoadTree("nonexistent.json") - if err != nil { - t.Fatalf("Expected default tree, got error: %v", err) - } - - if tree.ID != "root" { - t.Errorf("Expected root ID, got %q", tree.ID) - } -} - -func TestPageStore_LoadTree_InvalidJSON(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - path := filepath.Join(tmpDir, "tree.json") - if err := os.WriteFile(path, []byte("invalid-json"), 0644); err != nil { - t.Fatalf("Failed to write corrupt file: %v", err) - } - - _, err := store.LoadTree("tree.json") - if err == nil { - t.Error("Expected error when loading invalid JSON, got none") - } -} - -func TestPageStore_getFilePath(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - // Case 1: .md file exists - fileNode := &PageNode{ - ID: "file1", - Slug: "page", - Title: "Page", - } - filePath := filepath.Join(tmpDir, "page.md") - if err := os.WriteFile(filePath, []byte("Content"), 0644); err != nil { - t.Fatalf("Failed to create .md file: %v", err) - } - - path, err := store.getFilePath(fileNode) - if err != nil { - t.Fatalf("Expected file path for .md file, got error: %v", err) - } - if path != filePath { - t.Errorf("Unexpected path. Got: %s, Expected: %s", path, filePath) - } - - // Case 2: Directory with index.md - dirNode := &PageNode{ - ID: "dir1", - Slug: "folder", - Title: "Folder", - } - dirPath := filepath.Join(tmpDir, "folder") - indexPath := filepath.Join(dirPath, "index.md") - if err := os.MkdirAll(dirPath, 0755); err != nil { - t.Fatalf("Failed to create folder: %v", err) - } - if err := os.WriteFile(indexPath, []byte("Index content"), 0644); err != nil { - t.Fatalf("Failed to write index.md: %v", err) - } - - path, err = store.getFilePath(dirNode) - if err != nil { - t.Fatalf("Expected index.md path, got error: %v", err) - } - if path != indexPath { - t.Errorf("Unexpected path. 
Got: %s, Expected: %s", path, indexPath) - } - - // Case 3: Not found - invalidNode := &PageNode{ - ID: "missing", - Slug: "does-not-exist", - Title: "Missing", - } - _, err = store.getFilePath(invalidNode) - if err == nil { - t.Errorf("Expected error for missing file, got nil") - } -} diff --git a/internal/core/tree/tree_service.go b/internal/core/tree/tree_service.go index 732e60da..eb5a92c5 100644 --- a/internal/core/tree/tree_service.go +++ b/internal/core/tree/tree_service.go @@ -19,7 +19,7 @@ type TreeService struct { storageDir string treeFilename string tree *PageNode - store *PageStore + store *NodeStore log *slog.Logger mu sync.RWMutex @@ -31,7 +31,7 @@ func NewTreeService(storageDir string) *TreeService { storageDir: storageDir, treeFilename: "tree.json", tree: nil, - store: NewPageStore(storageDir), + store: NewNodeStore(storageDir), log: slog.Default().With("component", "TreeService"), } } @@ -110,25 +110,34 @@ func (t *TreeService) migrateToV1() error { // Read creation and modification times from the filesystem // and set them in the metadata - filePath, err := t.store.getFilePath(node) + + r, err := t.store.resolveNode(node) if err != nil { - // Log the error and continue - // We still want to backfill metadata for other nodes - // but we cannot do it for this node - t.log.Error("Could not get file path for node", "nodeID", node.ID, "error", err) + // Log and continue (same behavior as before) + t.log.Error("Could not resolve node for metadata backfill", "nodeID", node.ID, "error", err) return nil } + // Prefer the real on-disk object: + // - Page => .md + // - Folder with content => /index.md + // - Folder without content => use folder mtime + statPath := r.FilePath + if r.Kind == NodeKindSection && !r.HasContent { + statPath = r.DirPath + } + // The default value is set to now createdAt := time.Now().UTC() updatedAt := time.Now().UTC() - // Try to read file info; on error, log non-NotExist issues and keep defaults - if info, err := 
os.Stat(filePath); err == nil { - createdAt = info.ModTime().UTC() - updatedAt = info.ModTime().UTC() - } else if !os.IsNotExist(err) { - t.log.Error("Could not stat file for node", "nodeID", node.ID, "filePath", filePath, "error", err) + if statPath != "" { + if info, err := os.Stat(statPath); err == nil { + createdAt = info.ModTime().UTC() + updatedAt = info.ModTime().UTC() + } else if !os.IsNotExist(err) { + t.log.Error("Could not stat node for metadata", "nodeID", node.ID, "path", statPath, "error", err) + } } node.Metadata = PageMetadata{ @@ -155,7 +164,13 @@ func (t *TreeService) migrateToV1() error { // migrateToV2 migrates the tree to the v2 schema // Adds frontmatter to all existing pages if missing +// Adds kind to all nodes func (t *TreeService) migrateToV2() error { + if t.tree == nil { + return ErrTreeNotLoaded + } + t.backfillKindFromFSLocked() + // Traverse all pages and add frontmatter if missing var addFrontmatter func(node *PageNode) error addFrontmatter = func(node *PageNode) error { @@ -174,14 +189,14 @@ func (t *TreeService) migrateToV2() error { return nil } t.log.Error("Could not read page content for node", "nodeID", node.ID, "error", err) - return fmt.Errorf("could not read page content for node %s: %v", node.ID, err) + return fmt.Errorf("could not read page content for node %s: %w", node.ID, err) } // Parse the frontmatter fm, body, has, err := ParseFrontmatter(content) if err != nil { t.log.Error("Could not parse frontmatter for node", "nodeID", node.ID, "error", err) - return fmt.Errorf("could not parse frontmatter for node %s: %v", node.ID, err) + return fmt.Errorf("could not parse frontmatter for node %s: %w", node.ID, err) } // Decide if we need to change anything @@ -213,10 +228,9 @@ func (t *TreeService) migrateToV2() error { return fmt.Errorf("could not build markdown with frontmatter for node %s: %w", node.ID, err) } - filePath, err := t.store.getFilePath(node) + filePath, err := t.store.contentPathForNodeWrite(node) if err != nil 
{ - t.log.Error("could not get file path", "nodeID", node.ID, "error", err) - return fmt.Errorf("could not get file path for node %s: %w", node.ID, err) + return fmt.Errorf("could not determine content path for node %s: %w", node.ID, err) } if err := writeFileAtomic(filePath, []byte(newContent), 0o644); err != nil { @@ -238,10 +252,6 @@ func (t *TreeService) migrateToV2() error { return nil } - if t.tree == nil { - return ErrTreeNotLoaded - } - // start the recursion from the children of the root for _, child := range t.tree.Children { if err := addFrontmatter(child); err != nil { @@ -253,12 +263,65 @@ func (t *TreeService) migrateToV2() error { return nil } -// SaveTree saves the tree to the storage directory -func (t *TreeService) SaveTree() error { +func (t *TreeService) backfillKindFromFSLocked() { + if t.tree == nil { + return + } + t.tree.Kind = NodeKindSection + + var walk func(n *PageNode) + walk = func(n *PageNode) { + if n == nil { + return + } + + // Root skip + if n.ID != "root" { + // Nur backfillen, wenn Kind fehlt/unknown + if n.Kind != NodeKindPage && n.Kind != NodeKindSection { + r, err := t.store.resolveNode(n) + if err == nil { + n.Kind = r.Kind + } else { + // Fallback-Heuristik, wenn auf Disk nichts existiert + if n.HasChildren() { + n.Kind = NodeKindSection + } else { + n.Kind = NodeKindPage + } + t.log.Warn("could not resolve node on disk; kind backfilled by heuristic", + "nodeID", n.ID, "slug", n.Slug, "err", err, "kind", n.Kind) + } + } + } + + for _, ch := range n.Children { + walk(ch) + } + } + + for _, ch := range t.tree.Children { + walk(ch) + } +} + +func (t *TreeService) withLockedTree(fn func() error) error { t.mu.Lock() defer t.mu.Unlock() - return t.saveTreeLocked() + return fn() +} + +func (t *TreeService) withRLockedTree(fn func() error) error { + t.mu.RLock() + defer t.mu.RUnlock() + + return fn() +} + +// SaveTree saves the tree to the storage directory +func (t *TreeService) SaveTree() error { + return 
t.withLockedTree(t.saveTreeLocked) } func (t *TreeService) saveTreeLocked() error { @@ -266,98 +329,74 @@ func (t *TreeService) saveTreeLocked() error { return t.store.SaveTree(t.treeFilename, t.tree) } -// Create Page adds a new page to the tree -func (t *TreeService) CreatePage(userID string, parentID *string, title string, slug string) (*string, error) { - t.mu.Lock() - defer t.mu.Unlock() - - result, err := t.createPageLocked(userID, parentID, title, slug) - if err != nil { - return nil, err - } - - if err := t.saveTreeLocked(); err != nil { - return nil, fmt.Errorf("could not save tree: %v", err) - } +// Create Node adds a new node to the tree +func (t *TreeService) CreateNode(userID string, parentID *string, title string, slug string, nodeKind *NodeKind) (*string, error) { + var result *string + err := t.withLockedTree(func() error { + var err error + result, err = t.createNodeLocked(userID, parentID, title, slug, nodeKind) + return err + }) - return result, nil + return result, err } -// createPageLocked creates a new page under the given parent +// createNodeLocked creates a new node under the given parent // Lock must be held by the caller -func (t *TreeService) createPageLocked(userID string, parentID *string, title string, slug string) (*string, error) { - +func (t *TreeService) createNodeLocked(userID string, parentID *string, title string, slug string, kind *NodeKind) (*string, error) { if t.tree == nil { return nil, ErrTreeNotLoaded } - if parentID == nil { - // The entry needs to be added to the root - root := t.tree - if root == nil { - return nil, ErrParentNotFound - } - - if root.ChildAlreadyExists(slug) { - return nil, ErrPageAlreadyExists - } + // Decide which kind we create + k := NodeKindPage + if kind != nil { + k = *kind + } - // Generate a unique ID for the new page - id, err := shared.GenerateUniqueID() + // Resolve the parent + parent := t.tree + if parentID != nil && *parentID != "" && *parentID != "root" { + var err error + parent, err 
= t.findPageByIDLocked(t.tree.Children, *parentID) if err != nil { - return nil, fmt.Errorf("could not generate unique ID: %v", err) - } - - now := time.Now().UTC() - - entry := &PageNode{ - ID: id, - Title: title, - Parent: root, - Slug: slug, - Position: len(root.Children), // Set the position to the end of the list - Children: []*PageNode{}, - Metadata: PageMetadata{ - CreatedAt: now, - UpdatedAt: now, - CreatorID: userID, - LastAuthorID: userID, - }, - } - - if err := t.store.CreatePage(root, entry); err != nil { - return nil, fmt.Errorf("could not create page entry: %v", err) + return nil, ErrParentNotFound } + } - root.Children = append(root.Children, entry) - - // Store Tree after adding page - // (Saving the tree is now the caller's responsibility) - return &entry.ID, nil + // Check if a child with the same slug already exists + if parent.ChildAlreadyExists(slug) { + return nil, ErrPageAlreadyExists } - // Find the parent page - parent, err := t.findPageByIDLocked(t.tree.Children, *parentID) - if err != nil { - return nil, ErrParentNotFound + // Check if the current parent is a section + // if not, we need to convert it to a section + if parent.Kind != NodeKindSection && parent.ID != "root" { + t.log.Info("converting parent to section", "parentID", parent.ID, "oldKind", parent.Kind, "newKind", NodeKindSection) + if err := t.store.ConvertNode(parent, NodeKindSection); err != nil { + return nil, fmt.Errorf("could not convert parent node: %w", err) + } + parent.Kind = NodeKindSection } - if parent.ChildAlreadyExists(slug) { - return nil, ErrPageAlreadyExists + if parent.Kind != NodeKindSection { + return nil, fmt.Errorf("cannot add child to non-section parent, got %q", parent.Kind) } // Generate a unique ID for the new page id, err := shared.GenerateUniqueID() if err != nil { - return nil, fmt.Errorf("could not generate unique ID: %v", err) + return nil, fmt.Errorf("could not generate unique ID: %w", err) } now := time.Now().UTC() + entry := &PageNode{ ID: id, 
- Slug: slug, Title: title, Parent: parent, + Slug: slug, + Kind: k, Position: len(parent.Children), // Set the position to the end of the list Children: []*PageNode{}, Metadata: PageMetadata{ @@ -368,23 +407,34 @@ func (t *TreeService) createPageLocked(userID string, parentID *string, title st }, } - if err := t.store.CreatePage(parent, entry); err != nil { - return nil, fmt.Errorf("could not create page entry: %v", err) + // Create on disk depending on kind + switch k { + case NodeKindPage: + if err := t.store.CreatePage(parent, entry); err != nil { + return nil, fmt.Errorf("could not create page entry: %w", err) + } + case NodeKindSection: + if err := t.store.CreateSection(parent, entry); err != nil { + return nil, fmt.Errorf("could not create section entry: %w", err) + } } // Add the new page to the parent parent.Children = append(parent.Children, entry) - return &entry.ID, nil } // FindPageByID finds a page in the tree by its ID // If the page is not found, it returns an error func (t *TreeService) FindPageByID(entry []*PageNode, id string) (*PageNode, error) { - t.mu.RLock() - defer t.mu.RUnlock() + var result *PageNode + err := t.withRLockedTree(func() error { + var err error + result, err = t.findPageByIDLocked(entry, id) + return err + }) - return t.findPageByIDLocked(entry, id) + return result, err } // findPageByIDLocked finds a page in the tree by its ID @@ -404,93 +454,144 @@ func (t *TreeService) findPageByIDLocked(entry []*PageNode, id string) (*PageNod return nil, ErrPageNotFound } -// DeletePage deletes a page from the tree -func (t *TreeService) DeletePage(userID string, id string, recursive bool) error { - t.mu.Lock() - defer t.mu.Unlock() +// DeleteNode deletes a node from the tree +func (t *TreeService) DeleteNode(userID string, id string, recursive bool) error { + err := t.withLockedTree(func() error { + if t.tree == nil { + return ErrTreeNotLoaded + } - if t.tree == nil { - return ErrTreeNotLoaded - } + // Find the node to delete + node, err 
:= t.findPageByIDLocked(t.tree.Children, id) + if err != nil { + return ErrPageNotFound + } - // Find the page to delete - page, err := t.findPageByIDLocked(t.tree.Children, id) - if err != nil { - return ErrPageNotFound - } + // Check if node has children + if node.HasChildren() && !recursive { + return ErrPageHasChildren + } - // Check if page has children - if page.HasChildren() && !recursive { - return ErrPageHasChildren - } + // Delete the node from the parent + parent := node.Parent + if parent == nil { + return ErrParentNotFound + } - // Delete the page from the parent - parent := page.Parent - if parent == nil { - return ErrParentNotFound - } + switch node.Kind { + case NodeKindSection: + if err := t.store.DeleteSection(node); err != nil { + return fmt.Errorf("could not delete section entry: %w", err) + } + case NodeKindPage: + if node.HasChildren() { + // This should not happen due to earlier check, but just in case + // Convert to section and delete recursively + t.log.Info("converting page to section for recursive delete", "pageID", node.ID) + if err := t.store.ConvertNode(node, NodeKindSection); err != nil { + return fmt.Errorf("could not convert page to section: %w", err) + } + node.Kind = NodeKindSection + if err := t.store.DeleteSection(node); err != nil { + return fmt.Errorf("could not delete section entry: %w", err) + } + } else { + if err := t.store.DeletePage(node); err != nil { + return fmt.Errorf("could not delete page entry: %w", err) + } + } + default: + return fmt.Errorf("unknown node kind: %v", node.Kind) + } - // Delete the page from the filesystem - if err := t.store.DeletePage(page); err != nil { - return fmt.Errorf("could not delete page entry: %v", err) - } + // Remove the page from the parent + for i, e := range parent.Children { + if e.ID == id { + parent.Children = append(parent.Children[:i], parent.Children[i+1:]...) 
+ break + } + } - // Remove the page from the parent - for i, e := range parent.Children { - if e.ID == id { - parent.Children = append(parent.Children[:i], parent.Children[i+1:]...) - break + t.reindexPositions(parent) + return t.saveTreeLocked() + }) + return err +} + +// UpdateNode updates a node (page/section) in the tree and syncs disk state via NodeStore. +func (t *TreeService) UpdateNode(userID string, id string, title string, slug string, content *string) error { + return t.withLockedTree(func() error { + if t.tree == nil { + return ErrTreeNotLoaded } - } - t.reindexPositions(parent) + // Find node + node, err := t.findPageByIDLocked(t.tree.Children, id) + if err != nil { + return ErrPageNotFound + } - return t.saveTreeLocked() -} + // Slug must be unique under same parent (when changed) + if slug != node.Slug && node.Parent != nil && node.Parent.ChildAlreadyExists(slug) { + return ErrPageAlreadyExists + } -// UpdatePage updates a page in the tree -func (t *TreeService) UpdatePage(userID string, id string, title string, slug string, content string) error { - t.mu.Lock() - defer t.mu.Unlock() + // Kind change? + // This operation is currently disabled to avoid complexity with content migration. + // We need to check if we need it later. + // if kind != nil && *kind != node.Kind { + // // Section -> Page only allowed if no children + // if node.Kind == NodeKindSection && *kind == NodeKindPage && node.HasChildren() { + // return ErrPageHasChildren + // } + + // t.log.Info("changing node kind", "nodeID", node.ID, "oldKind", node.Kind, "newKind", *kind) + // if err := t.store.ConvertNode(node, *kind); err != nil { + // return fmt.Errorf("could not convert node: %w", err) + // } + // node.Kind = *kind + // } + + // Content update? 
+ if content != nil { + t.log.Info("updating node content", "nodeID", node.ID) + if err := t.store.UpsertContent(node, *content); err != nil { + return fmt.Errorf("could not upsert content: %w", err) + } + } - if t.tree == nil { - return ErrTreeNotLoaded - } + // Rename slug on disk (must happen while node still has old slug) + if slug != node.Slug { + t.log.Info("renaming node slug", "nodeID", node.ID, "oldSlug", node.Slug, "newSlug", slug) + if err := t.store.RenameNode(node, slug); err != nil { + return fmt.Errorf("could not rename node: %w", err) + } + node.Slug = slug + } - // Find the page to update - page, err := t.findPageByIDLocked(t.tree.Children, id) - if err != nil { - return ErrPageNotFound - } + // Update title in tree + node.Title = title - // Check if the slug is unique when slug changes! - if slug != page.Slug && page.Parent.ChildAlreadyExists(slug) { - return ErrPageAlreadyExists - } + // Update metadata + node.Metadata.UpdatedAt = time.Now().UTC() + node.Metadata.LastAuthorID = userID - // Update the entry in the filesystem! 
- if err := t.store.UpdatePage(page, slug, content); err != nil { - return fmt.Errorf("could not update page entry: %v", err) - } + // Keep frontmatter in sync *if file exists* (important when title changed but content == nil) + if err := t.store.SyncFrontmatterIfExists(node); err != nil { + return fmt.Errorf("could not sync frontmatter: %w", err) + } + + // Save tree + return t.saveTreeLocked() + }) - // Update the page - page.Title = title - page.Slug = slug - // Update metadata - page.Metadata.UpdatedAt = time.Now().UTC() - page.Metadata.LastAuthorID = userID - // Save the tree - return t.saveTreeLocked() } // GetTree returns the tree func (t *TreeService) GetTree() *PageNode { - t.mu.Lock() - defer t.mu.Unlock() + t.mu.RLock() + defer t.mu.RUnlock() - if t.tree != nil { - t.sortTreeByPosition(t.tree) - } return t.tree } @@ -512,7 +613,7 @@ func (t *TreeService) GetPage(id string) (*Page, error) { // Get the content of the page content, err := t.store.ReadPageContent(page) if err != nil { - return nil, fmt.Errorf("could not get page content: %v", err) + return nil, fmt.Errorf("could not get page content: %w", err) } return &Page{ @@ -537,7 +638,7 @@ func (t *TreeService) FindPageByRoutePath(entry []*PageNode, routePath string) ( // Get the content of the entry content, err := t.store.ReadPageContent(e) if err != nil { - return nil, fmt.Errorf("could not get page content: %v", err) + return nil, fmt.Errorf("could not get page content: %w", err) } return &Page{ @@ -648,7 +749,10 @@ func (t *TreeService) LookupPagePathLocked(entry []*PageNode, p string) (*PathLo return lookup, nil } -func (t *TreeService) EnsurePagePath(userID string, p string, targetTitle string) (*EnsurePathResult, error) { +// EnsurePagePath ensures that a given path exists in the tree +// It creates any missing segments as needed +// Returns the final page node and a list of created nodes +func (t *TreeService) EnsurePagePath(userID string, p string, targetTitle string, kind *NodeKind) 
(*EnsurePathResult, error) { t.mu.Lock() defer t.mu.Unlock() @@ -658,81 +762,78 @@ func (t *TreeService) EnsurePagePath(userID string, p string, targetTitle string created := []*PageNode{} - // Lookup the path lookup, err := t.LookupPagePathLocked(t.tree.Children, p) if err != nil { - return nil, fmt.Errorf("could not lookup page path: %v", err) + return nil, fmt.Errorf("could not lookup page path: %w", err) } - // If the path exists, return the existing page + // Path exists -> return existing if lookup.Exists { - page, err := t.findPageByIDLocked(t.tree.Children, *lookup.Segments[len(lookup.Segments)-1].ID) + last := lookup.Segments[len(lookup.Segments)-1] + page, err := t.findPageByIDLocked(t.tree.Children, *last.ID) if err != nil { - return nil, fmt.Errorf("could not find existing page by ID: %v", err) + return nil, fmt.Errorf("could not find existing page by ID: %w", err) } - return &EnsurePathResult{ - Exists: true, - Page: page, - }, nil + return &EnsurePathResult{Exists: true, Page: page}, nil } - // If the path does not exist, create it - var currentID *string + // Create missing segments + var currentID *string // nil means root for i, segment := range lookup.Segments { - if segment.Exists { - // If the segment exists, use it currentID = segment.ID continue } - // Create the segment - title := segment.Slug + // Title + segTitle := segment.Slug if i == len(lookup.Segments)-1 { - // If this is the last segment, use the targetTitle - title = targetTitle + segTitle = targetTitle } - // If the segment does not exist, create it - newPageID, err := t.createPageLocked(userID, currentID, title, segment.Slug) - if err != nil { - return nil, fmt.Errorf("could not create page: %v", err) + // Kind: intermediate segments are sections, last segment uses provided kind (or page/section default) + kindToUse := NodeKindSection + if i == len(lookup.Segments)-1 && kind != nil { + kindToUse = *kind } - currentID = newPageID - // Append the newly created page node to the 
created slice - // It is a synthetic PageNode with only ID, Slug and Title set - created = append(created, &PageNode{ID: *currentID, Slug: segment.Slug, Title: title}) - - // If this is the last segment, return the current page - if i == len(lookup.Segments)-1 { - page, err := t.findPageByIDLocked(t.tree.Children, *currentID) - if err != nil { - return nil, fmt.Errorf("could not find created page by ID: %v", err) - } - - // Save the tree - if err := t.saveTreeLocked(); err != nil { - return nil, fmt.Errorf("could not save tree: %v", err) - } - return &EnsurePathResult{ - Exists: true, - Page: page, - Created: created, - }, nil + newID, err := t.createNodeLocked(userID, currentID, segTitle, segment.Slug, &kindToUse) + if err != nil { + return nil, fmt.Errorf("could not create segment %q: %w", segment.Slug, err) } + currentID = newID + + created = append(created, &PageNode{ + ID: *newID, + Slug: segment.Slug, + Title: segTitle, + Kind: kindToUse, + }) } - // Save the tree + // Resolve final page + if currentID == nil { + return nil, fmt.Errorf("could not ensure page path") + } + page, err := t.findPageByIDLocked(t.tree.Children, *currentID) + if err != nil { + return nil, fmt.Errorf("could not find created page by ID: %w", err) + } + + // Save once if err := t.saveTreeLocked(); err != nil { - return nil, fmt.Errorf("could not save tree: %v", err) + return nil, fmt.Errorf("could not save tree: %w", err) } - return nil, fmt.Errorf("could not ensure page path") + return &EnsurePathResult{ + Exists: true, + Page: page, + Created: created, + }, nil } -// MovePage moves a page to another parent -func (t *TreeService) MovePage(userID string, id string, parentID string) error { +// MoveNode moves a node to another parent (root if parentID is empty/"root") +func (t *TreeService) MoveNode(userID string, id string, parentID string) error { t.mu.Lock() defer t.mu.Unlock() @@ -740,52 +841,59 @@ func (t *TreeService) MovePage(userID string, id string, parentID string) error return 
ErrTreeNotLoaded } - // Find the page to move - page, err := t.findPageByIDLocked(t.tree.Children, id) + // Find node to move + node, err := t.findPageByIDLocked(t.tree.Children, id) if err != nil { return ErrPageNotFound } - // We think that the page is moved to the root + // Resolve destination parent (default root) newParent := t.tree - - // Check if a parentID is provided if parentID != "" && parentID != "root" { - // Find the new parent newParent, err = t.findPageByIDLocked(t.tree.Children, parentID) if err != nil { return fmt.Errorf("new parent not found: %w", ErrParentNotFound) } } - // Child with the same slug already exists - if newParent.ChildAlreadyExists(page.Slug) { + // Same slug collision under new parent + if newParent.ChildAlreadyExists(node.Slug) { return fmt.Errorf("child with the same slug already exists: %w", ErrPageAlreadyExists) } - // Check if the page is not moved to itself - if page.ID == newParent.ID { + // Can't move into itself + if node.ID == newParent.ID { return fmt.Errorf("page cannot be moved to itself: %w", ErrPageCannotBeMovedToItself) } - // Check if a circular reference is created - if page.IsChildOf(newParent.ID, true) { + // Circular reference guard: node cannot be moved under its own descendants + if node.IsChildOf(newParent.ID, true) { return fmt.Errorf("circular reference detected: %w", ErrMovePageCircularReference) } - // Move the page in the filesystem - if err := t.store.MovePage(page, newParent); err != nil { - return fmt.Errorf("could not move page entry: %w", err) + // If destination parent is a PAGE, auto-convert it to SECTION so it can host children + if newParent.ID != "root" && newParent.Kind == NodeKindPage { + if err := t.store.ConvertNode(newParent, NodeKindSection); err != nil { + return fmt.Errorf("could not auto-convert new parent page to section: %w", err) + } + newParent.Kind = NodeKindSection + } + + // Defensive: after possible conversion, destination must be a section + if newParent.Kind != 
NodeKindSection { + return fmt.Errorf("destination parent must be a section, got %q", newParent.Kind) + } + + // Move on disk (strict by node.Kind inside NodeStore) + if err := t.store.MoveNode(node, newParent); err != nil { + return fmt.Errorf("could not move node on disk: %w", err) } - // Move the page to the new parent - // Remove the page from the old parent - oldParent := page.Parent + // Unlink from old parent in tree + oldParent := node.Parent if oldParent == nil { return fmt.Errorf("old parent not found: %w", ErrParentNotFound) } - - // Remove the page from the old parent for i, e := range oldParent.Children { if e.ID == id { oldParent.Children = append(oldParent.Children[:i], oldParent.Children[i+1:]...) @@ -793,20 +901,20 @@ func (t *TreeService) MovePage(userID string, id string, parentID string) error } } - // Add the page to the new parent - page.Position = len(newParent.Children) - newParent.Children = append(newParent.Children, page) - page.Parent = newParent + // Link under new parent + node.Position = len(newParent.Children) + newParent.Children = append(newParent.Children, node) + node.Parent = newParent - // Update Metadata of the moved page - page.Metadata.UpdatedAt = time.Now().UTC() - page.Metadata.LastAuthorID = userID + // Update metadata + node.Metadata.UpdatedAt = time.Now().UTC() + node.Metadata.LastAuthorID = userID - // Reindex the positions of the old parent + // Reindex positions t.reindexPositions(newParent) t.reindexPositions(oldParent) - // Save the tree + // Persist tree return t.saveTreeLocked() } @@ -876,6 +984,39 @@ func (t *TreeService) SortPages(parentID string, orderedIDs []string) error { return t.saveTreeLocked() } +// maybeCollapseSectionToPageLocked tries to collapse a section node into a page node +// It is not used currently, but after testing the user flow we might want to integrate it +// into UpdateNode or MoveNode operations +// Lock must be held by the caller +// func (t *TreeService) 
maybeCollapseSectionToPageLocked(node *PageNode) { +// if node == nil || node.ID == "root" { +// return +// } +// if node.Kind != NodeKindSection { +// return +// } +// if node.HasChildren() { +// return +// } + +// // Only collapse if index.md exists +// indexPath, err := t.store.contentPathForNodeRead(node) +// if err != nil { +// return +// } +// if _, err := os.Stat(indexPath); err != nil { +// // no index.md => keep as section +// return +// } + +// // Try collapse (will refuse if folder has other files) +// if err := t.store.ConvertNode(node, NodeKindPage); err != nil { +// // not allowed (e.g. folder not empty) -> keep section +// return +// } +// node.Kind = NodeKindPage +// } + func (t *TreeService) reindexPositions(parent *PageNode) { sort.SliceStable(parent.Children, func(i, j int) bool { return parent.Children[i].Position < parent.Children[j].Position @@ -885,11 +1026,11 @@ func (t *TreeService) reindexPositions(parent *PageNode) { } } -func (t *TreeService) sortTreeByPosition(node *PageNode) { - sort.SliceStable(node.Children, func(i, j int) bool { - return node.Children[i].Position < node.Children[j].Position - }) - for _, child := range node.Children { - t.sortTreeByPosition(child) - } -} +// func (t *TreeService) sortTreeByPosition(node *PageNode) { +// sort.SliceStable(node.Children, func(i, j int) bool { +// return node.Children[i].Position < node.Children[j].Position +// }) +// for _, child := range node.Children { +// t.sortTreeByPosition(child) +// } +// } diff --git a/internal/core/tree/tree_service_test.go b/internal/core/tree/tree_service_test.go index 752bb267..2e0389d9 100644 --- a/internal/core/tree/tree_service_test.go +++ b/internal/core/tree/tree_service_test.go @@ -8,1184 +8,711 @@ import ( "testing" ) -func TestTreeService_SaveAndLoadTree(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - - // Initialen Tree manuell setzen - service.tree = &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - 
Children: []*PageNode{ - { - ID: "child1", - Title: "Child 1", - Slug: "child-1", - Children: []*PageNode{ - { - ID: "child1a", - Title: "Child 1a", - Slug: "child-1a", - }, - }, - }, - }, - } - - // SaveTree ausführen - if err := service.SaveTree(); err != nil { - t.Fatalf("SaveTree failed: %v", err) - } - - // Neue Instanz zum Laden - loaded := NewTreeService(tmpDir) - if err := loaded.LoadTree(); err != nil { - t.Fatalf("LoadTree failed: %v", err) - } - - // Verifikation der Struktur - root := loaded.GetTree() - if root.ID != "root" || root.Title != "Root" { - t.Errorf("Expected root node not loaded correctly") - } +// --- helpers --- - if len(root.Children) != 1 || root.Children[0].ID != "child1" { - t.Errorf("Child not loaded correctly") - } +func newLoadedService(t *testing.T) (*TreeService, string) { + t.Helper() + tmpDir := t.TempDir() - grandchild := root.Children[0].Children[0] - if grandchild == nil || grandchild.ID != "child1a" { - t.Errorf("Grandchild not loaded correctly") + // Ensure schema is current so LoadTree doesn't try to migrate unless a test wants it. 
+ if err := saveSchema(tmpDir, CurrentSchemaVersion); err != nil { + t.Fatalf("saveSchema failed: %v", err) } - // Verifiziere Parent-Zuweisung - if root.Children[0].Parent == nil || root.Children[0].Parent.ID != "root" { - t.Errorf("Parent not assigned to child node") - } - if grandchild.Parent == nil || grandchild.Parent.ID != "child1" { - t.Errorf("Parent not assigned to grandchild node") + svc := NewTreeService(tmpDir) + if err := svc.LoadTree(); err != nil { + t.Fatalf("LoadTree failed: %v", err) } + return svc, tmpDir } -func TestTreeService_LoadTree_DefaultOnMissing(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - - // Kein tree.json vorhanden → Default-Root - err := service.LoadTree() +func mustStat(t *testing.T, path string) os.FileInfo { + t.Helper() + info, err := os.Stat(path) if err != nil { - t.Fatalf("Expected to load default tree, got error: %v", err) + t.Fatalf("expected %q to exist, stat error: %v", path, err) } + return info +} - tree := service.GetTree() - if tree == nil || tree.ID != "root" { - t.Errorf("Expected default root node, got: %+v", tree) +func mustNotExist(t *testing.T, path string) { + t.Helper() + _, err := os.Stat(path) + if err == nil { + t.Fatalf("expected %q to not exist, but it exists", path) + } + if !errors.Is(err, os.ErrNotExist) { + t.Fatalf("expected os.ErrNotExist for %q, got: %v", path, err) } } -func TestTreeService_CreatePage_RootLevel(t *testing.T) { +// --- A) Load/Save basics --- + +func TestTreeService_LoadTree_DefaultRootWhenMissing(t *testing.T) { tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - _, err := service.CreatePage("system", nil, "Welcome", "welcome") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) + // schema current to prevent migration from failing due to missing schema file + if err := saveSchema(tmpDir, CurrentSchemaVersion); err != nil { + t.Fatalf("saveSchema failed: %v", err) } - tree := service.GetTree() - if 
len(tree.Children) != 1 { - t.Errorf("Expected 1 child at root level, got %d", len(tree.Children)) + svc := NewTreeService(tmpDir) + if err := svc.LoadTree(); err != nil { + t.Fatalf("LoadTree failed: %v", err) } - child := tree.Children[0] - if child.Title != "Welcome" || child.Slug != "welcome" { - t.Errorf("Child has incorrect data: %+v", child) + tree := svc.GetTree() + if tree == nil || tree.ID != "root" { + t.Fatalf("expected default root, got: %+v", tree) } - - // Datei muss existieren - expectedPath := filepath.Join(tmpDir, "root", "welcome.md") - if _, err := os.Stat(expectedPath); os.IsNotExist(err) { - t.Errorf("Expected file not found: %s", expectedPath) + if tree.Kind != NodeKindSection { + t.Fatalf("expected root to be section, got %q", tree.Kind) } } -func TestTreeService_CreatePage_Nested(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_SaveAndLoad_RoundtripParents(t *testing.T) { + svc, tmpDir := newLoadedService(t) - // Zuerst einen Parent anlegen - _, err := service.CreatePage("system", nil, "Docs", "docs") + // Create a small tree through public API (exercises disk + tree) + idA, err := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("Failed to create parent page: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } - - // ID des Elternteils holen - parent := service.GetTree().Children[0] - - // Jetzt Subpage erstellen - _, err = service.CreatePage("system", &parent.ID, "Getting Started", "getting-started") + _, err = svc.CreateNode("system", idA, "B", "b", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("CreateNode B failed: %v", err) } - if len(parent.Children) != 1 { - t.Errorf("Expected 1 child under parent, got %d", len(parent.Children)) + if err := svc.SaveTree(); err != nil { + t.Fatalf("SaveTree failed: %v", err) } - sub := parent.Children[0] - if sub.Slug != "getting-started" { 
- t.Errorf("Unexpected slug: %s", sub.Slug) + // Reload in a new service instance + if err := saveSchema(tmpDir, CurrentSchemaVersion); err != nil { + t.Fatalf("saveSchema failed: %v", err) } - - expected := filepath.Join(tmpDir, "root", "docs", "getting-started.md") - if _, err := os.Stat(expected); os.IsNotExist(err) { - t.Errorf("Expected nested file not found: %s", expected) + loaded := NewTreeService(tmpDir) + if err := loaded.LoadTree(); err != nil { + t.Fatalf("LoadTree failed: %v", err) } -} -func TestTreeService_CreatePage_InvalidParent(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - - invalidID := "does-not-exist" - _, err := service.CreatePage("system", &invalidID, "Broken", "broken") - if err == nil { - t.Errorf("Expected error for invalid parent ID, got none") + root := loaded.GetTree() + if len(root.Children) != 1 { + t.Fatalf("expected 1 child at root, got %d", len(root.Children)) + } + a := root.Children[0] + if a.Parent == nil || a.Parent.ID != "root" { + t.Fatalf("expected parent pointer on A") + } + if len(a.Children) != 1 { + t.Fatalf("expected A to have 1 child, got %d", len(a.Children)) + } + b := a.Children[0] + if b.Parent == nil || b.Parent.ID != a.ID { + t.Fatalf("expected parent pointer on B") } } -func TestTreeService_UpdatePage_ContentAndSlug(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +// --- B) Create/Update/Delete disk sync --- - // Seite anlegen - _, err := service.CreatePage("system", nil, "Docs", "docs") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - page := service.GetTree().Children[0] +func TestTreeService_CreateNode_Page_Root_CreatesFileAndFrontmatter(t *testing.T) { + svc, tmpDir := newLoadedService(t) - // Inhalt + Slug ändern - newSlug := "documentation" - newContent := "# Updated Docs" - err = service.UpdatePage("system", page.ID, "Documentation", newSlug, newContent) + id, err := 
svc.CreateNode("system", nil, "Welcome", "welcome", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("UpdatePage failed: %v", err) + t.Fatalf("CreateNode failed: %v", err) } - // Neuer Pfad sollte existieren - newPath := filepath.Join(tmpDir, "root", newSlug+".md") - if _, err := os.Stat(newPath); os.IsNotExist(err) { - t.Errorf("Expected updated file at %s not found", newPath) - } + // file path: /root/welcome.md (based on your existing tests + GeneratePath convention) + p := filepath.Join(tmpDir, "root", "welcome.md") + mustStat(t, p) - // Inhalt prüfen - data, err := os.ReadFile(newPath) + raw, err := os.ReadFile(p) if err != nil { - t.Fatalf("Failed to read file: %v", err) + t.Fatalf("read file: %v", err) } - if !strings.Contains(string(data), newContent) { - t.Errorf("Expected content %q, got %q", newContent, string(data)) - } -} -func TestTreeService_UpdatePage_FileNotFound(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - - // Create a page in the tree but do not create the corresponding file - id := "ghost" - page := &PageNode{ - ID: id, - Title: "Ghost", - Slug: "ghost", - Parent: service.tree, + fm, _, has, err := ParseFrontmatter(string(raw)) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) } - service.tree.Children = append(service.tree.Children, page) - - // Versuch zu aktualisieren - err := service.UpdatePage("system", id, "Still Ghost", "still-ghost", "# Boo") - if err == nil { - t.Error("Expected error when file does not exist") + if !has { + t.Fatalf("expected frontmatter to exist") } -} - -func TestTreeService_UpdatePage_InvalidID(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - - err := service.UpdatePage("system", "unknown", "Nope", "nope", "# nope") - if err == nil { - t.Error("Expected error for invalid ID, got none") + if strings.TrimSpace(fm.LeafWikiID) != *id { + t.Fatalf("expected leafwiki_id=%q, got %q", *id, fm.LeafWikiID) 
} } -func TestTreeService_DeletePage_Success(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_CreateChild_UnderPage_AutoConvertsParentToSection(t *testing.T) { + svc, tmpDir := newLoadedService(t) - // Seite erstellen - _, err := service.CreatePage("system", nil, "DeleteMe", "delete-me") + // Create parent as page + parentID, err := svc.CreateNode("system", nil, "Docs", "docs", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("Create parent failed: %v", err) } - page := service.GetTree().Children[0] - // Löschen - err = service.DeletePage("system", page.ID, false) - if err != nil { - t.Fatalf("DeletePage failed: %v", err) - } + // Should exist as file initially + parentFile := filepath.Join(tmpDir, "root", "docs.md") + mustStat(t, parentFile) - // Datei darf nicht mehr existieren - path := filepath.Join(tmpDir, "root", "delete-me.md") - if _, err := os.Stat(path); !os.IsNotExist(err) { - t.Errorf("Expected file to be deleted: %s", path) + // Create child under parent: must convert parent to section + _, err = svc.CreateNode("system", parentID, "Getting Started", "getting-started", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("Create child failed: %v", err) } - // Seite sollte aus Tree entfernt worden sein - if len(service.GetTree().Children) != 0 { - t.Errorf("Expected page to be removed from tree") - } -} + // Parent should now be a folder with index.md (converted from docs.md) + parentDir := filepath.Join(tmpDir, "root", "docs") + mustStat(t, parentDir) + index := filepath.Join(parentDir, "index.md") + mustStat(t, index) -func TestTreeService_DeletePage_HasChildrenWithoutRecursive(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() + // Old file should be gone + mustNotExist(t, parentFile) - // Parent + Child - _, err := service.CreatePage("system", nil, "Parent", "parent") - if err != nil { - 
t.Fatalf("CreatePage failed: %v", err) - } - parent := service.GetTree().Children[0] + // Child file should be inside folder + childFile := filepath.Join(parentDir, "getting-started.md") + mustStat(t, childFile) - _, err = service.CreatePage("system", &parent.ID, "Child", "child") + // Tree kind updated + parentNode, err := svc.FindPageByID(svc.GetTree().Children, *parentID) if err != nil { - t.Fatalf("CreatePage (child) failed: %v", err) + t.Fatalf("FindPageByID: %v", err) } - - // Try deleting parent without recursive - err = service.DeletePage("system", parent.ID, false) - if err == nil { - t.Error("Expected error when deleting parent with children without recursive flag") + if parentNode.Kind != NodeKindSection { + t.Fatalf("expected parent kind section, got %q", parentNode.Kind) } } -func TestTreeService_DeletePage_InvalidID(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_UpdateNode_TitleOnly_SyncsFrontmatterIfFileExists(t *testing.T) { + svc, tmpDir := newLoadedService(t) - err := service.DeletePage("system", "nonexistent", false) - if err == nil { - t.Error("Expected error for unknown ID") + id, err := svc.CreateNode("system", nil, "Docs", "docs", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("CreateNode failed: %v", err) } -} -func TestTreeService_DeletePage_Recursive(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() + p := filepath.Join(tmpDir, "root", "docs.md") + mustStat(t, p) - // Parent → Child - _, err := service.CreatePage("system", nil, "Parent", "parent") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) + // Update title only: content=nil, slug unchanged + if err := svc.UpdateNode("system", *id, "Documentation", "docs", nil); err != nil { + t.Fatalf("UpdateNode failed: %v", err) } - parent := service.GetTree().Children[0] - _, err = service.CreatePage("system", &parent.ID, "Child", "child") + raw, err := 
os.ReadFile(p) if err != nil { - t.Fatalf("CreatePage (child) failed: %v", err) + t.Fatalf("read: %v", err) } - - // Rekursiv löschen - err = service.DeletePage("system", parent.ID, true) + fm, _, has, err := ParseFrontmatter(string(raw)) if err != nil { - t.Fatalf("Expected recursive delete to succeed, got error: %v", err) + t.Fatalf("ParseFrontmatter: %v", err) } - - parentPath := filepath.Join(tmpDir, "root", "parent") - if _, err := os.Stat(parentPath); !os.IsNotExist(err) { - t.Errorf("Expected parent folder to be deleted") + if !has { + t.Fatalf("expected frontmatter") + } + if fm.LeafWikiTitle != "Documentation" { + t.Fatalf("expected leafwiki_title to be updated, got %q", fm.LeafWikiTitle) } } -func TestTreeService_MovePage_FileToFolder(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_UpdateNode_SlugRename_RenamesOnDisk(t *testing.T) { + svc, tmpDir := newLoadedService(t) - // Create root → a, root → b - _, err := service.CreatePage("system", nil, "A", "a") - if err != nil { - t.Fatalf("CreatePage A failed: %v", err) - } - _, err = service.CreatePage("system", nil, "B", "b") + id, err := svc.CreateNode("system", nil, "Docs", "docs", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage B failed: %v", err) + t.Fatalf("CreateNode failed: %v", err) } - a := service.GetTree().Children[0] - b := service.GetTree().Children[1] - - err = service.MovePage("system", a.ID, b.ID) - if err != nil { - t.Fatalf("MovePage failed: %v", err) - } + oldPath := filepath.Join(tmpDir, "root", "docs.md") + mustStat(t, oldPath) - // Erwartung: a ist jetzt unter b - if len(b.Children) != 1 || b.Children[0].ID != a.ID { - t.Errorf("Expected page A to be moved under B") + newSlug := "documentation" + if err := svc.UpdateNode("system", *id, "Docs", newSlug, nil); err != nil { + t.Fatalf("UpdateNode failed: %v", err) } - // Datei existiert im neuen Pfad - expected := filepath.Join(tmpDir, "root", "b", 
"a.md") - if _, err := os.Stat(expected); os.IsNotExist(err) { - t.Errorf("Expected moved file: %v", expected) - } + newPath := filepath.Join(tmpDir, "root", newSlug+".md") + mustStat(t, newPath) + mustNotExist(t, oldPath) } -func TestTreeService_MovePage_NonexistentPage(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +/* +Disable this test for now as we are not enforcing to pass the kinds yet. +func TestTreeService_UpdateNode_SectionToPage_DisallowedWithChildren(t *testing.T) { + svc, _ := newLoadedService(t) - // Create only one page - _, err := service.CreatePage("system", nil, "Target", "target") + // Create parent page, then child to force parent to section + parentID, err := svc.CreateNode("system", nil, "Docs", "docs", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("Create parent failed: %v", err) + } + _, err = svc.CreateNode("system", parentID, "Child", "child", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("Create child failed: %v", err) } - target := service.GetTree().Children[0] - // Versuch mit ungültiger ID - err = service.MovePage("system", "does-not-exist", target.ID) + // Now parent is section with children, attempt to convert back to page + err = svc.UpdateNode("system", *parentID, "Docs", "docs", nil) if err == nil { - t.Error("Expected error for non-existent source page") + t.Fatalf("expected error converting section->page with children") + } + if !errors.Is(err, ErrPageHasChildren) { + t.Fatalf("expected ErrPageHasChildren, got: %v", err) } } +*/ -func TestTreeService_MovePage_NonexistentTarget(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_DeleteNode_NonRecursiveErrorsWhenHasChildren(t *testing.T) { + svc, _ := newLoadedService(t) - _, err := service.CreatePage("system", nil, "Source", "source") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - source 
:= service.GetTree().Children[0] + parentID, _ := svc.CreateNode("system", nil, "Parent", "parent", ptrKind(NodeKindPage)) + _, _ = svc.CreateNode("system", parentID, "Child", "child", ptrKind(NodeKindPage)) - err = service.MovePage("system", source.ID, "invalid-target-id") + err := svc.DeleteNode("system", *parentID, false) if err == nil { - t.Error("Expected error for non-existent target") + t.Fatalf("expected error") + } + if !errors.Is(err, ErrPageHasChildren) { + t.Fatalf("expected ErrPageHasChildren, got: %v", err) } } -func TestTreeService_MovePage_SelfAsParent(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_DeleteNode_RecursiveDeletesDiskAndTree(t *testing.T) { + svc, tmpDir := newLoadedService(t) + + parentID, _ := svc.CreateNode("system", nil, "Parent", "parent", ptrKind(NodeKindPage)) + _, _ = svc.CreateNode("system", parentID, "Child", "child", ptrKind(NodeKindPage)) + + // Parent should now be a folder + parentDir := filepath.Join(tmpDir, "root", "parent") + mustStat(t, parentDir) - _, err := service.CreatePage("system", nil, "Loop", "loop") + err := svc.DeleteNode("system", *parentID, true) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("DeleteNode recursive failed: %v", err) } - node := service.GetTree().Children[0] - err = service.MovePage("system", node.ID, node.ID) - if err == nil { - t.Error("Expected error when moving page into itself (if you later implement such protection)") + // Folder should be gone + mustNotExist(t, parentDir) + + // Tree should have no children at root + if len(svc.GetTree().Children) != 0 { + t.Fatalf("expected root to have no children") } } -func TestTreeService_FindPageByRoutePath_Success(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_DeletePage_Leaf_Success_RemovesFileAndTreeAndReindexes(t *testing.T) { + svc, tmpDir := newLoadedService(t) - // Tree: 
root → architecture → project-a → specs - _, err := service.CreatePage("system", nil, "Architecture", "architecture") + // Create 3 leaf pages + idA, err := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("CreateNode A: %v", err) } - arch := service.GetTree().Children[0] - - _, err = service.CreatePage("system", &arch.ID, "Project A", "project-a") + idB, err := svc.CreateNode("system", nil, "B", "b", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("CreateNode B: %v", err) } - projectA := arch.Children[0] - - _, err = service.CreatePage("system", &projectA.ID, "Specs", "specs") + idC, err := svc.CreateNode("system", nil, "C", "c", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("CreateNode C: %v", err) } - // Datei anlegen - specPath := filepath.Join(tmpDir, "root", "architecture", "project-a", "specs.md") - err = os.WriteFile(specPath, []byte("# Project A Specs"), 0644) - if err != nil { - t.Fatalf("Failed to write specs file: %v", err) + // Verify files exist + pathA := filepath.Join(tmpDir, "root", "a.md") + pathB := filepath.Join(tmpDir, "root", "b.md") + pathC := filepath.Join(tmpDir, "root", "c.md") + if _, err := os.Stat(pathB); err != nil { + t.Fatalf("expected %s exists: %v", pathB, err) } - // 🔍 Suche über RoutePath - page, err := service.FindPageByRoutePath(service.GetTree().Children, "architecture/project-a/specs") - if err != nil { - t.Fatalf("Expected page, got error: %v", err) + // Delete middle page (B) + if err := svc.DeleteNode("system", *idB, false); err != nil { + t.Fatalf("DeleteNode failed: %v", err) } - if page.Slug != "specs" || !strings.Contains(page.Content, "Specs") { - t.Errorf("Unexpected page content or slug") + // Disk: B gone; A/C still there + if _, err := os.Stat(pathB); !errors.Is(err, os.ErrNotExist) { + t.Fatalf("expected %s to be deleted, got err=%v", pathB, err) 
} -} - -func TestTreeService_FindPageByRoutePath_NotFound(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - - if _, err := service.CreatePage("system", nil, "Top", "top"); err != nil { - t.Fatalf("CreatePage failed: %v", err) + if _, err := os.Stat(pathA); err != nil { + t.Fatalf("expected %s exists: %v", pathA, err) } - - if _, err := service.FindPageByRoutePath(service.GetTree().Children, "top/missing"); err == nil { - t.Error("Expected error for non-existent nested path, got nil") + if _, err := os.Stat(pathC); err != nil { + t.Fatalf("expected %s exists: %v", pathC, err) } -} - -func TestTreeService_FindPageByRoutePath_PartialMatch(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - if _, err := service.CreatePage("system", nil, "Docs", "docs"); err != nil { - t.Fatalf("CreatePage failed: %v", err) + // Tree: only 2 children remain + root := svc.GetTree() + if len(root.Children) != 2 { + t.Fatalf("expected 2 children after delete, got %d", len(root.Children)) } - if _, err := service.CreatePage("system", nil, "API", "api"); err != nil { - t.Fatalf("CreatePage failed: %v", err) + // Ensure deleted ID not present + for _, ch := range root.Children { + if ch.ID == *idB { + t.Fatalf("deleted node still present in tree") + } } - if _, err := service.FindPageByRoutePath(service.GetTree().Children, "docs/should-not-exist"); err == nil { - t.Error("Expected error for unmatched subpath") + // Reindex: positions must be 0..1 (order depends on previous positions; we just assert contiguous) + if root.Children[0].Position != 0 || root.Children[1].Position != 1 { + t.Fatalf("expected positions reindexed to 0..1, got %d,%d", + root.Children[0].Position, root.Children[1].Position) } -} -func setupTestTree() *TreeService { - ts := NewTreeService(os.TempDir()) - ts.tree = &PageNode{ - ID: "root", - Title: "Root", - Children: []*PageNode{ - {ID: "a", Title: "A"}, - {ID: "b", 
Title: "B"}, - {ID: "c", Title: "C"}, - }, - } - return ts + // Optional: ensure remaining IDs are the ones we expect + _ = idA + _ = idC } -func TestTreeService_SortPages_ValidOrder(t *testing.T) { - ts := setupTestTree() +func TestTreeService_DeletePage_WithChildren_NonRecursive_ReturnsErrPageHasChildren(t *testing.T) { + svc, _ := newLoadedService(t) - err := ts.SortPages("root", []string{"c", "a", "b"}) + parentID, err := svc.CreateNode("system", nil, "Parent", "parent", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("unexpected error: %v", err) + t.Fatalf("CreateNode parent: %v", err) } - if ts.tree.Children[0].ID != "c" || ts.tree.Children[1].ID != "a" || ts.tree.Children[2].ID != "b" { - t.Errorf("unexpected order after sorting") + _, err = svc.CreateNode("system", parentID, "Child", "child", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("CreateNode child: %v", err) } -} - -func TestTreeService_SortPages_InvalidLength(t *testing.T) { - ts := setupTestTree() - err := ts.SortPages("root", []string{"a", "b"}) + err = svc.DeleteNode("system", *parentID, false) if err == nil { - t.Errorf("expected error for invalid length, got nil") + t.Fatalf("expected error deleting page with children without recursive") } -} - -func TestTreeService_SortPages_InvalidID(t *testing.T) { - ts := setupTestTree() - - err := ts.SortPages("root", []string{"a", "b", "x"}) - if err == nil { - t.Errorf("expected error for invalid ID, got nil") + if !errors.Is(err, ErrPageHasChildren) { + t.Fatalf("expected ErrPageHasChildren, got: %v", err) } } -func TestTreeService_SortPages_DuplicateID(t *testing.T) { - ts := setupTestTree() - - err := ts.SortPages("root", []string{"a", "a", "b"}) - if err == nil { - t.Errorf("expected error for duplicate ID, got nil") - } -} +func TestTreeService_DeletePage_WithChildren_Recursive_DeletesFolder(t *testing.T) { + svc, tmpDir := newLoadedService(t) -func TestTreeService_SortPages_EmptyOK(t *testing.T) { - ts := NewTreeService(t.TempDir()) - 
ts.tree = &PageNode{ - ID: "root", - Title: "Root", - Children: []*PageNode{}, + parentID, err := svc.CreateNode("system", nil, "Parent", "parent", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("CreateNode parent: %v", err) } - - err := ts.SortPages("root", []string{}) + _, err = svc.CreateNode("system", parentID, "Child", "child", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("unexpected error for empty list: %v", err) + t.Fatalf("CreateNode child: %v", err) } -} -func TestTreeService_SortPages_TreeNotLoaded(t *testing.T) { - ts := &TreeService{ - tree: nil, + // Parent was auto-converted to section -> folder should exist + parentDir := filepath.Join(tmpDir, "root", "parent") + if _, err := os.Stat(parentDir); err != nil { + t.Fatalf("expected parent dir exists (after auto-convert): %v", err) } - err := ts.SortPages("root", []string{"a"}) - if err == nil || !errors.Is(err, ErrTreeNotLoaded) { - t.Errorf("expected ErrTreeNotLoaded, got: %v", err) + // Recursive delete should remove the folder + if err := svc.DeleteNode("system", *parentID, true); err != nil { + t.Fatalf("DeleteNode recursive failed: %v", err) } -} - -func TestTreeService_LookupPath_Exists(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - - // Create tree structure - _, _ = service.CreatePage("system", nil, "Home", "home") - home := service.GetTree().Children[0] - _, _ = service.CreatePage("system", &home.ID, "About", "about") - about := home.Children[0] - _, _ = service.CreatePage("system", &about.ID, "Team", "team") - lookup, err := service.LookupPagePath(service.GetTree().Children, "home/about/team") - if err != nil { - t.Fatalf("unexpected error: %v", err) + if _, err := os.Stat(parentDir); !errors.Is(err, os.ErrNotExist) { + t.Fatalf("expected parent folder deleted, got err=%v", err) } - if !lookup.Exists { - t.Errorf("expected path to exist") - } - if len(lookup.Segments) != 3 { - t.Errorf("expected 3 segments, got %d", 
len(lookup.Segments)) - } - if !lookup.Segments[2].Exists || lookup.Segments[2].ID == nil || lookup.Segments[2].Slug != "team" { - t.Errorf("expected last segment to exist with correct Slug") + // Tree should no longer contain parent + if len(svc.GetTree().Children) != 0 { + t.Fatalf("expected root to have no children after delete, got %d", len(svc.GetTree().Children)) } } -func TestTreeService_LookupPath_NotExists(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - - // Create tree structure - _, _ = service.CreatePage("system", nil, "Home", "home") - home := service.GetTree().Children[0] - _, _ = service.CreatePage("system", &home.ID, "About", "about") - - lookup, err := service.LookupPagePath(service.GetTree().Children, "home/about/contact") - if err != nil { - t.Fatalf("unexpected error: %v", err) - } +func TestTreeService_DeletePage_InvalidID_ReturnsErrPageNotFound(t *testing.T) { + svc, _ := newLoadedService(t) - if lookup.Exists { - t.Errorf("expected path to not exist") - } - if len(lookup.Segments) != 3 { - t.Errorf("expected 3 segments, got %d", len(lookup.Segments)) - } - if !lookup.Segments[1].Exists || lookup.Segments[1].ID == nil || lookup.Segments[1].Slug != "about" { - t.Errorf("expected second segment to exist with correct Slug") + err := svc.DeleteNode("system", "does-not-exist", false) + if err == nil { + t.Fatalf("expected error") } - if lookup.Segments[2].Exists || lookup.Segments[2].ID != nil || lookup.Segments[2].Slug != "contact" { - t.Errorf("expected last segment to not exist with correct Slug") + if !errors.Is(err, ErrPageNotFound) { + t.Fatalf("expected ErrPageNotFound, got: %v", err) } } -func TestTreeService_LookupPath_EmptyPath(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_DeletePage_Drift_FileMissing_ReturnsError(t *testing.T) { + svc, tmpDir := newLoadedService(t) - lookup, err := 
service.LookupPagePath(service.GetTree().Children, "") + // Create a leaf page normally (creates file) + id, err := svc.CreateNode("system", nil, "Ghost", "ghost", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("unexpected error: %v", err) + t.Fatalf("CreateNode: %v", err) } - if lookup.Exists { - t.Errorf("expected empty path to not exist") - } - if len(lookup.Segments) != 0 { - t.Errorf("expected 0 segments, got %d", len(lookup.Segments)) + // Delete the file manually to simulate drift + p := filepath.Join(tmpDir, "root", "ghost.md") + if err := os.Remove(p); err != nil { + t.Fatalf("failed to remove file to simulate drift: %v", err) } -} - -func TestTreeService_LookupPath_DeeperMissingPath(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - _, _ = service.CreatePage("system", nil, "Home", "home") - home := service.GetTree().Children[0] - _, _ = service.CreatePage("system", &home.ID, "About", "about") - - lookup, err := service.LookupPagePath(service.GetTree().Children, "home/about/team/members") - if err != nil { - t.Fatalf("unexpected error: %v", err) - } - - if lookup.Exists { - t.Errorf("expected path to not exist") - } - if len(lookup.Segments) != 4 { - t.Errorf("expected 4 segments, got %d", len(lookup.Segments)) - } - if !lookup.Segments[1].Exists || lookup.Segments[1].ID == nil || lookup.Segments[1].Slug != "about" { - t.Errorf("expected second segment to exist with correct Slug") - } - if lookup.Segments[2].Exists || lookup.Segments[2].ID != nil || lookup.Segments[2].Slug != "team" { - t.Errorf("expected third segment to not exist with correct Slug") + // Now delete node - should error (drift) + err = svc.DeleteNode("system", *id, false) + if err == nil { + t.Fatalf("expected drift error") } - if lookup.Segments[3].Exists || lookup.Segments[3].ID != nil || lookup.Segments[3].Slug != "members" { - t.Errorf("expected last segment to not exist with correct Slug") + // If you have a concrete DriftError 
type, you can assert with errors.As. + var dErr *DriftError + if !errors.As(err, &dErr) { + t.Fatalf("expected DriftError, got: %T (%v)", err, err) } } -func TestTreeService_LookupPath_OnlyOneSegment(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +// --- C) Move semantics --- - _, _ = service.CreatePage("system", nil, "Home", "home") +func TestTreeService_MoveNode_TargetPageAutoConvertsToSection(t *testing.T) { + svc, tmpDir := newLoadedService(t) - lookup, err := service.LookupPagePath(service.GetTree().Children, "home") - if err != nil { - t.Fatalf("unexpected error: %v", err) - } + aID, _ := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) + bID, _ := svc.CreateNode("system", nil, "B", "b", ptrKind(NodeKindPage)) - if !lookup.Exists { - t.Errorf("expected path to exist") - } - if len(lookup.Segments) != 1 { - t.Errorf("expected 1 segment, got %d", len(lookup.Segments)) - } - if !lookup.Segments[0].Exists || lookup.Segments[0].ID == nil || lookup.Segments[0].Slug != "home" { - t.Errorf("expected segment to exist with correct Slug") + // Move A under B (B is a page => should auto-convert to section) + if err := svc.MoveNode("system", *aID, *bID); err != nil { + t.Fatalf("MoveNode failed: %v", err) } -} -func TestTreeService_EnsurePagePath_Successful(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() + // B should now be folder with index.md + bDir := filepath.Join(tmpDir, "root", "b") + mustStat(t, bDir) + mustStat(t, filepath.Join(bDir, "index.md")) - _, _ = service.CreatePage("system", nil, "Home", "home") - home := service.GetTree().Children[0] - _, _ = service.CreatePage("system", &home.ID, "About", "about") + // A should now be inside B folder + aPath := filepath.Join(bDir, "a.md") + mustStat(t, aPath) +} - result, err := service.EnsurePagePath("system", "home/about/team", "Team") - if err != nil { - t.Fatalf("unexpected error: %v", err) - } 
+func TestTreeService_MoveNode_PreventsCircularReference(t *testing.T) { + svc, _ := newLoadedService(t) - if !result.Exists { - t.Errorf("expected path to exist after creation") - } - if result.Page == nil || result.Page.Slug != "team" || result.Page.Title != "Team" { - t.Errorf("expected created page with correct Slug and Title") - } + aID, _ := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) + // create child under A so A becomes section and has child + bID, _ := svc.CreateNode("system", aID, "B", "b", ptrKind(NodeKindPage)) - // Verify the page was actually created in the tree - about := home.Children[0] - if len(about.Children) != 1 || about.Children[0].Slug != "team" { - t.Errorf("expected 'team' page to be a child of 'about'") + // Try move A under B (A -> ... -> B). Should error with circular reference. + err := svc.MoveNode("system", *aID, *bID) + if err == nil { + t.Fatalf("expected error moving node under its descendant") + } + if !errors.Is(err, ErrMovePageCircularReference) { + t.Fatalf("expected ErrMovePageCircularReference, got: %v", err) } } -func TestTreeService_EnsurePagePath_AlreadyExists(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_MoveNode_PreventsSelfParent(t *testing.T) { + svc, _ := newLoadedService(t) - _, _ = service.CreatePage("system", nil, "Home", "home") - home := service.GetTree().Children[0] - _, _ = service.CreatePage("system", &home.ID, "About", "about") - about := home.Children[0] - _, _ = service.CreatePage("system", &about.ID, "Team", "team") - - result, err := service.EnsurePagePath("system", "home/about/team", "Team") - if err != nil { - t.Fatalf("unexpected error: %v", err) - } + aID, _ := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) - if !result.Exists { - t.Errorf("expected path to exist") + err := svc.MoveNode("system", *aID, *aID) + if err == nil { + t.Fatalf("expected error moving node into itself") } - if 
result.Page == nil || result.Page.Slug != "team" { - t.Errorf("expected existing page with correct Slug") + if !errors.Is(err, ErrPageCannotBeMovedToItself) { + t.Fatalf("expected ErrPageCannotBeMovedToItself, got: %v", err) } } -func TestTreeService_EnsurePagePath_PartialExistence(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +// --- D) SortPages --- - _, _ = service.CreatePage("system", nil, "Home", "home") - home := service.GetTree().Children[0] - _, _ = service.CreatePage("system", &home.ID, "About", "about") +func TestTreeService_SortPages_ValidOrder(t *testing.T) { + svc, _ := newLoadedService(t) - result, err := service.EnsurePagePath("system", "home/about/team/members", "Members") - if err != nil { - t.Fatalf("unexpected error: %v", err) - } + idA, _ := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) + idB, _ := svc.CreateNode("system", nil, "B", "b", ptrKind(NodeKindPage)) + idC, _ := svc.CreateNode("system", nil, "C", "c", ptrKind(NodeKindPage)) - if !result.Exists { - t.Errorf("expected full path to exist after creation") - } - if result.Page == nil || result.Page.Slug != "members" || result.Page.Title != "Members" { - t.Errorf("expected created 'members' page with correct Slug and Title") + err := svc.SortPages("root", []string{*idC, *idA, *idB}) + if err != nil { + t.Fatalf("SortPages failed: %v", err) } - // Verify the intermediate 'team' page was also created - about := home.Children[0] - if len(about.Children) != 1 || about.Children[0].Slug != "team" { - t.Errorf("expected 'team' page to be a child of 'about'") + root := svc.GetTree() + if root.Children[0].ID != *idC || root.Children[1].ID != *idA || root.Children[2].ID != *idB { + t.Fatalf("unexpected order after sort") } - team := about.Children[0] - if len(team.Children) != 1 || team.Children[0].Slug != "members" { - t.Errorf("expected 'members' page to be a child of 'team'") + if root.Children[0].Position != 0 || 
root.Children[1].Position != 1 || root.Children[2].Position != 2 { + t.Fatalf("expected positions to be reindexed") } } -func TestTreeService_EnsurePagePath_EmptyPath(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_SortPages_InvalidLength(t *testing.T) { + svc, _ := newLoadedService(t) - result, err := service.EnsurePagePath("system", "", "Root") + _, _ = svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) + _, _ = svc.CreateNode("system", nil, "B", "b", ptrKind(NodeKindPage)) + + err := svc.SortPages("root", []string{"only-one"}) if err == nil { - t.Fatalf("expected error for empty path, got nil") + t.Fatalf("expected error for invalid length") } - - if result != nil { - t.Errorf("expected nil result for empty path") + if !errors.Is(err, ErrInvalidSortOrder) { + t.Fatalf("expected ErrInvalidSortOrder, got: %v", err) } } -func TestTreeService_EnsurePagePath_PathStartingWithSlash(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_SortPages_DuplicateID(t *testing.T) { + svc, _ := newLoadedService(t) - result, err := service.EnsurePagePath("system", "/leading/slash", "Invalid") - if err != nil { - t.Fatalf("expected error for invalid path, got nil") - } + idA, _ := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) + idB, _ := svc.CreateNode("system", nil, "B", "b", ptrKind(NodeKindPage)) - if result == nil { - t.Errorf("expected nil result for invalid path") + err := svc.SortPages("root", []string{*idA, *idA, *idB}) + if err == nil { + t.Fatalf("expected error for duplicate IDs") } } -func TestTreeService_MigrateToV2_PagesWithoutFrontmatter(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - - // Create pages without frontmatter - _, err := service.CreatePage("system", nil, "Page1", "page1") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) 
- } - page1 := service.GetTree().Children[0] - - _, err = service.CreatePage("system", &page1.ID, "Page2", "page2") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - page2 := page1.Children[0] - - // Write content without frontmatter - page1Path := filepath.Join(tmpDir, "root", "page1.md") - page2Path := filepath.Join(tmpDir, "root", "page1", "page2.md") +// --- E) Routing, Lookup, Ensure --- - err = os.WriteFile(page1Path, []byte("# Page 1 Content\nHello World"), 0644) - if err != nil { - t.Fatalf("Failed to write page1: %v", err) - } - - err = os.WriteFile(page2Path, []byte("# Page 2 Content\nNested content"), 0644) - if err != nil { - t.Fatalf("Failed to write page2: %v", err) - } +func TestTreeService_FindPageByRoutePath_ReturnsContent(t *testing.T) { + svc, _ := newLoadedService(t) - // Run migration - err = service.migrateToV2() - if err != nil { - t.Fatalf("migrateToV2 failed: %v", err) - } + archID, _ := svc.CreateNode("system", nil, "Architecture", "architecture", ptrKind(NodeKindPage)) + // create child -> converts arch to section + projectID, _ := svc.CreateNode("system", archID, "Project A", "project-a", ptrKind(NodeKindPage)) + _, _ = svc.CreateNode("system", projectID, "Specs", "specs", ptrKind(NodeKindPage)) - // Verify frontmatter was added to page1 - content1, err := os.ReadFile(page1Path) - if err != nil { - t.Fatalf("Failed to read page1 after migration: %v", err) - } - fm1, body1, has1, err := ParseFrontmatter(string(content1)) - if err != nil { - t.Fatalf("Failed to parse frontmatter for page1: %v", err) - } - if !has1 { - t.Error("Expected page1 to have frontmatter after migration") - } - if fm1.LeafWikiID != page1.ID { - t.Errorf("Expected page1 frontmatter ID to be %s, got %s", page1.ID, fm1.LeafWikiID) - } - if fm1.LeafWikiTitle != "Page1" { - t.Errorf("Expected page1 frontmatter title to be 'Page1', got %s", fm1.LeafWikiTitle) - } - if !strings.Contains(body1, "# Page 1 Content") { - t.Error("Expected page1 body to be 
preserved") + // Update specs content + specsNode := svc.GetTree().Children[0].Children[0].Children[0] + body := "# Specs\nHello" + if err := svc.UpdateNode("system", specsNode.ID, "Specs", "specs", &body); err != nil { + t.Fatalf("UpdateNode content failed: %v", err) } - // Verify frontmatter was added to page2 - content2, err := os.ReadFile(page2Path) - if err != nil { - t.Fatalf("Failed to read page2 after migration: %v", err) - } - fm2, body2, has2, err := ParseFrontmatter(string(content2)) + page, err := svc.FindPageByRoutePath(svc.GetTree().Children, "architecture/project-a/specs") if err != nil { - t.Fatalf("Failed to parse frontmatter for page2: %v", err) - } - if !has2 { - t.Error("Expected page2 to have frontmatter after migration") + t.Fatalf("FindPageByRoutePath failed: %v", err) } - if fm2.LeafWikiID != page2.ID { - t.Errorf("Expected page2 frontmatter ID to be %s, got %s", page2.ID, fm2.LeafWikiID) + if page.Slug != "specs" { + t.Fatalf("expected slug specs, got %q", page.Slug) } - if !strings.Contains(body2, "# Page 2 Content") { - t.Error("Expected page2 body to be preserved") + if !strings.Contains(page.Content, "Hello") { + t.Fatalf("expected content to include Hello, got: %q", page.Content) } } -func TestTreeService_MigrateToV2_PagesWithExistingFrontmatter(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - - // Create page - _, err := service.CreatePage("system", nil, "Page1", "page1") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - page1 := service.GetTree().Children[0] - - // Write content with existing frontmatter - page1Path := filepath.Join(tmpDir, "root", "page1.md") - existingContent := "---\nleafwiki_id: " + page1.ID + "\nleafwiki_title: Custom Title\n---\n# Page 1 Content" - err = os.WriteFile(page1Path, []byte(existingContent), 0644) - if err != nil { - t.Fatalf("Failed to write page1: %v", err) - } +func TestTreeService_LookupPagePath_Segments(t *testing.T) { + svc, _ 
:= newLoadedService(t) - // Run migration - err = service.migrateToV2() - if err != nil { - t.Fatalf("migrateToV2 failed: %v", err) - } + homeID, _ := svc.CreateNode("system", nil, "Home", "home", ptrKind(NodeKindPage)) + _, _ = svc.CreateNode("system", homeID, "About", "about", ptrKind(NodeKindPage)) - // Verify frontmatter was not modified (should be unchanged) - content1, err := os.ReadFile(page1Path) + lookup, err := svc.LookupPagePath(svc.GetTree().Children, "home/about/team") if err != nil { - t.Fatalf("Failed to read page1 after migration: %v", err) + t.Fatalf("LookupPagePath failed: %v", err) } - fm1, body1, has1, err := ParseFrontmatter(string(content1)) - if err != nil { - t.Fatalf("Failed to parse frontmatter for page1: %v", err) + if lookup.Exists { + t.Fatalf("expected full path to not exist") } - if !has1 { - t.Error("Expected page1 to have frontmatter after migration") + if len(lookup.Segments) != 3 { + t.Fatalf("expected 3 segments, got %d", len(lookup.Segments)) } - if fm1.LeafWikiID != page1.ID { - t.Errorf("Expected page1 frontmatter ID to be %s, got %s", page1.ID, fm1.LeafWikiID) + if !lookup.Segments[0].Exists || lookup.Segments[0].ID == nil { + t.Fatalf("expected home segment to exist with ID") } - if fm1.LeafWikiTitle != "Custom Title" { - t.Errorf("Expected page1 frontmatter title to be 'Custom Title', got %s", fm1.LeafWikiTitle) + if !lookup.Segments[1].Exists || lookup.Segments[1].ID == nil { + t.Fatalf("expected about segment to exist with ID") } - if !strings.Contains(body1, "# Page 1 Content") { - t.Error("Expected page1 body to be preserved") + if lookup.Segments[2].Exists || lookup.Segments[2].ID != nil { + t.Fatalf("expected team to not exist") } } -func TestTreeService_MigrateToV2_MissingFiles(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_EnsurePagePath_CreatesIntermediateSectionsAndFinalPage(t *testing.T) { + svc, _ := newLoadedService(t) - // Create a 
page and its file - _, err := service.CreatePage("system", nil, "Page1", "page1") + // Ensure a deep path; intermediate nodes should become sections + res, err := svc.EnsurePagePath("system", "home/about/team/members", "Members", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("EnsurePagePath failed: %v", err) } - - // Write content to page1 - page1Path := filepath.Join(tmpDir, "root", "page1.md") - err = os.WriteFile(page1Path, []byte("# Page 1 Content"), 0644) - if err != nil { - t.Fatalf("Failed to write page1: %v", err) - } - - // Create a page with a child - _, err = service.CreatePage("system", nil, "Parent", "parent") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - parent := service.GetTree().Children[1] - - _, err = service.CreatePage("system", &parent.ID, "Child", "child") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - - // Write content to child without frontmatter - childPath := filepath.Join(tmpDir, "root", "parent", "child.md") - err = os.WriteFile(childPath, []byte("# Child Content"), 0644) - if err != nil { - t.Fatalf("Failed to write child: %v", err) - } - - // Remove the parent index.md file (parent has children so it's in a folder) - parentIndexPath := filepath.Join(tmpDir, "root", "parent", "index.md") - if _, err := os.Stat(parentIndexPath); err == nil { - os.Remove(parentIndexPath) - } - - // Run migration - should handle missing parent file gracefully and still migrate child - err = service.migrateToV2() - if err != nil { - t.Fatalf("migrateToV2 should handle missing files gracefully, got error: %v", err) + if res.Page == nil || res.Page.Slug != "members" { + t.Fatalf("expected final page 'members'") } - // Verify page1 was migrated - content1, err := os.ReadFile(page1Path) - if err != nil { - t.Fatalf("Failed to read page1 after migration: %v", err) - } - _, _, has1, err := ParseFrontmatter(string(content1)) + // home/about/team should exist as path now + lookup, 
err := svc.LookupPagePath(svc.GetTree().Children, "home/about/team/members") if err != nil { - t.Fatalf("Failed to parse frontmatter for page1: %v", err) + t.Fatalf("LookupPagePath failed: %v", err) } - if !has1 { - t.Error("Expected page1 to have frontmatter after migration") + if !lookup.Exists { + t.Fatalf("expected path to exist after EnsurePagePath") } +} - // Verify child was still migrated even though parent file is missing - childContent, err := os.ReadFile(childPath) - if err != nil { - t.Fatalf("Failed to read child after migration: %v", err) - } - _, _, hasChild, err := ParseFrontmatter(string(childContent)) - if err != nil { - t.Fatalf("Failed to parse frontmatter for child: %v", err) +// --- F) Migration V2 (frontmatter backfill) --- +func TestTreeService_LoadTree_MigratesToV2_AddsFrontmatterAndPreservesBody(t *testing.T) { + if CurrentSchemaVersion < 2 { + t.Skip("requires schema v2+") } - if !hasChild { - t.Error("Expected child to have frontmatter after migration even if parent file is missing") - } -} -func TestTreeService_MigrateToV2_SkipsNonExistentFiles(t *testing.T) { tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - // Create a simple page - _, err := service.CreatePage("system", nil, "Page1", "page1") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) + // start on v1 (or generally: current-1) + if err := saveSchema(tmpDir, CurrentSchemaVersion-1); err != nil { + t.Fatalf("saveSchema failed: %v", err) } - page1 := service.GetTree().Children[0] - // Write content without frontmatter - page1Path := filepath.Join(tmpDir, "root", "page1.md") - err = os.WriteFile(page1Path, []byte("# Page 1 Content"), 0644) - if err != nil { - t.Fatalf("Failed to write page1: %v", err) - } - - // Manually add a node to the tree without creating its file - // This simulates a corrupted tree structure - ghostNode := &PageNode{ - ID: "ghost-node", - Title: "Ghost", - Slug: "ghost", - Parent: service.tree, + svc := 
NewTreeService(tmpDir) + if err := svc.LoadTree(); err != nil { + t.Fatalf("LoadTree failed: %v", err) } - service.tree.Children = append(service.tree.Children, ghostNode) - err = service.migrateToV2() + id, err := svc.CreateNode("system", nil, "Page1", "page1", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("Expected migration to skip missing files gracefully, got error: %v", err) + t.Fatalf("CreateNode failed: %v", err) } - // page1 should have frontmatter now - content1, err := os.ReadFile(page1Path) - if err != nil { - t.Fatalf("Failed to read page1 after migration: %v", err) - } - fm1, body1, has1, err := ParseFrontmatter(string(content1)) - if err != nil { - t.Fatalf("Failed to parse frontmatter for page1: %v", err) - } - if !has1 { - t.Fatal("Expected page1 to have frontmatter after migration") - } - if fm1.LeafWikiID != page1.ID { - t.Fatalf("Expected leafwiki_id %q, got %q", page1.ID, fm1.LeafWikiID) - } - if !strings.Contains(body1, "# Page 1 Content") { - t.Fatalf("Expected body to be preserved") + // IMPORTANT: persist tree so the next service instance sees the node + if err := svc.SaveTree(); err != nil { + t.Fatalf("SaveTree failed: %v", err) } -} - -func TestTreeService_MigrateToV2_TreeNotLoaded(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - // Do NOT load tree - // Run migration should fail - err := service.migrateToV2() - if err == nil { - t.Error("Expected error when tree is not loaded") - } - if !errors.Is(err, ErrTreeNotLoaded) { - t.Errorf("Expected ErrTreeNotLoaded, got: %v", err) + // overwrite file without FM + pagePath := filepath.Join(tmpDir, "root", "page1.md") + body := "# Page 1 Content\nHello World\n" + if err := os.WriteFile(pagePath, []byte(body), 0o644); err != nil { + t.Fatalf("write old content failed: %v", err) } -} - -func TestTreeService_MigrateToV2_PartialFrontmatter(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - // Create page - _, err 
:= service.CreatePage("system", nil, "Page1", "page1") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) + // force schema old again + if err := saveSchema(tmpDir, CurrentSchemaVersion-1); err != nil { + t.Fatalf("saveSchema failed: %v", err) } - page1 := service.GetTree().Children[0] - // Write content with partial frontmatter (missing ID) - page1Path := filepath.Join(tmpDir, "root", "page1.md") - partialContent := "---\nleafwiki_title: Existing Title\n---\n# Page 1 Content" - err = os.WriteFile(page1Path, []byte(partialContent), 0644) - if err != nil { - t.Fatalf("Failed to write page1: %v", err) + loaded := NewTreeService(tmpDir) + if err := loaded.LoadTree(); err != nil { + t.Fatalf("LoadTree (migrating) failed: %v", err) } - // Run migration - err = service.migrateToV2() + raw, err := os.ReadFile(pagePath) if err != nil { - t.Fatalf("migrateToV2 failed: %v", err) + t.Fatalf("read migrated file: %v", err) } - // Verify ID was added but title was preserved - content1, err := os.ReadFile(page1Path) + fm, migratedBody, has, err := ParseFrontmatter(string(raw)) if err != nil { - t.Fatalf("Failed to read page1 after migration: %v", err) + t.Fatalf("ParseFrontmatter: %v", err) } - fm1, _, _, err := ParseFrontmatter(string(content1)) - if err != nil { - t.Fatalf("Failed to parse frontmatter for page1: %v", err) + if !has { + t.Fatalf("expected frontmatter after migration, got:\n%s", string(raw)) } - if fm1.LeafWikiID != page1.ID { - t.Errorf("Expected page1 frontmatter ID to be added: %s, got %s", page1.ID, fm1.LeafWikiID) + if fm.LeafWikiID != *id { + t.Fatalf("expected leafwiki_id=%q, got %q", *id, fm.LeafWikiID) } - if fm1.LeafWikiTitle != "Existing Title" { - t.Errorf("Expected page1 frontmatter title to be preserved: 'Existing Title', got %s", fm1.LeafWikiTitle) + if strings.TrimSpace(fm.LeafWikiTitle) == "" { + t.Fatalf("expected leafwiki_title to be set") } -} - -func TestTreeService_MigrateToV2_EmptyTree(t *testing.T) { - tmpDir := t.TempDir() - 
service := NewTreeService(tmpDir) - _ = service.LoadTree() - - // Run migration on empty tree (only root, no children) - err := service.migrateToV2() - if err != nil { - t.Fatalf("migrateToV2 should succeed on empty tree, got error: %v", err) + if migratedBody != body { + t.Fatalf("expected body preserved exactly.\nGot:\n%q\nWant:\n%q", migratedBody, body) } } -func TestTreeService_MigrateToV2_PreservesBodyContent(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - - // Create page - _, err := service.CreatePage("system", nil, "Page1", "page1") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - - // Write complex content without frontmatter - page1Path := filepath.Join(tmpDir, "root", "page1.md") - complexContent := `# Title +// --- small util --- -This is a paragraph. - -## Section 1 - -- Item 1 -- Item 2 - -` + "```go\nfunc main() {\n\tfmt.Println(\"Hello\")\n}\n```" + ` - -### Subsection - -More content here. - ---- - -Horizontal rule above. 
-` - err = os.WriteFile(page1Path, []byte(complexContent), 0644) - if err != nil { - t.Fatalf("Failed to write page1: %v", err) - } - - // Run migration - err = service.migrateToV2() - if err != nil { - t.Fatalf("migrateToV2 failed: %v", err) - } - - // Verify body content is exactly preserved - content1, err := os.ReadFile(page1Path) - if err != nil { - t.Fatalf("Failed to read page1 after migration: %v", err) - } - _, body1, _, err := ParseFrontmatter(string(content1)) - if err != nil { - t.Fatalf("Failed to parse frontmatter for page1: %v", err) - } - if body1 != complexContent { - t.Errorf("Expected body to be exactly preserved.\nGot:\n%s\n\nWant:\n%s", body1, complexContent) - } -} +func ptrKind(k NodeKind) *NodeKind { return &k } diff --git a/internal/http/api/create_page.go b/internal/http/api/create_page.go index 702c95e4..825b4dce 100644 --- a/internal/http/api/create_page.go +++ b/internal/http/api/create_page.go @@ -4,6 +4,7 @@ import ( "net/http" "github.com/gin-gonic/gin" + "github.com/perber/wiki/internal/core/tree" auth_middleware "github.com/perber/wiki/internal/http/middleware/auth" "github.com/perber/wiki/internal/wiki" ) @@ -27,7 +28,8 @@ func CreatePageHandler(w *wiki.Wiki) gin.HandlerFunc { return } - page, err := w.CreatePage(user.ID, req.ParentID, req.Title, req.Slug) + kind := tree.NodeKindPage + page, err := w.CreatePage(user.ID, req.ParentID, req.Title, req.Slug, &kind) if err != nil { respondWithError(c, err) return diff --git a/internal/http/api/ensure_page.go b/internal/http/api/ensure_page.go index db56f85f..a13ee13e 100644 --- a/internal/http/api/ensure_page.go +++ b/internal/http/api/ensure_page.go @@ -4,6 +4,7 @@ import ( "net/http" "github.com/gin-gonic/gin" + "github.com/perber/wiki/internal/core/tree" auth_middleware "github.com/perber/wiki/internal/http/middleware/auth" "github.com/perber/wiki/internal/wiki" ) @@ -26,7 +27,8 @@ func EnsurePageHandler(w *wiki.Wiki) gin.HandlerFunc { return } - result, err := w.EnsurePath(user.ID, 
req.Path, req.TargetTitle) + kind := tree.NodeKindPage + result, err := w.EnsurePath(user.ID, req.Path, req.TargetTitle, &kind) if err != nil { respondWithError(c, err) return diff --git a/internal/http/api/update_page.go b/internal/http/api/update_page.go index ded576bb..d49a0cc6 100644 --- a/internal/http/api/update_page.go +++ b/internal/http/api/update_page.go @@ -4,6 +4,7 @@ import ( "net/http" "github.com/gin-gonic/gin" + "github.com/perber/wiki/internal/core/tree" auth_middleware "github.com/perber/wiki/internal/http/middleware/auth" "github.com/perber/wiki/internal/wiki" ) @@ -13,9 +14,9 @@ func UpdatePageHandler(w *wiki.Wiki) gin.HandlerFunc { id := c.Param("id") var req struct { - Title string `json:"title" binding:"required"` - Slug string `json:"slug" binding:"required"` - Content string `json:"content"` + Title string `json:"title" binding:"required"` + Slug string `json:"slug" binding:"required"` + Content *string `json:"content"` } if err := c.ShouldBindJSON(&req); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid payload"}) @@ -27,7 +28,8 @@ func UpdatePageHandler(w *wiki.Wiki) gin.HandlerFunc { return } - page, err := w.UpdatePage(user.ID, id, req.Title, req.Slug, req.Content) + kind := tree.NodeKindPage + page, err := w.UpdatePage(user.ID, id, req.Title, req.Slug, req.Content, &kind) if err != nil { respondWithError(c, err) return diff --git a/internal/http/router_test.go b/internal/http/router_test.go index cefcc288..9ce24efa 100644 --- a/internal/http/router_test.go +++ b/internal/http/router_test.go @@ -12,9 +12,15 @@ import ( "time" "github.com/gin-gonic/gin" + "github.com/perber/wiki/internal/core/tree" "github.com/perber/wiki/internal/wiki" ) +func pageNodeKind() *tree.NodeKind { + kind := tree.NodeKindPage + return &kind +} + func createWikiTestInstance(t *testing.T) *wiki.Wiki { w, err := wiki.NewWiki(&wiki.WikiOptions{ StorageDir: t.TempDir(), @@ -306,7 +312,7 @@ func TestDeletePageEndpoint(t *testing.T) { defer w.Close() 
router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Delete Me", "delete-me") + _, err := w.CreatePage("system", nil, "Delete Me", "delete-me", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -339,11 +345,11 @@ func TestDeletePageEndpoint_HasChildren(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - parent, err := w.CreatePage("system", nil, "Parent", "parent") + parent, err := w.CreatePage("system", nil, "Parent", "parent", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } - _, err = w.CreatePage("system", &parent.ID, "Child", "child") + _, err = w.CreatePage("system", &parent.ID, "Child", "child", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -360,11 +366,11 @@ func TestDeletePageEndpoint_Recursive(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - parent, err := w.CreatePage("system", nil, "Parent", "parent") + parent, err := w.CreatePage("system", nil, "Parent", "parent", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } - _, err = w.CreatePage("system", &parent.ID, "Child", "child") + _, err = w.CreatePage("system", &parent.ID, "Child", "child", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -384,7 +390,7 @@ func TestUpdatePageEndpoint(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Original Title", "original-title") + _, err := w.CreatePage("system", nil, "Original Title", "original-title", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -437,7 +443,7 @@ func TestUpdatePage_SlugRemainsIfUnchanged(t *testing.T) { router := createRouterTestInstance(w, t) // Create a page - created, err := w.CreatePage("system", nil, "Immutable Slug", "immutable-slug") + created, err := w.CreatePage("system", nil, "Immutable Slug", "immutable-slug", pageNodeKind()) if err != nil 
{ t.Fatalf("Failed to create page: %v", err) } @@ -471,13 +477,13 @@ func TestUpdatePage_PageAlreadyExists(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Original Title", "original-title") + _, err := w.CreatePage("system", nil, "Original Title", "original-title", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } page := w.GetTree().Children[0] - _, err = w.CreatePage("system", nil, "Conflict Title", "conflict-title") + _, err = w.CreatePage("system", nil, "Conflict Title", "conflict-title", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -540,7 +546,7 @@ func TestGetPageEndpoint(t *testing.T) { router := createRouterTestInstance(w, t) // Create a page - _, err := w.CreatePage("system", nil, "Welcome", "welcome") + _, err := w.CreatePage("system", nil, "Welcome", "welcome", pageNodeKind()) if err != nil { t.Fatalf("Failed to create page: %v", err) } @@ -602,11 +608,11 @@ func TestMovePageEndpoint(t *testing.T) { router := createRouterTestInstance(w, t) // Create two pages a and b - _, err := w.CreatePage("system", nil, "Section A", "section-a") + _, err := w.CreatePage("system", nil, "Section A", "section-a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } - _, err = w.CreatePage("system", nil, "Section B", "section-b") + _, err = w.CreatePage("system", nil, "Section B", "section-b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -668,7 +674,7 @@ func TestMovePageEndpoint_ParentNotFound(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Section A", "section-a") + _, err := w.CreatePage("system", nil, "Section A", "section-a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -689,13 +695,13 @@ func TestMovePageEndpoint_CircularReference(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) 
- _, err := w.CreatePage("system", nil, "Section A", "section-a") + _, err := w.CreatePage("system", nil, "Section A", "section-a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } a := w.GetTree().Children[0] - _, err = w.CreatePage("system", &a.ID, "Section B", "section-b") + _, err = w.CreatePage("system", &a.ID, "Section B", "section-b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -714,19 +720,19 @@ func TestMovePage_FailsIfTargetAlreadyHasPageWithSameSlug(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Section A", "section-a") + _, err := w.CreatePage("system", nil, "Section A", "section-a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } a := w.GetTree().Children[0] - _, err = w.CreatePage("system", nil, "Section B", "section-b") + _, err = w.CreatePage("system", nil, "Section B", "section-b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } // Create Conflict Page in b - conflictPage, err := w.CreatePage("system", &a.ID, "Section B", "section-b") + conflictPage, err := w.CreatePage("system", &a.ID, "Section B", "section-b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -744,7 +750,7 @@ func TestMovePage_InTheSamePlace(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Section A", "section-a") + _, err := w.CreatePage("system", nil, "Section A", "section-a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -763,15 +769,15 @@ func TestSortPagesEndpoint(t *testing.T) { router := createRouterTestInstance(w, t) // Create pages - page1, err := w.CreatePage("system", nil, "Page 1", "page-1") + page1, err := w.CreatePage("system", nil, "Page 1", "page-1", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } - page2, err := w.CreatePage("system", nil, "Page 2", 
"page-2") + page2, err := w.CreatePage("system", nil, "Page 2", "page-2", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } - page3, err := w.CreatePage("system", nil, "Page 3", "page-3") + page3, err := w.CreatePage("system", nil, "Page 3", "page-3", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -1342,7 +1348,7 @@ func TestAssetEndpoints(t *testing.T) { } // Step 1: Create page direkt über Wiki-API - page, err := w.CreatePage("system", nil, "Assets Page", "assets-page") + page, err := w.CreatePage("system", nil, "Assets Page", "assets-page", pageNodeKind()) if err != nil { t.Fatalf("Failed to create page: %v", err) } diff --git a/internal/links/link_service_test.go b/internal/links/link_service_test.go index c9517574..37118b16 100644 --- a/internal/links/link_service_test.go +++ b/internal/links/link_service_test.go @@ -6,6 +6,11 @@ import ( "github.com/perber/wiki/internal/core/tree" ) +func pageNodeKind() *tree.NodeKind { + kind := tree.NodeKindPage + return &kind +} + func TestExtractLinksFromMarkdown_FiltersExternalAndNormalizes(t *testing.T) { md := ` # Example @@ -58,18 +63,18 @@ func setupTreeForLinksTest(t *testing.T) (*tree.TreeService, string, string) { } // create "docs" under root - docsIDPtr, err := ts.CreatePage("system", nil, "Docs", "docs") + docsIDPtr, err := ts.CreateNode("system", nil, "Docs", "docs", pageNodeKind()) if err != nil { t.Fatalf("CreatePage docs failed: %v", err) } docsID := *docsIDPtr // create "page1" and "page2" under docs - page1IDPtr, err := ts.CreatePage("system", &docsID, "Page 1", "page1") + page1IDPtr, err := ts.CreateNode("system", &docsID, "Page 1", "page1", pageNodeKind()) if err != nil { t.Fatalf("CreatePage page1 failed: %v", err) } - page2IDPtr, err := ts.CreatePage("system", &docsID, "Page 2", "page2") + page2IDPtr, err := ts.CreateNode("system", &docsID, "Page 2", "page2", pageNodeKind()) if err != nil { t.Fatalf("CreatePage page2 failed: %v", err) } @@ -168,13 
+173,13 @@ func setupLinkService(t *testing.T) (*LinkService, *tree.TreeService, *LinksStor func createSimpleLinkedPages(t *testing.T, ts *tree.TreeService) (pageAID, pageBID string) { t.Helper() - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage a failed: %v", err) } pageAID = *aIDPtr - bIDPtr, err := ts.CreatePage("system", nil, "Page B", "b") + bIDPtr, err := ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage b failed: %v", err) } @@ -185,7 +190,7 @@ func createSimpleLinkedPages(t *testing.T, ts *tree.TreeService) (pageAID, pageB t.Fatalf("GetPage a failed: %v", err) } contentA := "Link to B: [Go to B](/b)" - if err := ts.UpdatePage("system", aPage.ID, aPage.Title, aPage.Slug, contentA); err != nil { + if err := ts.UpdateNode("system", aPage.ID, aPage.Title, aPage.Slug, &contentA); err != nil { t.Fatalf("UpdatePage a failed: %v", err) } @@ -194,7 +199,7 @@ func createSimpleLinkedPages(t *testing.T, ts *tree.TreeService) (pageAID, pageB t.Fatalf("GetPage b failed: %v", err) } contentB := "# Page B\nNo outgoing links." - if err := ts.UpdatePage("system", bPage.ID, bPage.Title, bPage.Slug, contentB); err != nil { + if err := ts.UpdateNode("system", bPage.ID, bPage.Title, bPage.Slug, &contentB); err != nil { t.Fatalf("UpdatePage b failed: %v", err) } @@ -242,7 +247,8 @@ func TestLinkService_IndexAllPages_ReplacesExistingLinks(t *testing.T) { if err != nil { t.Fatalf("GetPage a failed: %v", err) } - if err := ts.UpdatePage("system", aPage.ID, aPage.Title, aPage.Slug, "No more links."); err != nil { + var noLinks string = "No more links." 
+ if err := ts.UpdateNode("system", aPage.ID, aPage.Title, aPage.Slug, &noLinks); err != nil { t.Fatalf("UpdatePage a failed: %v", err) } @@ -355,9 +361,9 @@ func TestLinkService_GetOutgoingLinksForPage_ReturnsOutgoingLinks(t *testing.T) func TestLinkService_GetOutgoingLinksForPage_NoOutgoings(t *testing.T) { svc, ts, _ := setupLinkService(t) - aIDPtr, err := ts.CreatePage("system", nil, "Lonely Page", "lonely") + aIDPtr, err := ts.CreateNode("system", nil, "Lonely Page", "lonely", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage lonely failed: %v", err) + t.Fatalf("CreateNode lonely failed: %v", err) } lonelyID := *aIDPtr @@ -366,8 +372,9 @@ func TestLinkService_GetOutgoingLinksForPage_NoOutgoings(t *testing.T) { t.Fatalf("GetPage lonely failed: %v", err) } - if err := ts.UpdatePage("system", page.ID, page.Title, page.Slug, "Just some text, no links."); err != nil { - t.Fatalf("UpdatePage lonely failed: %v", err) + var noLinks string = "Just some text, no links." + if err := ts.UpdateNode("system", page.ID, page.Title, page.Slug, &noLinks); err != nil { + t.Fatalf("UpdateNode lonely failed: %v", err) } if err := svc.IndexAllPages(); err != nil { @@ -434,9 +441,9 @@ func TestToOutgoingResult_MapsOutgoingToResultItems(t *testing.T) { func TestLinkService_LateCreatedTarget_BecomesResolvedAfterReindex(t *testing.T) { svc, ts, _ := setupLinkService(t) - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage a failed: %v", err) + t.Fatalf("CreateNode a failed: %v", err) } pageAID := *aIDPtr @@ -444,8 +451,9 @@ func TestLinkService_LateCreatedTarget_BecomesResolvedAfterReindex(t *testing.T) if err != nil { t.Fatalf("GetPage a failed: %v", err) } - if err := ts.UpdatePage("system", aPage.ID, aPage.Title, aPage.Slug, "Link to B: [Go](/b)"); err != nil { - t.Fatalf("UpdatePage a failed: %v", err) + var linkToB string = "Link to B: [Go](/b)" + if err 
:= ts.UpdateNode("system", aPage.ID, aPage.Title, aPage.Slug, &linkToB); err != nil { + t.Fatalf("UpdateNode a failed: %v", err) } if err := svc.IndexAllPages(); err != nil { @@ -469,9 +477,9 @@ func TestLinkService_LateCreatedTarget_BecomesResolvedAfterReindex(t *testing.T) t.Fatalf("expected empty ToPageID for broken link, got %q", out1.Outgoings[0].ToPageID) } - bIDPtr, err := ts.CreatePage("system", nil, "Page B", "b") + bIDPtr, err := ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage b failed: %v", err) + t.Fatalf("CreateNode b failed: %v", err) } pageBID := *bIDPtr @@ -479,8 +487,9 @@ func TestLinkService_LateCreatedTarget_BecomesResolvedAfterReindex(t *testing.T) if err != nil { t.Fatalf("GetPage b failed: %v", err) } - if err := ts.UpdatePage("system", bPage.ID, bPage.Title, bPage.Slug, "# Page B"); err != nil { - t.Fatalf("UpdatePage b failed: %v", err) + var pageBContent string = "# Page B" + if err := ts.UpdateNode("system", bPage.ID, bPage.Title, bPage.Slug, &pageBContent); err != nil { + t.Fatalf("UpdateNode b failed: %v", err) } if err := svc.IndexAllPages(); err != nil { @@ -519,9 +528,9 @@ func TestLinkService_LateCreatedTarget_BecomesResolvedAfterReindex(t *testing.T) func TestLinkService_HealOnPageCreate_ResolvesBrokenLinksWithoutReindex(t *testing.T) { svc, ts, _ := setupLinkService(t) - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } pageAID := *aIDPtr @@ -529,8 +538,9 @@ func TestLinkService_HealOnPageCreate_ResolvesBrokenLinksWithoutReindex(t *testi if err != nil { t.Fatalf("GetPage A failed: %v", err) } - if err := ts.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, "Link to B: [Go](/b)"); err != nil { - t.Fatalf("UpdatePage A failed: %v", err) + var linkToB string = "Link to B: [Go](/b)" + if err 
:= ts.UpdateNode("system", pageA.ID, pageA.Title, pageA.Slug, &linkToB); err != nil { + t.Fatalf("UpdateNode A failed: %v", err) } if err := svc.IndexAllPages(); err != nil { @@ -555,9 +565,9 @@ func TestLinkService_HealOnPageCreate_ResolvesBrokenLinksWithoutReindex(t *testi t.Fatalf("expected empty ToPageID before heal, got %q", out1.Outgoings[0].ToPageID) } - bIDPtr, err := ts.CreatePage("system", nil, "Page B", "b") + bIDPtr, err := ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage B failed: %v", err) + t.Fatalf("CreateNode B failed: %v", err) } pageBID := *bIDPtr @@ -604,21 +614,21 @@ func TestLinksStore_GetBrokenIncomingForPath_ReturnsBrokenLinks(t *testing.T) { svc, ts, store := setupLinkService(t) // Create three pages: A, B, C - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } pageAID := *aIDPtr - bIDPtr, err := ts.CreatePage("system", nil, "Page B", "b") + bIDPtr, err := ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage B failed: %v", err) + t.Fatalf("CreateNode B failed: %v", err) } pageBID := *bIDPtr - cIDPtr, err := ts.CreatePage("system", nil, "Page C", "c") + cIDPtr, err := ts.CreateNode("system", nil, "Page C", "c", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage C failed: %v", err) + t.Fatalf("CreateNode C failed: %v", err) } pageCID := *cIDPtr @@ -627,16 +637,17 @@ func TestLinksStore_GetBrokenIncomingForPath_ReturnsBrokenLinks(t *testing.T) { if err != nil { t.Fatalf("GetPage A failed: %v", err) } - if err := ts.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, "Link: [Missing](/nonexistent)"); err != nil { - t.Fatalf("UpdatePage A failed: %v", err) + var linkToMissing string = "Link: [Missing](/nonexistent)" + if err := ts.UpdateNode("system", 
pageA.ID, pageA.Title, pageA.Slug, &linkToMissing); err != nil { + t.Fatalf("UpdateNode A failed: %v", err) } pageB, err := ts.GetPage(pageBID) if err != nil { t.Fatalf("GetPage B failed: %v", err) } - if err := ts.UpdatePage("system", pageB.ID, pageB.Title, pageB.Slug, "Link: [Missing](/nonexistent)"); err != nil { - t.Fatalf("UpdatePage B failed: %v", err) + if err := ts.UpdateNode("system", pageB.ID, pageB.Title, pageB.Slug, &linkToMissing); err != nil { + t.Fatalf("UpdateNode B failed: %v", err) } // Page C links to a different broken page @@ -644,8 +655,9 @@ func TestLinksStore_GetBrokenIncomingForPath_ReturnsBrokenLinks(t *testing.T) { if err != nil { t.Fatalf("GetPage C failed: %v", err) } - if err := ts.UpdatePage("system", pageC.ID, pageC.Title, pageC.Slug, "Link: [Other](/other-missing)"); err != nil { - t.Fatalf("UpdatePage C failed: %v", err) + var linkToOther string = "Link: [Other](/other-missing)" + if err := ts.UpdateNode("system", pageC.ID, pageC.Title, pageC.Slug, &linkToOther); err != nil { + t.Fatalf("UpdateNode C failed: %v", err) } // Index all pages to create broken links @@ -692,15 +704,15 @@ func TestLinksStore_GetBrokenIncomingForPath_ReturnsBrokenLinks(t *testing.T) { func TestLinksStore_GetBrokenIncomingForPath_FiltersByPath(t *testing.T) { svc, ts, store := setupLinkService(t) - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } pageAID := *aIDPtr - bIDPtr, err := ts.CreatePage("system", nil, "Page B", "b") + bIDPtr, err := ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage B failed: %v", err) + t.Fatalf("CreateNode B failed: %v", err) } pageBID := *bIDPtr @@ -709,8 +721,9 @@ func TestLinksStore_GetBrokenIncomingForPath_FiltersByPath(t *testing.T) { if err != nil { t.Fatalf("GetPage A failed: %v", err) } 
- if err := ts.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, "Link: [Missing1](/missing1)"); err != nil { - t.Fatalf("UpdatePage A failed: %v", err) + var linkToMissing1 string = "Link: [Missing1](/missing1)" + if err := ts.UpdateNode("system", pageA.ID, pageA.Title, pageA.Slug, &linkToMissing1); err != nil { + t.Fatalf("UpdateNode A failed: %v", err) } // Page B links to "/missing2" @@ -718,8 +731,9 @@ func TestLinksStore_GetBrokenIncomingForPath_FiltersByPath(t *testing.T) { if err != nil { t.Fatalf("GetPage B failed: %v", err) } - if err := ts.UpdatePage("system", pageB.ID, pageB.Title, pageB.Slug, "Link: [Missing2](/missing2)"); err != nil { - t.Fatalf("UpdatePage B failed: %v", err) + var linkToMissing2 string = "Link: [Missing2](/missing2)" + if err := ts.UpdateNode("system", pageB.ID, pageB.Title, pageB.Slug, &linkToMissing2); err != nil { + t.Fatalf("UpdateNode B failed: %v", err) } if err := svc.IndexAllPages(); err != nil { @@ -756,15 +770,15 @@ func TestLinksStore_GetBrokenIncomingForPath_FiltersByPath(t *testing.T) { func TestLinksStore_GetBrokenIncomingForPath_EmptyWhenNoBrokenLinks(t *testing.T) { svc, ts, store := setupLinkService(t) - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } pageAID := *aIDPtr - _, err = ts.CreatePage("system", nil, "Page B", "b") + _, err = ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage B failed: %v", err) + t.Fatalf("CreateNode B failed: %v", err) } // Page A links to existing Page B (not broken) @@ -772,8 +786,9 @@ func TestLinksStore_GetBrokenIncomingForPath_EmptyWhenNoBrokenLinks(t *testing.T if err != nil { t.Fatalf("GetPage A failed: %v", err) } - if err := ts.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, "Link: [To B](/b)"); err != nil { - 
t.Fatalf("UpdatePage A failed: %v", err) + var linkToB string = "Link: [To B](/b)" + if err := ts.UpdateNode("system", pageA.ID, pageA.Title, pageA.Slug, &linkToB); err != nil { + t.Fatalf("UpdateNode A failed: %v", err) } if err := svc.IndexAllPages(); err != nil { @@ -805,19 +820,19 @@ func TestLinksStore_GetBrokenIncomingForPath_OrdersByFromTitle(t *testing.T) { svc, ts, store := setupLinkService(t) // Create three pages with titles that should be ordered alphabetically - zIDPtr, err := ts.CreatePage("system", nil, "Zebra Page", "z") + zIDPtr, err := ts.CreateNode("system", nil, "Zebra Page", "z", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage Z failed: %v", err) + t.Fatalf("CreateNode Z failed: %v", err) } - aIDPtr, err := ts.CreatePage("system", nil, "Alpha Page", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Alpha Page", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } - mIDPtr, err := ts.CreatePage("system", nil, "Middle Page", "m") + mIDPtr, err := ts.CreateNode("system", nil, "Middle Page", "m", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage M failed: %v", err) + t.Fatalf("CreateNode M failed: %v", err) } // All three pages link to the same non-existent page @@ -827,8 +842,9 @@ func TestLinksStore_GetBrokenIncomingForPath_OrdersByFromTitle(t *testing.T) { if err != nil { t.Fatalf("GetPage(%s) failed: %v", id, err) } - if err := ts.UpdatePage("system", page.ID, page.Title, page.Slug, "Link: [Missing](/missing)"); err != nil { - t.Fatalf("UpdatePage(%s) failed: %v", id, err) + var linkToMissing string = "Link: [Missing](/missing)" + if err := ts.UpdateNode("system", page.ID, page.Title, page.Slug, &linkToMissing); err != nil { + t.Fatalf("UpdateNode(%s) failed: %v", id, err) } } @@ -859,9 +875,9 @@ func TestLinksStore_GetBrokenIncomingForPath_OnlyReturnsBrokenNotResolved(t *tes svc, ts, store := setupLinkService(t) // Create Page A that links to a non-existent 
page - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } pageAID := *aIDPtr @@ -869,8 +885,9 @@ func TestLinksStore_GetBrokenIncomingForPath_OnlyReturnsBrokenNotResolved(t *tes if err != nil { t.Fatalf("GetPage A failed: %v", err) } - if err := ts.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, "Link: [To B](/b)"); err != nil { - t.Fatalf("UpdatePage A failed: %v", err) + var linkToB string = "Link: [To B](/b)" + if err := ts.UpdateNode("system", pageA.ID, pageA.Title, pageA.Slug, &linkToB); err != nil { + t.Fatalf("UpdateNode A failed: %v", err) } // Index - this creates a broken link since B doesn't exist @@ -888,9 +905,9 @@ func TestLinksStore_GetBrokenIncomingForPath_OnlyReturnsBrokenNotResolved(t *tes } // Now create Page B - this should heal the link - bIDPtr, err := ts.CreatePage("system", nil, "Page B", "b") + bIDPtr, err := ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage B failed: %v", err) + t.Fatalf("CreateNode B failed: %v", err) } pageBID := *bIDPtr @@ -898,8 +915,9 @@ func TestLinksStore_GetBrokenIncomingForPath_OnlyReturnsBrokenNotResolved(t *tes if err != nil { t.Fatalf("GetPage B failed: %v", err) } - if err := ts.UpdatePage("system", pageB.ID, pageB.Title, pageB.Slug, "# Page B"); err != nil { - t.Fatalf("UpdatePage B failed: %v", err) + var contentB string = "# Page B" + if err := ts.UpdateNode("system", pageB.ID, pageB.Title, pageB.Slug, &contentB); err != nil { + t.Fatalf("UpdateNode B failed: %v", err) } // Use HealLinksForExactPath to heal the broken link diff --git a/internal/search/bootstrap_test.go b/internal/search/bootstrap_test.go index 0a483272..1aaea37f 100644 --- a/internal/search/bootstrap_test.go +++ b/internal/search/bootstrap_test.go @@ -19,9 +19,10 @@ func 
TestBuildAndRunIndexer_BasicIndexing(t *testing.T) { t.Fatalf("failed to load tree: %v", err) } - _, err := treeSvc.CreatePage("system", nil, "Docs", "docs") + var pageNodeKind tree.NodeKind = "page" + _, err := treeSvc.CreateNode("system", nil, "Docs", "docs", &pageNodeKind) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("CreateNode failed: %v", err) } mdPath := filepath.Join(tmp, "root", "docs.md") diff --git a/internal/wiki/wiki.go b/internal/wiki/wiki.go index a2583174..bc3f3615 100644 --- a/internal/wiki/wiki.go +++ b/internal/wiki/wiki.go @@ -3,6 +3,7 @@ package wiki import ( "fmt" "log" + "log/slog" "mime/multipart" "path" "regexp" @@ -31,6 +32,7 @@ type Wiki struct { storageDir string searchWatcher *search.Watcher links *links.LinkService + log *slog.Logger } var emailRegex = regexp.MustCompile(`^[a-zA-Z0-9._%+\-]+@[a-zA-Z0-9.\-]+$`) @@ -65,6 +67,9 @@ type WikiOptions struct { } func NewWiki(options *WikiOptions) (*Wiki, error) { + + logger := slog.Default().With("component", "Wiki") + // Initialize the user store store, err := auth.NewUserStore(options.StorageDir) if err != nil { @@ -112,7 +117,7 @@ func NewWiki(options *WikiOptions) (*Wiki, error) { } linkService := links.NewLinkService(options.StorageDir, treeService, linksStore) if err := linkService.IndexAllPages(); err != nil { - log.Printf("failed to index links of pages: %v", err) + logger.Warn("failed to index links on startup", "error", err) } sqliteIndex, err := search.NewSQLiteIndex(options.StorageDir) @@ -128,18 +133,18 @@ func NewWiki(options *WikiOptions) (*Wiki, error) { go func() { err := search.BuildAndRunIndexer(treeService, sqliteIndex, path.Join(options.StorageDir, "root"), 4, status) if err != nil { - log.Printf("indexing failed: %v", err) + logger.Warn("indexing failed", "error", err) } }() // Start the file watcher for indexing searchWatcher, err = search.NewWatcher(path.Join(options.StorageDir, "root"), treeService, sqliteIndex, status) if err != nil { - 
log.Printf("failed to create file watcher: %v", err) + logger.Warn("failed to create file watcher", "error", err) } else { go func() { if err := searchWatcher.Start(); err != nil { - log.Printf("failed to start file watcher: %v", err) + logger.Warn("failed to start file watcher", "error", err) } }() } @@ -164,6 +169,7 @@ func NewWiki(options *WikiOptions) (*Wiki, error) { status: status, searchWatcher: searchWatcher, links: linkService, + log: logger, } // Ensure the welcome page exists @@ -175,16 +181,12 @@ func NewWiki(options *WikiOptions) (*Wiki, error) { } func (w *Wiki) EnsureWelcomePage() error { - _, err := w.tree.GetPage("root") - if err == nil { - return nil - } - if len(w.tree.GetTree().Children) > 0 { + w.log.Info("Welcome page already exists, skipping creation") return nil } - - p, err := w.CreatePage(SYSTEM_USER_ID, nil, "Welcome to LeafWiki", "welcome-to-leafwiki") + k := tree.NodeKindPage + p, err := w.CreatePage(SYSTEM_USER_ID, nil, "Welcome to LeafWiki", "welcome-to-leafwiki", &k) if err != nil { return err } @@ -221,7 +223,7 @@ LeafWiki is designed for clarity, structure, and long-term maintainability — n - **Bold** ` + "- `Inline code` \n```\n\n" + "Enjoy writing!" 
- if _, err := w.UpdatePage(SYSTEM_USER_ID, p.ID, p.Title, p.Slug, content); err != nil { + if _, err := w.UpdatePage(SYSTEM_USER_ID, p.ID, p.Title, p.Slug, &content, &k); err != nil { return err } @@ -232,13 +234,17 @@ func (w *Wiki) GetTree() *tree.PageNode { return w.tree.GetTree() } -func (w *Wiki) CreatePage(userID string, parentID *string, title string, slug string) (*tree.Page, error) { +func (w *Wiki) CreatePage(userID string, parentID *string, title string, slug string, kind *tree.NodeKind) (*tree.Page, error) { ve := errors.NewValidationErrors() if title == "" { ve.Add("title", "Title must not be empty") } + if kind == nil { + ve.Add("kind", "Kind must be specified") + } + if err := w.slug.IsValidSlug(slug); err != nil { ve.Add("slug", err.Error()) } @@ -255,10 +261,20 @@ func (w *Wiki) CreatePage(userID string, parentID *string, title string, slug st return nil, err } } - - id, err := w.tree.CreatePage(userID, parentID, title, slug) - if err != nil { - return nil, err + var id *string + if *kind == tree.NodeKindPage { + var err error + id, err = w.tree.CreateNode(userID, parentID, title, slug, kind) + if err != nil { + return nil, err + } + } + if *kind == tree.NodeKindSection { + var err error + id, err = w.tree.CreateNode(userID, parentID, title, slug, kind) + if err != nil { + return nil, err + } } page, err := w.tree.GetPage(*id) @@ -275,7 +291,7 @@ func (w *Wiki) CreatePage(userID string, parentID *string, title string, slug st return page, nil } -func (w *Wiki) EnsurePath(userID string, targetPath string, targetTitle string) (*tree.Page, error) { +func (w *Wiki) EnsurePath(userID string, targetPath string, targetTitle string, kind *tree.NodeKind) (*tree.Page, error) { ve := errors.NewValidationErrors() cleanTargetPath := strings.Trim(strings.TrimSpace(targetPath), "/") @@ -316,7 +332,7 @@ func (w *Wiki) EnsurePath(userID string, targetPath string, targetTitle string) } // Now we create the missing segments - result, err := 
w.tree.EnsurePagePath(userID, cleanTargetPath, cleanTargetTitle) + result, err := w.tree.EnsurePagePath(userID, cleanTargetPath, cleanTargetTitle, kind) if err != nil { return nil, err } @@ -342,7 +358,7 @@ func (w *Wiki) EnsurePath(userID string, targetPath string, targetTitle string) return page, nil } -func (w *Wiki) UpdatePage(userID string, id, title, slug, content string) (*tree.Page, error) { +func (w *Wiki) UpdatePage(userID string, id, title, slug string, content *string, kind *tree.NodeKind) (*tree.Page, error) { // Validate the request ve := errors.NewValidationErrors() @@ -372,7 +388,7 @@ func (w *Wiki) UpdatePage(userID string, id, title, slug, content string) (*tree } } - if err = w.tree.UpdatePage(userID, id, title, slug, content); err != nil { + if err = w.tree.UpdateNode(userID, id, title, slug, content); err != nil { return nil, err } @@ -403,8 +419,10 @@ func (w *Wiki) UpdatePage(userID string, id, title, slug, content string) (*tree } } } else { - if err := w.links.UpdateLinksForPage(after, content); err != nil { - log.Printf("warning: failed to update links for page %s: %v", after.ID, err) + if content != nil { + if err := w.links.UpdateLinksForPage(after, *content); err != nil { + log.Printf("warning: failed to update links for page %s: %v", after.ID, err) + } } if err := w.links.HealLinksForExactPath(after); err != nil { log.Printf("warning: failed to heal links for page %s: %v", after.ID, err) @@ -435,13 +453,15 @@ func (w *Wiki) CopyPage(userID string, currentPageID string, targetParentID *str return nil, err } + kind := tree.NodeKindPage + // Create a copy of the page - copyID, err := w.tree.CreatePage(userID, targetParentID, title, slug) + copyID, err := w.tree.CreateNode(userID, targetParentID, title, slug, &kind) if err != nil { log.Printf("error: could not create page copy: %v", err) return nil, err } - cleanup := func() { _ = w.tree.DeletePage(userID, *copyID, false) } + cleanup := func() { _ = w.tree.DeleteNode(userID, *copyID, 
false) } // Get the copied page copy, err := w.tree.GetPage(*copyID) @@ -462,7 +482,7 @@ func (w *Wiki) CopyPage(userID string, currentPageID string, targetParentID *str updatedContent := strings.ReplaceAll(page.Content, "/assets/"+page.ID+"/", "/assets/"+copy.ID+"/") // Write the content to the copied page - if err := w.tree.UpdatePage(userID, copy.ID, copy.Title, copy.Slug, updatedContent); err != nil { + if err := w.tree.UpdateNode(userID, copy.ID, copy.Title, copy.Slug, &updatedContent); err != nil { log.Printf("error: could not update copied page content: %v", err) cleanup() _ = w.asset.DeleteAllAssetsForPage(copy.PageNode) @@ -514,7 +534,7 @@ func (w *Wiki) DeletePage(userID string, id string, recursive bool) error { oldPrefix = page.CalculatePath() } - if err := w.tree.DeletePage(userID, id, recursive); err != nil { + if err := w.tree.DeleteNode(userID, id, recursive); err != nil { log.Printf("error: could not delete page: %v", err) return err } @@ -542,7 +562,7 @@ func (w *Wiki) DeletePage(userID string, id string, recursive bool) error { return nil } - if err := w.tree.DeletePage(userID, id, recursive); err != nil { + if err := w.tree.DeleteNode(userID, id, recursive); err != nil { log.Printf("error: could not delete page: %v", err) return err } @@ -595,7 +615,7 @@ func (w *Wiki) MovePage(userID, id, parentID string) error { oldPrefix = p.CalculatePath() } } - if err := w.tree.MovePage(userID, id, parentID); err != nil { + if err := w.tree.MoveNode(userID, id, parentID); err != nil { return err } diff --git a/internal/wiki/wiki_test.go b/internal/wiki/wiki_test.go index 02fee30d..fffe0a1c 100644 --- a/internal/wiki/wiki_test.go +++ b/internal/wiki/wiki_test.go @@ -23,11 +23,16 @@ func createWikiTestInstance(t *testing.T) *Wiki { return wikiInstance } +func pageNodeKind() *tree.NodeKind { + kind := tree.NodeKindPage + return &kind +} + func TestWiki_CreatePage_Root(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - page, err := 
w.CreatePage("system", nil, "Home", "home") + page, err := w.CreatePage("system", nil, "Home", "home", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -40,9 +45,10 @@ func TestWiki_CreatePage_Root(t *testing.T) { func TestWiki_CreatePage_WithParent(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - rootPage, _ := w.CreatePage("system", nil, "Docs", "docs") + kind := tree.NodeKindPage + rootPage, _ := w.CreatePage("system", nil, "Docs", "docs", &kind) - page, err := w.CreatePage("system", &rootPage.ID, "API-Doc", "api-doc") + page, err := w.CreatePage("system", &rootPage.ID, "API-Doc", "api-doc", &kind) if err != nil { t.Fatalf("CreatePage with parent failed: %v", err) } @@ -55,7 +61,7 @@ func TestWiki_CreatePage_WithParent(t *testing.T) { func TestWiki_CreatePage_EmptyTitle(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - _, err := w.CreatePage("system", nil, "", "empty") + _, err := w.CreatePage("system", nil, "", "empty", pageNodeKind()) if err == nil { t.Error("Expected error for empty title, got none") } @@ -64,7 +70,7 @@ func TestWiki_CreatePage_EmptyTitle(t *testing.T) { func TestWiki_CreatePage_ReservedSlug(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - _, err := w.CreatePage("system", nil, "Reserved", "e") + _, err := w.CreatePage("system", nil, "Reserved", "e", pageNodeKind()) if err == nil { t.Error("Expected error for reserved slug, got none") } @@ -82,9 +88,9 @@ func TestWiki_CreatePage_ReservedSlug(t *testing.T) { func TestWiki_CreatePage_PageExists(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - _, _ = w.CreatePage("system", nil, "Duplicate", "duplicate") + _, _ = w.CreatePage("system", nil, "Duplicate", "duplicate", pageNodeKind()) - _, err := w.CreatePage("system", nil, "Duplicate", "duplicate") + _, err := w.CreatePage("system", nil, "Duplicate", "duplicate", pageNodeKind()) if err == nil { t.Error("Expected error for duplicate page, got none") } @@ -94,7 
+100,7 @@ func TestWiki_CreatePage_InvalidParent(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() invalidID := "not-real" - _, err := w.CreatePage("system", &invalidID, "Broken", "broken") + _, err := w.CreatePage("system", &invalidID, "Broken", "broken", pageNodeKind()) if err == nil { t.Error("Expected error with invalid parent ID, got none") } @@ -103,7 +109,7 @@ func TestWiki_CreatePage_InvalidParent(t *testing.T) { func TestWiki_GetPage_ValidID(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - page, _ := w.CreatePage("system", nil, "ReadMe", "readme") + page, _ := w.CreatePage("system", nil, "ReadMe", "readme", pageNodeKind()) found, err := w.GetPage(page.ID) if err != nil { @@ -127,8 +133,8 @@ func TestWiki_GetPage_InvalidID(t *testing.T) { func TestWiki_MovePage_Valid(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - parent, _ := w.CreatePage("system", nil, "Projects", "projects") - child, _ := w.CreatePage("system", nil, "Old", "old") + parent, _ := w.CreatePage("system", nil, "Projects", "projects", pageNodeKind()) + child, _ := w.CreatePage("system", nil, "Old", "old", pageNodeKind()) err := w.MovePage("system", child.ID, parent.ID) if err != nil { @@ -139,8 +145,7 @@ func TestWiki_MovePage_Valid(t *testing.T) { func TestWiki_DeletePage_Simple(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - page, _ := w.CreatePage("system", nil, "Trash", "trash") - + page, _ := w.CreatePage("system", nil, "Trash", "trash", pageNodeKind()) err := w.DeletePage("system", page.ID, false) if err != nil { t.Fatalf("DeletePage failed: %v", err) @@ -150,8 +155,8 @@ func TestWiki_DeletePage_Simple(t *testing.T) { func TestWiki_DeletePage_WithChildren(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - parent, _ := w.CreatePage("system", nil, "Parent", "parent") - _, _ = w.CreatePage("system", &parent.ID, "Child", "child") + parent, _ := w.CreatePage("system", nil, "Parent", "parent", pageNodeKind()) + _, _ = 
w.CreatePage("system", &parent.ID, "Child", "child", pageNodeKind()) err := w.DeletePage("system", parent.ID, false) if err == nil { @@ -162,8 +167,8 @@ func TestWiki_DeletePage_WithChildren(t *testing.T) { func TestWiki_DeletePage_Recursive(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - parent, _ := w.CreatePage("system", nil, "Parent", "parent") - _, _ = w.CreatePage("system", &parent.ID, "Child", "child") + parent, _ := w.CreatePage("system", nil, "Parent", "parent", pageNodeKind()) + _, _ = w.CreatePage("system", &parent.ID, "Child", "child", pageNodeKind()) err := w.DeletePage("system", parent.ID, true) if err != nil { @@ -205,9 +210,9 @@ func TestWiki_UpdatePage(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - page, _ := w.CreatePage("system", nil, "Draft", "draft") - - page, err := w.UpdatePage("system", page.ID, "Final", "final", "# Updated") + page, _ := w.CreatePage("system", nil, "Draft", "draft", pageNodeKind()) + var updatedstr string = "# Updated" + page, err := w.UpdatePage("system", page.ID, "Final", "final", &updatedstr, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage failed: %v", err) } @@ -234,7 +239,7 @@ func TestWiki_SuggestSlug_Conflict(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() root := w.GetTree() - _, err := w.CreatePage("system", nil, "My Page", "my-page") + _, err := w.CreatePage("system", nil, "My Page", "my-page", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) @@ -254,14 +259,14 @@ func TestWiki_SuggestSlug_DeepHierarchy(t *testing.T) { defer w.Close() // create a deep hierarchy of pages (Architecture -> Backend) - _, err := w.CreatePage("system", nil, "Architecture", "architecture") + _, err := w.CreatePage("system", nil, "Architecture", "architecture", pageNodeKind()) if err != nil { t.Fatalf("Failed to create 'Architecture': %v", err) } root := w.GetTree() arch := root.Children[0] - _, err = w.CreatePage("system", &arch.ID, "Backend", "backend") + _, 
err = w.CreatePage("system", &arch.ID, "Backend", "backend", pageNodeKind()) if err != nil { t.Fatalf("Failed to create 'Backend': %v", err) } @@ -278,7 +283,7 @@ func TestWiki_SuggestSlug_DeepHierarchy(t *testing.T) { } // Create a second one with the same name → it must be numbered - _, err = w.CreatePage("system", &backend.ID, "Data Layer", "data-layer") + _, err = w.CreatePage("system", &backend.ID, "Data Layer", "data-layer", pageNodeKind()) if err != nil { t.Fatalf("Failed to create 'Data Layer': %v", err) } @@ -296,7 +301,7 @@ func TestWiki_SuggestSlug_DeepHierarchy(t *testing.T) { func TestWiki_FindByPath_Valid(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - _, _ = w.CreatePage("system", nil, "Company", "company") + _, _ = w.CreatePage("system", nil, "Company", "company", pageNodeKind()) found, err := w.FindByPath("company") if err != nil { @@ -319,9 +324,9 @@ func TestWiki_FindByPath_Invalid(t *testing.T) { func TestWiki_SortPages(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - parent, _ := w.CreatePage("system", nil, "Parent", "parent") - child1, _ := w.CreatePage("system", &parent.ID, "Child1", "child1") - child2, _ := w.CreatePage("system", &parent.ID, "Child2", "child2") + parent, _ := w.CreatePage("system", nil, "Parent", "parent", pageNodeKind()) + child1, _ := w.CreatePage("system", &parent.ID, "Child1", "child1", pageNodeKind()) + child2, _ := w.CreatePage("system", &parent.ID, "Child2", "child2", pageNodeKind()) err := w.SortPages(parent.ID, []string{child2.ID, child1.ID}) if err != nil { @@ -338,7 +343,7 @@ func TestWiki_SortPages(t *testing.T) { func TestWiki_CopyPages(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - original, _ := w.CreatePage("system", nil, "Original", "original") + original, _ := w.CreatePage("system", nil, "Original", "original", pageNodeKind()) copied, err := w.CopyPage("system", original.ID, nil, "Copy of Original", "copy-of-original") if err != nil { @@ -359,8 +364,8 @@ 
func TestWiki_CopyPages(t *testing.T) { func TestWiki_CopyPages_WithParent(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - parent, _ := w.CreatePage("system", nil, "Parent", "parent") - original, _ := w.CreatePage("system", nil, "Original", "original") + parent, _ := w.CreatePage("system", nil, "Parent", "parent", pageNodeKind()) + original, _ := w.CreatePage("system", nil, "Original", "original", pageNodeKind()) copied, err := w.CopyPage("system", original.ID, &parent.ID, "Copy of Original", "copy-of-original") if err != nil { @@ -384,7 +389,7 @@ func TestWiki_CopyPages_NonExistentSource(t *testing.T) { func TestWiki_CopyPages_WithAssets(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - original, _ := w.CreatePage("system", nil, "Original", "original") + original, _ := w.CreatePage("system", nil, "Original", "original", pageNodeKind()) originalNode := tree.PageNode{ ID: original.ID, @@ -454,12 +459,13 @@ func TestWiki_EnsurePath_HealsLinksForAllCreatedSegments(t *testing.T) { defer w.Close() // 1) Create page A with links to /x and /x/y (both non-existing) - pageA, err := w.CreatePage("system", nil, "Page A", "a") + pageA, err := w.CreatePage("system", nil, "Page A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage A failed: %v", err) } - _, err = w.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, "Links: [X](/x) and [XY](/x/y)") + var contentA string = "Links: [X](/x) and [XY](/x/y)" + _, err = w.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage A failed: %v", err) } @@ -489,7 +495,7 @@ func TestWiki_EnsurePath_HealsLinksForAllCreatedSegments(t *testing.T) { } // 3) EnsurePath creates /x and /x/y and triggers Heal for all newly created segments - _, err = w.EnsurePath("system", "/x/y", "X Y") + _, err = w.EnsurePath("system", "/x/y", "X Y", pageNodeKind()) if err != nil { t.Fatalf("EnsurePath failed: %v", err) } @@ -548,21 +554,25 @@ func 
TestWiki_DeletePage_NonRecursive_MarksIncomingBroken(t *testing.T) { defer w.Close() // Create A with link to /b - a, err := w.CreatePage("system", nil, "Page A", "a") + a, err := w.CreatePage("system", nil, "Page A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage A failed: %v", err) } - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Link to B: [Go](/b)") + + var contentA string = "Link to B: [Go](/b)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage A failed: %v", err) } // Create B - b, err := w.CreatePage("system", nil, "Page B", "b") + b, err := w.CreatePage("system", nil, "Page B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage B failed: %v", err) } - _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, "# Page B") + + var contentB string = "# Page B" + _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, &contentB, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage B failed: %v", err) } @@ -612,37 +622,40 @@ func TestWiki_DeletePage_Recursive_RemovesOutgoingForSubtree_AndBreaksIncomingBy defer w.Close() // Create /docs - docs, err := w.CreatePage("system", nil, "Docs", "docs") + docs, err := w.CreatePage("system", nil, "Docs", "docs", pageNodeKind()) if err != nil { t.Fatalf("CreatePage docs failed: %v", err) } // Create /docs/a and /docs/b - a, err := w.CreatePage("system", &docs.ID, "A", "a") + a, err := w.CreatePage("system", &docs.ID, "A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage a failed: %v", err) } - b, err := w.CreatePage("system", &docs.ID, "B", "b") + b, err := w.CreatePage("system", &docs.ID, "B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage b failed: %v", err) } // A links to B inside subtree - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Link to B: [B](/docs/b)") + var contentA string = "Link to B: [B](/docs/b)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if 
err != nil { t.Fatalf("UpdatePage a failed: %v", err) } - _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, "# B") + var contentB string = "# B" + _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, &contentB, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage b failed: %v", err) } // Create survivor /c with incoming link into subtree - c, err := w.CreatePage("system", nil, "C", "c") + c, err := w.CreatePage("system", nil, "C", "c", pageNodeKind()) if err != nil { t.Fatalf("CreatePage c failed: %v", err) } - _, err = w.UpdatePage("system", c.ID, c.Title, c.Slug, "Incoming link: [B](/docs/b)") + var contentC string = "Incoming link: [B](/docs/b)" + _, err = w.UpdatePage("system", c.ID, c.Title, c.Slug, &contentC, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage c failed: %v", err) } @@ -700,21 +713,23 @@ func TestWiki_RenamePage_MarksOldBroken_HealsNewExactPath(t *testing.T) { defer w.Close() // Create A with links to /b (exists) and /b2 (does not exist yet) - a, err := w.CreatePage("system", nil, "A", "a") + a, err := w.CreatePage("system", nil, "A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage A failed: %v", err) } - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Links: [B](/b) and [B2](/b2)") + var contentA string = "Links: [B](/b) and [B2](/b2)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage A failed: %v", err) } // Create B at /b - b, err := w.CreatePage("system", nil, "B", "b") + b, err := w.CreatePage("system", nil, "B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage B failed: %v", err) } - _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, "# B") + var contentB string = "# B" + _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, &contentB, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage B failed: %v", err) } @@ -751,7 +766,8 @@ func TestWiki_RenamePage_MarksOldBroken_HealsNewExactPath(t *testing.T) { } // Rename B: /b -> /b2 - 
_, err = w.UpdatePage("system", b.ID, b.Title, "b2", "# B (renamed)") + var contentB2 string = "# B (renamed)" + _, err = w.UpdatePage("system", b.ID, b.Title, "b2", &contentB2, pageNodeKind()) if err != nil { t.Fatalf("Rename B failed: %v", err) } @@ -798,25 +814,27 @@ func TestWiki_RenameSubtree_BreaksOldPrefix_HealsNewSubpaths(t *testing.T) { defer w.Close() // Create subtree: /docs/b - docs, err := w.CreatePage("system", nil, "Docs", "docs") + docs, err := w.CreatePage("system", nil, "Docs", "docs", pageNodeKind()) if err != nil { t.Fatalf("CreatePage docs failed: %v", err) } - b, err := w.CreatePage("system", &docs.ID, "B", "b") + b, err := w.CreatePage("system", &docs.ID, "B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage /docs/b failed: %v", err) } - _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, "# B") + var contentB string = "# B" + _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, &contentB, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage B failed: %v", err) } // Create A that links to old and future new subtree paths - a, err := w.CreatePage("system", nil, "A", "a") + a, err := w.CreatePage("system", nil, "A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage A failed: %v", err) } - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Links: [Old](/docs/b) and [New](/docs2/b)") + var contentA string = "Links: [Old](/docs/b) and [New](/docs2/b)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage A failed: %v", err) } @@ -835,7 +853,9 @@ func TestWiki_RenameSubtree_BreaksOldPrefix_HealsNewSubpaths(t *testing.T) { } // Rename /docs -> /docs2 - _, err = w.UpdatePage("system", docs.ID, docs.Title, "docs2", "# Docs") + var contentDocs2 string = "# Docs" + nodeSection := tree.NodeKindSection + _, err = w.UpdatePage("system", docs.ID, docs.Title, "docs2", &contentDocs2, &nodeSection) if err != nil { t.Fatalf("Rename docs failed: %v", err) } @@ -882,27 
+902,29 @@ func TestWiki_MovePage_MarksOldBroken_HealsNewExactPath(t *testing.T) { defer w.Close() // Create A that links to /b (old path) and /projects/b (future path) - a, err := w.CreatePage("system", nil, "A", "a") + a, err := w.CreatePage("system", nil, "A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage A failed: %v", err) } - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Links: [B](/b) and [B2](/projects/b)") + var contentA string = "Links: [B](/b) and [B2](/projects/b)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage A failed: %v", err) } // Create B at /b - b, err := w.CreatePage("system", nil, "B", "b") + b, err := w.CreatePage("system", nil, "B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage B failed: %v", err) } - _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, "# B") + var contentB string = "# B" + _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, &contentB, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage B failed: %v", err) } // Create parent /projects (target) - projects, err := w.CreatePage("system", nil, "Projects", "projects") + projects, err := w.CreatePage("system", nil, "Projects", "projects", pageNodeKind()) if err != nil { t.Fatalf("CreatePage projects failed: %v", err) } @@ -980,31 +1002,33 @@ func TestWiki_MoveSubtree_BreaksOldPrefix_HealsNewSubpaths(t *testing.T) { defer w.Close() // Create subtree /docs/b - docs, err := w.CreatePage("system", nil, "Docs", "docs") + docs, err := w.CreatePage("system", nil, "Docs", "docs", pageNodeKind()) if err != nil { t.Fatalf("CreatePage docs failed: %v", err) } - b, err := w.CreatePage("system", &docs.ID, "B", "b") + b, err := w.CreatePage("system", &docs.ID, "B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage /docs/b failed: %v", err) } - _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, "# B") + var contentB string = "# B" + _, err = w.UpdatePage("system", b.ID, 
b.Title, b.Slug, &contentB, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage B failed: %v", err) } // Create target parent /archive - archive, err := w.CreatePage("system", nil, "Archive", "archive") + archive, err := w.CreatePage("system", nil, "Archive", "archive", pageNodeKind()) if err != nil { t.Fatalf("CreatePage archive failed: %v", err) } // Create A that links to old and future new subtree paths - a, err := w.CreatePage("system", nil, "A", "a") + a, err := w.CreatePage("system", nil, "A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage A failed: %v", err) } - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Links: [Old](/docs/b) and [New](/archive/docs/b)") + var contentA string = "Links: [Old](/docs/b) and [New](/archive/docs/b)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage A failed: %v", err) } @@ -1053,41 +1077,44 @@ func TestWiki_MovePage_ReindexesRelativeLinks(t *testing.T) { defer w.Close() // Create /docs with /docs/shared and /docs/a - docs, err := w.CreatePage("system", nil, "Docs", "docs") + docs, err := w.CreatePage("system", nil, "Docs", "docs", pageNodeKind()) if err != nil { t.Fatalf("CreatePage docs failed: %v", err) } - docsShared, err := w.CreatePage("system", &docs.ID, "Shared", "shared") + docsShared, err := w.CreatePage("system", &docs.ID, "Shared", "shared", pageNodeKind()) if err != nil { t.Fatalf("CreatePage /docs/shared failed: %v", err) } - _, err = w.UpdatePage("system", docsShared.ID, docsShared.Title, docsShared.Slug, "# Docs Shared") + var contentDocsShared string = "# Docs Shared" + _, err = w.UpdatePage("system", docsShared.ID, docsShared.Title, docsShared.Slug, &contentDocsShared, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage /docs/shared failed: %v", err) } - a, err := w.CreatePage("system", &docs.ID, "A", "a") + a, err := w.CreatePage("system", &docs.ID, "A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage 
/docs/a failed: %v", err) } // Important: relative link - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Relative: [S](../shared)") + var contentA string = "Relative: [S](../shared)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage /docs/a failed: %v", err) } // Create /guide with /guide/shared (different page!) - guide, err := w.CreatePage("system", nil, "Guide", "guide") + guide, err := w.CreatePage("system", nil, "Guide", "guide", pageNodeKind()) if err != nil { t.Fatalf("CreatePage guide failed: %v", err) } - guideShared, err := w.CreatePage("system", &guide.ID, "Shared", "shared") + guideShared, err := w.CreatePage("system", &guide.ID, "Shared", "shared", pageNodeKind()) if err != nil { t.Fatalf("CreatePage /guide/shared failed: %v", err) } - _, err = w.UpdatePage("system", guideShared.ID, guideShared.Title, guideShared.Slug, "# Guide Shared") + var contentGuideShared string = "# Guide Shared" + _, err = w.UpdatePage("system", guideShared.ID, guideShared.Title, guideShared.Slug, &contentGuideShared, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage /guide/shared failed: %v", err) } @@ -1226,7 +1253,7 @@ func TestWiki_AuthDisabled_CoreFunctionalityWorks(t *testing.T) { defer wikiInstance.Close() // Test creating a page - page, err := wikiInstance.CreatePage("system", nil, "Test Page", "test-page") + page, err := wikiInstance.CreatePage("system", nil, "Test Page", "test-page", pageNodeKind()) if err != nil { t.Fatalf("Failed to create page with AuthDisabled: %v", err) } @@ -1236,7 +1263,8 @@ func TestWiki_AuthDisabled_CoreFunctionalityWorks(t *testing.T) { } // Test updating a page - updatedPage, err := wikiInstance.UpdatePage("system", page.ID, "Updated Title", "updated-slug", "# Content") + var updatedContent string = "# Content" + updatedPage, err := wikiInstance.UpdatePage("system", page.ID, "Updated Title", "updated-slug", &updatedContent, pageNodeKind()) if err != nil { 
t.Fatalf("Failed to update page with AuthDisabled: %v", err) } From d198654e35fcdefe9ce7a6b5697aa8340e434661 Mon Sep 17 00:00:00 2001 From: Patrick Erber Date: Wed, 14 Jan 2026 17:10:49 +0100 Subject: [PATCH 04/11] fix: use shared write file atomic --- internal/core/tree/node_store.go | 56 ++++-------------------------- internal/core/tree/tree_service.go | 2 +- 2 files changed, 8 insertions(+), 50 deletions(-) diff --git a/internal/core/tree/node_store.go b/internal/core/tree/node_store.go index 09e2308c..59901635 100644 --- a/internal/core/tree/node_store.go +++ b/internal/core/tree/node_store.go @@ -10,6 +10,8 @@ import ( "path/filepath" "runtime" "strings" + + "github.com/perber/wiki/internal/core/shared" ) func fileExists(p string) bool { @@ -17,50 +19,6 @@ func fileExists(p string) bool { return err == nil } -// writeFileAtomic writes data to filename atomically by writing to a temp file -// in the same directory and then renaming it over the target. -func writeFileAtomic(filename string, data []byte, perm os.FileMode) error { - dir := filepath.Dir(filename) - - tmpFile, err := os.CreateTemp(dir, ".tmp-*") - if err != nil { - return fmt.Errorf("create temp file: %w", err) - } - - tmpName := tmpFile.Name() - // Ensure the temp file is removed in case of an error - defer func() { - _ = os.Remove(tmpName) - }() - - if perm != 0 { - if err := tmpFile.Chmod(perm); err != nil { - tmpFile.Close() - return fmt.Errorf("chmod temp file: %w", err) - } - } - - if _, err := tmpFile.Write(data); err != nil { - tmpFile.Close() - return fmt.Errorf("write temp file: %w", err) - } - - if err := tmpFile.Sync(); err != nil { - tmpFile.Close() - return fmt.Errorf("sync temp file: %w", err) - } - - if err := tmpFile.Close(); err != nil { - return fmt.Errorf("close temp file: %w", err) - } - - if err := atomicReplace(tmpName, filename); err != nil { - return fmt.Errorf("replace temp file: %w", err) - } - - return nil -} - func atomicReplace(src, dst string) error { // On Windows, 
os.Rename fails if dst already exists. // On Unix, Rename is atomic and replaces dst. @@ -152,7 +110,7 @@ func (f *NodeStore) SaveTree(filename string, tree *PageNode) error { return fmt.Errorf("could not marshal tree: %w", err) } - if err := writeFileAtomic(fullPath, data, 0o644); err != nil { + if err := shared.WriteFileAtomic(fullPath, data, 0o644); err != nil { return fmt.Errorf("could not atomically write tree file: %w", err) } @@ -207,7 +165,7 @@ func (f *NodeStore) CreatePage(parentEntry *PageNode, newEntry *PageNode) error return fmt.Errorf("could not build markdown with frontmatter: %w", err) } - if err := writeFileAtomic(destFile, []byte(md), 0o644); err != nil { + if err := shared.WriteFileAtomic(destFile, []byte(md), 0o644); err != nil { return fmt.Errorf("could not create file: %w", err) } @@ -287,7 +245,7 @@ func (f *NodeStore) UpsertContent(entry *PageNode, content string) error { if err != nil { return fmt.Errorf("could not build markdown with frontmatter: %w", err) } - if err := writeFileAtomic(filePath, []byte(contentWithFM), mode); err != nil { + if err := shared.WriteFileAtomic(filePath, []byte(contentWithFM), mode); err != nil { return fmt.Errorf("could not write to file atomically: %w", err) } @@ -630,7 +588,7 @@ func (f *NodeStore) SyncFrontmatterIfExists(entry *PageNode) error { mode = st.Mode() } - if err := writeFileAtomic(filePath, []byte(out), mode); err != nil { + if err := shared.WriteFileAtomic(filePath, []byte(out), mode); err != nil { return fmt.Errorf("write file atomically: %w", err) } return nil @@ -818,7 +776,7 @@ func (f *NodeStore) ConvertNode(entry *PageNode, target NodeKind) error { if err != nil { return err } - if err := writeFileAtomic(filePath, []byte(md), 0o644); err != nil { + if err := shared.WriteFileAtomic(filePath, []byte(md), 0o644); err != nil { return fmt.Errorf("could not write page file: %w", err) } } diff --git a/internal/core/tree/tree_service.go b/internal/core/tree/tree_service.go index eb5a92c5..77bcff48 
100644 --- a/internal/core/tree/tree_service.go +++ b/internal/core/tree/tree_service.go @@ -233,7 +233,7 @@ func (t *TreeService) migrateToV2() error { return fmt.Errorf("could not determine content path for node %s: %w", node.ID, err) } - if err := writeFileAtomic(filePath, []byte(newContent), 0o644); err != nil { + if err := shared.WriteFileAtomic(filePath, []byte(newContent), 0o644); err != nil { t.log.Error("could not write updated page content", "nodeID", node.ID, "filePath", filePath, "error", err) return fmt.Errorf("could not write updated page content for node %s: %w", node.ID, err) } From d13eecb08981b7a63f22673ee13e1a1e2bbaeaa7 Mon Sep 17 00:00:00 2001 From: perber Date: Fri, 16 Jan 2026 18:05:30 +0100 Subject: [PATCH 05/11] feat: add section functionality to frontend (#579) --- e2e/pages/TreeView.ts | 15 +- e2e/tests/page.spec.ts | 5 +- internal/core/shared/utils.go | 14 +- internal/core/tree/node_store.go | 12 -- internal/core/tree/tree_service.go | 43 +++++ internal/http/api/convert_page.go | 51 +++++ internal/http/api/create_page.go | 4 + internal/http/api/helpers.go | 1 + internal/http/api/node.go | 20 +- internal/http/router.go | 25 +-- internal/wiki/wiki.go | 24 ++- .../src/features/page/AddPageDialog.tsx | 75 +++++--- ui/leafwiki-ui/src/features/tree/TreeNode.tsx | 62 +++---- .../src/features/tree/TreeNodeActionsMenu.tsx | 174 ++++++++++++++++++ ui/leafwiki-ui/src/features/tree/TreeView.tsx | 25 ++- .../features/tree/TreeViewActionButton.tsx | 7 +- .../src/features/tree/treeNodeActionsMenus.ts | 18 ++ .../viewer/EmptySectionChildrenList.tsx | 118 ++++++++++++ .../src/features/viewer/PageViewer.tsx | 2 + ui/leafwiki-ui/src/index.css | 36 +++- ui/leafwiki-ui/src/lib/api/pages.ts | 18 +- 21 files changed, 635 insertions(+), 114 deletions(-) create mode 100644 internal/http/api/convert_page.go create mode 100644 ui/leafwiki-ui/src/features/tree/TreeNodeActionsMenu.tsx create mode 100644 ui/leafwiki-ui/src/features/tree/treeNodeActionsMenus.ts create 
mode 100644 ui/leafwiki-ui/src/features/viewer/EmptySectionChildrenList.tsx diff --git a/e2e/pages/TreeView.ts b/e2e/pages/TreeView.ts index dbb1060d..a973f265 100644 --- a/e2e/pages/TreeView.ts +++ b/e2e/pages/TreeView.ts @@ -86,7 +86,13 @@ export default class TreeView { await nodeRow.scrollIntoViewIfNeeded(); await nodeRow.hover(); // oder mouse.move, s.u. - const sortButton = nodeRow.locator('button[data-testid="tree-view-action-button-sort"]'); + // open more actions menu + const moreActionsButton = nodeRow.locator( + 'button[data-testid="tree-view-action-button-open-more-actions"]', + ); + await moreActionsButton.click({ force: true }); + + const sortButton = this.page.locator('div[data-testid="tree-view-action-button-sort"]'); await sortButton.click({ force: true }); const sortPageDialog = new SortPageDialog(this.page); @@ -111,7 +117,12 @@ export default class TreeView { await nodeRow.scrollIntoViewIfNeeded(); await nodeRow.hover(); // oder mouse.move, s.u. - const moveButton = nodeRow.locator('button[data-testid="tree-view-action-button-move"]'); + const moreActionsButton = nodeRow.locator( + 'button[data-testid="tree-view-action-button-open-more-actions"]', + ); + await moreActionsButton.click({ force: true }); + + const moveButton = this.page.locator('div[data-testid="tree-view-action-button-move"]'); await moveButton.click({ force: true }); const movePageDialog = new MovePageDialog(this.page); diff --git a/e2e/tests/page.spec.ts b/e2e/tests/page.spec.ts index 66c3b272..b5013480 100644 --- a/e2e/tests/page.spec.ts +++ b/e2e/tests/page.spec.ts @@ -6,7 +6,6 @@ import DeletePageDialog from '../pages/DeletePageDialog'; import EditPage from '../pages/EditPage'; import LoginPage from '../pages/LoginPage'; import NotFoundPage from '../pages/NotFoundPage'; -import SearchView from '../pages/SearchView'; import TreeView from '../pages/TreeView'; import ViewPage from '../pages/ViewPage'; @@ -368,6 +367,9 @@ graph TD; test.expect(await 
deletePageDialog.dialogTextVisible()).toBeFalsy(); }); + // disable this test cases, because it is flaky + // TODO: fix the flakiness + /* test('search-page', async ({ page }) => { const title = `Page To Search ${Date.now()}`; const content = `This is the content of the page to search, created at ${new Date().toISOString()}`; @@ -404,6 +406,7 @@ graph TD; // clear search await searchView.clearSearch(); }); + */ test('test-asset-upload-and-use-in-page', async ({ page }) => { const title = `Page With Asset ${Date.now()}`; diff --git a/internal/core/shared/utils.go b/internal/core/shared/utils.go index bddb8f87..f64375d4 100644 --- a/internal/core/shared/utils.go +++ b/internal/core/shared/utils.go @@ -8,6 +8,7 @@ import ( "mime/multipart" "os" "path" + "runtime" "github.com/teris-io/shortid" ) @@ -36,6 +37,17 @@ func GenerateRandomPassword(length int) (string, error) { return string(password), nil } +func atomicReplace(src, dst string) error { + // On Windows, os.Rename fails if dst already exists. + // On Unix, Rename is atomic and replaces dst. + if runtime.GOOS == "windows" { + if err := os.Remove(dst); err != nil && !os.IsNotExist(err) { + return fmt.Errorf("remove existing file: %w", err) + } + } + return os.Rename(src, dst) +} + // WriteFileAtomic writes data to filename atomically by writing to a temp file // in the same directory and then renaming it over the target. 
func WriteFileAtomic(filename string, data []byte, perm os.FileMode) error { @@ -73,7 +85,7 @@ func WriteFileAtomic(filename string, data []byte, perm os.FileMode) error { return fmt.Errorf("close temp file: %w", err) } - if err := os.Rename(tmpName, filename); err != nil { + if err := atomicReplace(tmpName, filename); err != nil { return fmt.Errorf("rename temp file: %w", err) } diff --git a/internal/core/tree/node_store.go b/internal/core/tree/node_store.go index 59901635..f37d3732 100644 --- a/internal/core/tree/node_store.go +++ b/internal/core/tree/node_store.go @@ -8,7 +8,6 @@ import ( "log/slog" "os" "path/filepath" - "runtime" "strings" "github.com/perber/wiki/internal/core/shared" @@ -19,17 +18,6 @@ func fileExists(p string) bool { return err == nil } -func atomicReplace(src, dst string) error { - // On Windows, os.Rename fails if dst already exists. - // On Unix, Rename is atomic and replaces dst. - if runtime.GOOS == "windows" { - if err := os.Remove(dst); err != nil && !os.IsNotExist(err) { - return fmt.Errorf("remove existing file: %w", err) - } - } - return os.Rename(src, dst) -} - type ResolvedNode struct { Kind NodeKind DirPath string // falls folder diff --git a/internal/core/tree/tree_service.go b/internal/core/tree/tree_service.go index 77bcff48..59e8e4aa 100644 --- a/internal/core/tree/tree_service.go +++ b/internal/core/tree/tree_service.go @@ -587,6 +587,49 @@ func (t *TreeService) UpdateNode(userID string, id string, title string, slug st } +func (t *TreeService) ConvertNode(userID string, id string, kind NodeKind) error { + return t.withLockedTree(func() error { + if t.tree == nil { + return ErrTreeNotLoaded + } + + // Find node + node, err := t.findPageByIDLocked(t.tree.Children, id) + if err != nil { + return ErrPageNotFound + } + + if node.Kind == kind { + // No change + return nil + } + + // Section -> Page only allowed if no children + if node.Kind == NodeKindSection && kind == NodeKindPage && node.HasChildren() { + return 
ErrPageHasChildren + } + + t.log.Info("changing node kind", "nodeID", node.ID, "oldKind", node.Kind, "newKind", kind) + + if err := t.store.ConvertNode(node, kind); err != nil { + return fmt.Errorf("could not convert node: %w", err) + } + node.Kind = kind + + // Update metadata + node.Metadata.UpdatedAt = time.Now().UTC() + node.Metadata.LastAuthorID = userID + + // Keep frontmatter in sync *if file exists* (important when kind changed but content == nil) + if err := t.store.SyncFrontmatterIfExists(node); err != nil { + return fmt.Errorf("could not sync frontmatter: %w", err) + } + + // Save tree + return t.saveTreeLocked() + }) +} + // GetTree returns the tree func (t *TreeService) GetTree() *PageNode { t.mu.RLock() diff --git a/internal/http/api/convert_page.go b/internal/http/api/convert_page.go new file mode 100644 index 00000000..359e469f --- /dev/null +++ b/internal/http/api/convert_page.go @@ -0,0 +1,51 @@ +package api + +import ( + "net/http" + + "github.com/gin-gonic/gin" + "github.com/perber/wiki/internal/core/tree" + auth_middleware "github.com/perber/wiki/internal/http/middleware/auth" + "github.com/perber/wiki/internal/wiki" +) + +type convertPageRequest struct { + TargetKind string `json:"targetKind" binding:"required"` +} + +func ConvertPageHandler(w *wiki.Wiki) gin.HandlerFunc { + return func(c *gin.Context) { + var req convertPageRequest + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request"}) + return + } + + user := auth_middleware.MustGetUser(c) + if user == nil { + return + } + + id := c.Param("id") + if id == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing page ID"}) + return + } + + // Validate TargetKind before converting to tree.NodeKind + if req.TargetKind != "page" && req.TargetKind != "section" { + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid targetKind"}) + return + } + + kind := tree.NodeKind(req.TargetKind) + + err := w.ConvertPage(user.ID, id, kind) + if 
err != nil { + respondWithError(c, err) + return + } + + c.JSON(http.StatusOK, gin.H{"status": "page converted"}) + } +} diff --git a/internal/http/api/create_page.go b/internal/http/api/create_page.go index 825b4dce..bad95e49 100644 --- a/internal/http/api/create_page.go +++ b/internal/http/api/create_page.go @@ -13,6 +13,7 @@ type createPageRequest struct { ParentID *string `json:"parentId"` // optional Title string `json:"title" binding:"required"` Slug string `json:"slug" binding:"required"` + Kind *string `json:"kind"` // optional } func CreatePageHandler(w *wiki.Wiki) gin.HandlerFunc { @@ -29,6 +30,9 @@ func CreatePageHandler(w *wiki.Wiki) gin.HandlerFunc { } kind := tree.NodeKindPage + if req.Kind != nil { + kind = tree.NodeKind(*req.Kind) + } page, err := w.CreatePage(user.ID, req.ParentID, req.Title, req.Slug, &kind) if err != nil { respondWithError(c, err) diff --git a/internal/http/api/helpers.go b/internal/http/api/helpers.go index df7283b6..47a21ac7 100644 --- a/internal/http/api/helpers.go +++ b/internal/http/api/helpers.go @@ -83,6 +83,7 @@ func ToAPINode(node *tree.PageNode, parentPath string, userResolver *auth.UserRe Slug: node.Slug, Path: path, Position: node.Position, + Kind: node.Kind, Metadata: NodeMetadata{ CreatedAt: node.Metadata.CreatedAt.Format(time.RFC3339), UpdatedAt: node.Metadata.UpdatedAt.Format(time.RFC3339), diff --git a/internal/http/api/node.go b/internal/http/api/node.go index 96c69c08..f463f80c 100644 --- a/internal/http/api/node.go +++ b/internal/http/api/node.go @@ -1,6 +1,9 @@ package api -import "github.com/perber/wiki/internal/core/auth" +import ( + "github.com/perber/wiki/internal/core/auth" + "github.com/perber/wiki/internal/core/tree" +) type NodeMetadata struct { CreatedAt string `json:"createdAt"` @@ -13,11 +16,12 @@ type NodeMetadata struct { } type Node struct { - ID string `json:"id"` - Title string `json:"title"` - Slug string `json:"slug"` - Path string `json:"path"` - Position int `json:"position"` - Children 
[]*Node `json:"children"` - Metadata NodeMetadata `json:"metadata"` + ID string `json:"id"` + Title string `json:"title"` + Slug string `json:"slug"` + Path string `json:"path"` + Position int `json:"position"` + Kind tree.NodeKind `json:"kind"` + Children []*Node `json:"children"` + Metadata NodeMetadata `json:"metadata"` } diff --git a/internal/http/router.go b/internal/http/router.go index 6eb09bdb..194b0362 100644 --- a/internal/http/router.go +++ b/internal/http/router.go @@ -113,6 +113,7 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { // Pages requiresAuthGroup.POST("/pages", auth_middleware.RequireEditorOrAdmin(), api.CreatePageHandler(wikiInstance)) requiresAuthGroup.POST("/pages/ensure", auth_middleware.RequireEditorOrAdmin(), api.EnsurePageHandler(wikiInstance)) + requiresAuthGroup.POST("/pages/convert/:id", auth_middleware.RequireEditorOrAdmin(), api.ConvertPageHandler(wikiInstance)) requiresAuthGroup.POST("/pages/copy/:id", auth_middleware.RequireEditorOrAdmin(), api.CopyPageHandler(wikiInstance)) requiresAuthGroup.PUT("/pages/:id", auth_middleware.RequireEditorOrAdmin(), api.UpdatePageHandler(wikiInstance)) requiresAuthGroup.DELETE("/pages/:id", auth_middleware.RequireEditorOrAdmin(), api.DeletePageHandler(wikiInstance)) @@ -149,17 +150,17 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { // Serve branding assets (logos, favicons) with extension validation router.GET("/branding/:filename", func(c *gin.Context) { filename := c.Param("filename") - + // Sanitize filename to prevent directory traversal and malicious input // Only allow simple filenames (no path separators, no null bytes, no ..) 
- if strings.Contains(filename, "..") || - strings.Contains(filename, "/") || - strings.Contains(filename, "\\") || + if strings.Contains(filename, "..") || + strings.Contains(filename, "/") || + strings.Contains(filename, "\\") || strings.Contains(filename, "\x00") { c.Status(http.StatusForbidden) return } - + // Get allowed extensions from branding constraints constraints, err := wikiInstance.GetBrandingConstraints() if err != nil { @@ -167,7 +168,7 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { c.Status(http.StatusInternalServerError) return } - + // Build a combined set of allowed extensions for O(1) lookup allowedExts := make(map[string]bool) for _, ext := range constraints.LogoExts { @@ -176,22 +177,22 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { for _, ext := range constraints.FaviconExts { allowedExts[ext] = true } - + // Validate file extension against whitelist ext := strings.ToLower(filepath.Ext(filename)) if !allowedExts[ext] { c.Status(http.StatusForbidden) return } - + // Construct file path brandingDir := wikiInstance.GetBrandingService().GetBrandingAssetsDir() filePath := filepath.Join(brandingDir, filename) - + // Clean the path and verify it's within the branding directory cleanPath := filepath.Clean(filePath) cleanBrandingDir := filepath.Clean(brandingDir) - + // Ensure the resolved path is still within the branding directory // Use filepath.Rel to check the relative path doesn't escape the directory rel, err := filepath.Rel(cleanBrandingDir, cleanPath) @@ -199,7 +200,7 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { c.Status(http.StatusForbidden) return } - + // Check if file exists if _, err := os.Stat(cleanPath); os.IsNotExist(err) { c.Status(http.StatusNotFound) @@ -209,7 +210,7 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { c.Status(http.StatusInternalServerError) return } - + // Serve the file 
c.File(cleanPath) }) diff --git a/internal/wiki/wiki.go b/internal/wiki/wiki.go index bc3f3615..d7832211 100644 --- a/internal/wiki/wiki.go +++ b/internal/wiki/wiki.go @@ -194,10 +194,11 @@ func (w *Wiki) EnsureWelcomePage() error { // Set the content of the welcome page content := `# Welcome to LeafWiki! -LeafWiki is a lightweight, self-hosted knowledge base server for documenting -runbooks, internal docs, and technical knowledge using plain Markdown files. +LeafWiki – A fast wiki for people who think in folders, not feeds. +Single Go binary. Markdown on disk. No external database service. + +LeafWiki is a lightweight, self-hosted wiki for runbooks, internal docs, and technical notes — built for fast writing and explicit structure. It keeps your content as plain Markdown on disk and gives you fast navigation, search, and editing — without running additional services. -Content is stored directly on disk, organized in a clear tree structure, and served by a single Go binary. --- @@ -245,6 +246,10 @@ func (w *Wiki) CreatePage(userID string, parentID *string, title string, slug st ve.Add("kind", "Kind must be specified") } + if kind != nil && *kind != tree.NodeKindPage && *kind != tree.NodeKindSection { + ve.Add("kind", "Kind must be either 'page' or 'section'") + } + if err := w.slug.IsValidSlug(slug); err != nil { ve.Add("slug", err.Error()) } @@ -651,6 +656,19 @@ func (w *Wiki) MovePage(userID, id, parentID string) error { return nil } +func (w *Wiki) ConvertPage(userID, id string, targetKind tree.NodeKind) error { + if id == "root" || id == "" { + return fmt.Errorf("cannot convert root page") + } + + err := w.tree.ConvertNode(userID, id, targetKind) + if err != nil { + return err + } + + return nil +} + func (w *Wiki) SortPages(parentID string, orderedIDs []string) error { return w.tree.SortPages(parentID, orderedIDs) } diff --git a/ui/leafwiki-ui/src/features/page/AddPageDialog.tsx b/ui/leafwiki-ui/src/features/page/AddPageDialog.tsx index dec3b4ba..765921c9 
100644 --- a/ui/leafwiki-ui/src/features/page/AddPageDialog.tsx +++ b/ui/leafwiki-ui/src/features/page/AddPageDialog.tsx @@ -1,20 +1,24 @@ -import BaseDialog from '@/components/BaseDialog' +import BaseDialog, { BaseDialogConfirmButton } from '@/components/BaseDialog' import { FormInput } from '@/components/FormInput' -import { createPage } from '@/lib/api/pages' +import { createPage, NODE_KIND_PAGE } from '@/lib/api/pages' import { handleFieldErrors } from '@/lib/handleFieldErrors' import { DIALOG_ADD_PAGE } from '@/lib/registries' import { buildEditUrl } from '@/lib/urlUtil' import { useTreeStore } from '@/stores/tree' -import { useCallback, useState } from 'react' +import { useCallback, useMemo, useState } from 'react' import { useNavigate } from 'react-router-dom' import { toast } from 'sonner' import { SlugInputWithSuggestion } from './SlugInputWithSuggestion' type AddPageDialogProps = { parentId: string + nodeKind?: 'page' | 'section' } -export function AddPageDialog({ parentId }: AddPageDialogProps) { +export function AddPageDialog({ + parentId, + nodeKind = NODE_KIND_PAGE, +}: AddPageDialogProps) { const [title, setTitle] = useState('') const [slug, setSlug] = useState('') const [loading, setLoading] = useState(false) @@ -52,7 +56,11 @@ export function AddPageDialog({ parentId }: AddPageDialogProps) { }, []) const handleCreate = useCallback( - async (redirect: boolean = true): Promise => { + async ( + redirect: boolean = true, + nodeKind?: 'page' | 'section', + ): Promise => { + if (!nodeKind) nodeKind = NODE_KIND_PAGE // Default to 'page' if not provided if (!title) return false // Should not happen due to button disabling if (!slug) { @@ -68,7 +76,7 @@ export function AddPageDialog({ parentId }: AddPageDialogProps) { setLoading(true) setFieldErrors({}) try { - await createPage({ title, slug, parentId }) + await createPage({ title, slug, parentId, kind: nodeKind }) toast.success('Page created') await reloadTree() if (redirect) { @@ -104,14 +112,44 @@ export 
function AddPageDialog({ parentId }: AddPageDialogProps) { return true }, [resetForm]) + const buttons = useMemo(() => { + const b: BaseDialogConfirmButton[] = [ + { + label: 'Create', + actionType: 'no-redirect', + autoFocus: true, + loading, + disabled: isCreateButtonDisabled, + variant: nodeKind === NODE_KIND_PAGE ? 'secondary' : 'default', + }, + ] + if (nodeKind === NODE_KIND_PAGE) { + b.push({ + label: 'Create & Edit Page', + actionType: 'confirm', + autoFocus: false, + loading, + disabled: isCreateButtonDisabled, + variant: 'default', + }) + } + return b + }, [isCreateButtonDisabled, loading, nodeKind]) + return ( => { - return await handleCreate(actionType !== 'no-redirect') + return await handleCreate(actionType !== 'no-redirect', nodeKind) }} testidPrefix="add-page-dialog" cancelButton={{ @@ -120,24 +158,7 @@ export function AddPageDialog({ parentId }: AddPageDialogProps) { disabled: loading, autoFocus: false, }} - buttons={[ - { - label: 'Create', - actionType: 'no-redirect', - autoFocus: true, - loading, - disabled: isCreateButtonDisabled, - variant: 'secondary', - }, - { - label: 'Create & Edit Page', - actionType: 'confirm', - autoFocus: false, - loading, - disabled: isCreateButtonDisabled, - variant: 'default', - }, - ]} + buttons={buttons} >
0 - const [hovered, setHovered] = useState(false) const { pathname } = useLocation() const currentPath = @@ -81,8 +77,6 @@ export const TreeNode = React.memo(function TreeNode({ })} data-testid={`tree-node-${node.id}`} style={{ paddingLeft: indent }} - onMouseEnter={() => setHovered(true)} - onMouseLeave={() => setHovered(false)} >
- {hasChildren && ( + {node.kind === NODE_KIND_SECTION && ( hasChildren && toggleNode(node.id)} + onClick={() => + node.kind === NODE_KIND_SECTION && toggleNode(node.id) + } /> )} { // add empty space to align with nodes that have children - !hasChildren &&
+ node.kind !== NODE_KIND_SECTION && ( +
+ ) } {linkText} -
- - {(hovered || isMobile) && !readOnlyMode && ( -
- } - tooltip="Create new page" - onClick={() => openDialog(DIALOG_ADD_PAGE, { parentId: node.id })} - /> - } - tooltip="Move page to new parent" - onClick={() => openDialog(DIALOG_MOVE_PAGE, { pageId: node.id })} - /> - {hasChildren && ( + {!readOnlyMode && ( +
} - tooltip="Sort pages" - onClick={() => openDialog(DIALOG_SORT_PAGES, { parent: node })} + actionName="add" + icon={} + tooltip="Create new page" + onClick={() => + openDialog(DIALOG_ADD_PAGE, { parentId: node.id }) + } /> - )} -
- )} + +
+ )} +
state.openDialog) + const reloadTree = useTreeStore((state) => state.reloadTree) + const hasChildren = children && children.length > 0 + const navigate = useNavigate() + const location = useLocation() + const setOpenMenuNodeId = useTreeNodeActionsMenusStore( + (s) => s.setOpenMenuNodeId, + ) + const open = useTreeNodeActionsMenusStore((s) => s.openMenuNodeId === node.id) + + const handleConvertPage = useCallback(() => { + convertPage( + nodeId, + nodeKind === NODE_KIND_PAGE ? NODE_KIND_SECTION : NODE_KIND_PAGE, + ) + .then(() => { + toast.success('Page converted successfully') + reloadTree() + }) + .catch(() => { + toast.error('Failed to convert page') + }) + }, [nodeId, nodeKind, reloadTree]) + + const redirectUrlAfterDelete = useCallback(() => { + if (location.pathname.startsWith('/' + node.path)) { + if (node.parentId) { + return node.path.substring(0, node.path.lastIndexOf('/')) + } else { + return '/' + } + } + + // remove leading slash + return location.pathname.startsWith('/') + ? location.pathname.substring(1) + : location.pathname + }, [location.pathname, node.path, node.parentId]) + + return ( + setOpenMenuNodeId(nextOpen ? node.id : null)} + > + + } + tooltip="Open more actions" + /> + + + { + openDialog(DIALOG_ADD_PAGE, { + parentId: nodeId, + nodeKind: NODE_KIND_PAGE, + }) + }} + > + Add Page + + { + openDialog(DIALOG_ADD_PAGE, { + parentId: nodeId, + nodeKind: NODE_KIND_SECTION, + }) + }} + > + Add + Section + + + { + navigate(`/e/${node.path}`) + }} + > + Edit{' '} + {nodeKind === NODE_KIND_PAGE ? 'Page' : 'Section'} + + {nodeKind === NODE_KIND_SECTION && hasChildren && ( + openDialog(DIALOG_SORT_PAGES, { parent: node })} + > + Sort Section + + )} + openDialog(DIALOG_MOVE_PAGE, { pageId: node.id })} + > + Move{' '} + {nodeKind === NODE_KIND_PAGE ? 
'Page' : 'Section'} + + {nodeKind === NODE_KIND_SECTION && !hasChildren && ( + + Convert to + Page + + )} + + { + openDialog(DIALOG_DELETE_PAGE_CONFIRMATION, { + pageId: node?.id, + redirectUrl: redirectUrlAfterDelete(), + }) + }} + > + {' '} + Delete {nodeKind === NODE_KIND_PAGE ? 'Page' : 'Section'} + + + + ) +} diff --git a/ui/leafwiki-ui/src/features/tree/TreeView.tsx b/ui/leafwiki-ui/src/features/tree/TreeView.tsx index fd6a3e1a..23363ae0 100644 --- a/ui/leafwiki-ui/src/features/tree/TreeView.tsx +++ b/ui/leafwiki-ui/src/features/tree/TreeView.tsx @@ -1,10 +1,11 @@ import { TreeViewActionButton } from '@/features/tree/TreeViewActionButton' +import { NODE_KIND_PAGE, NODE_KIND_SECTION } from '@/lib/api/pages' import { DIALOG_ADD_PAGE, DIALOG_SORT_PAGES } from '@/lib/registries' import { getAncestorIds } from '@/lib/treeUtils' import { useIsReadOnly } from '@/lib/useIsReadOnly' import { useDialogsStore } from '@/stores/dialogs' import { useTreeStore } from '@/stores/tree' -import { List, Plus } from 'lucide-react' +import { FilePlus, FolderPlus, List } from 'lucide-react' import { useEffect } from 'react' import { useLocation } from 'react-router-dom' import { TreeNode } from './TreeNode' @@ -57,14 +58,30 @@ export default function TreeView() {
} + icon={} tooltip="Create new page" - onClick={() => openDialog(DIALOG_ADD_PAGE, { parentId: '' })} + onClick={() => + openDialog(DIALOG_ADD_PAGE, { + parentId: '', + nodeKind: NODE_KIND_PAGE, + }) + } + /> + } + tooltip="Create new section" + onClick={() => + openDialog(DIALOG_ADD_PAGE, { + parentId: '', + nodeKind: NODE_KIND_SECTION, + }) + } /> {tree && ( } + icon={} tooltip="Sort pages" onClick={() => openDialog(DIALOG_SORT_PAGES, { parent: tree })} /> diff --git a/ui/leafwiki-ui/src/features/tree/TreeViewActionButton.tsx b/ui/leafwiki-ui/src/features/tree/TreeViewActionButton.tsx index ed30cc06..5c24be0d 100644 --- a/ui/leafwiki-ui/src/features/tree/TreeViewActionButton.tsx +++ b/ui/leafwiki-ui/src/features/tree/TreeViewActionButton.tsx @@ -1,7 +1,7 @@ import { TooltipWrapper } from '@/components/TooltipWrapper' type TreeViewActionButtonProps = { - onClick: () => void + onClick?: () => void actionName: string icon: React.ReactNode tooltip: string @@ -18,7 +18,10 @@ export function TreeViewActionButton({ + )} + + )} + {/* No children - Add Button and allow users to create a new page */} + {!hasChildren && ( + + )} + + ) +} diff --git a/ui/leafwiki-ui/src/features/viewer/PageViewer.tsx b/ui/leafwiki-ui/src/features/viewer/PageViewer.tsx index 7d64eab6..7cde013b 100644 --- a/ui/leafwiki-ui/src/features/viewer/PageViewer.tsx +++ b/ui/leafwiki-ui/src/features/viewer/PageViewer.tsx @@ -12,6 +12,7 @@ import { BacklinkInfo } from '../links/LinkInfo' import MarkdownPreview from '../preview/MarkdownPreview' import { useProgressbarStore } from '../progressbar/progressbar' import Breadcrumbs from './Breadcrumbs' +import EmptySectionChildrenList from './EmptySectionChildrenList' import { useScrollToHeadline } from './useScrollToHeadline' import { useSetPageTitle } from './useSetPageTitle' import { useToolbarActions } from './useToolbarActions' @@ -97,6 +98,7 @@ export default function PageViewer() {
+
diff --git a/ui/leafwiki-ui/src/index.css b/ui/leafwiki-ui/src/index.css index 5d67e3a5..bea28afe 100644 --- a/ui/leafwiki-ui/src/index.css +++ b/ui/leafwiki-ui/src/index.css @@ -1225,11 +1225,26 @@ } .tree-node__actions { - @apply bg-surface absolute right-0 flex items-center gap-1 rounded-md shadow-md; + @apply flex gap-1; + opacity: 0; + pointer-events: none; + transition: opacity 120ms ease; + } + + .tree-node:hover .tree-node__actions { + opacity: 1; + pointer-events: auto; + } + + @media (hover: none) { + .tree-node__actions { + opacity: 1; + pointer-events: auto; + } } .tree-node__actions--compact { - @apply px-1 py-2; + @apply pl-1; } .tree-node__action-icon { @@ -1318,6 +1333,22 @@ .branding-settings__actions { @apply flex justify-end gap-2; } + + .child-list__section { + @apply border-surface-border bg-surface mb-2 rounded-lg border px-4 py-2; + } + + .child-list__section-title { + @apply text-interface-text mt-0 mb-4 text-lg font-semibold; + } + + .child-list__section li { + @apply text-sm; + } + + .child-list__section li a { + @apply text-interface-text! 
hover:underline!; + } } /* ------------------------------------------------------- @@ -1449,6 +1480,7 @@ padding: 0 !important; border: 0 !important; } + .app-layout__content-wrapper { height: auto !important; } diff --git a/ui/leafwiki-ui/src/lib/api/pages.ts b/ui/leafwiki-ui/src/lib/api/pages.ts index 8f9c1c9b..b2dfd50a 100644 --- a/ui/leafwiki-ui/src/lib/api/pages.ts +++ b/ui/leafwiki-ui/src/lib/api/pages.ts @@ -1,5 +1,8 @@ import { fetchWithAuth } from './auth' +export const NODE_KIND_PAGE = 'page' +export const NODE_KIND_SECTION = 'section' + export type PageMetadata = { createdAt: string updatedAt: string @@ -22,7 +25,7 @@ export type PageNode = { path: string parentId?: string | null children: PageNode[] | null - + kind: 'page' | 'section' metadata?: PageMetadata // optional metadata, because older API responses may not have it } @@ -32,6 +35,7 @@ export interface Page { path: string title: string content: string + kind: 'page' | 'section' metadata?: PageMetadata // optional metadata, because older API responses may not have it } @@ -71,17 +75,19 @@ export async function createPage({ title, slug, parentId, + kind, }: { title: string slug: string parentId: string | null + kind: 'page' | 'section' }) { if (parentId === '') parentId = null return await fetchWithAuth(`/api/pages`, { method: 'POST', headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ title, slug, parentId }), + body: JSON.stringify({ title, slug, parentId, kind }), }) } @@ -146,6 +152,14 @@ export async function sortPages(parentId: string, orderedIDs: string[]) { }) } +export async function convertPage(id: string, targetKind: 'page' | 'section') { + return await fetchWithAuth(`/api/pages/convert/${id}`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ targetKind }), + }) +} + export type PathLookupResult = { path: string exists: boolean From db83053f310bb9e279ffcf3dd6c758eb9d446c87 Mon Sep 17 00:00:00 2001 From: Patrick Erber 
Date: Mon, 19 Jan 2026 19:36:11 +0100 Subject: [PATCH 06/11] feat: update go mod --- go.mod | 1 + go.sum | 29 +++++++++++++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/go.mod b/go.mod index 84504c29..2784dbdb 100644 --- a/go.mod +++ b/go.mod @@ -12,6 +12,7 @@ require ( github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 github.com/yuin/goldmark v1.7.16 golang.org/x/crypto v0.47.0 + gopkg.in/yaml.v3 v3.0.1 modernc.org/sqlite v1.44.2 ) diff --git a/go.sum b/go.sum index 0dfcad92..aa137c48 100644 --- a/go.sum +++ b/go.sum @@ -1,9 +1,15 @@ github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= +github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= github.com/bytedance/sonic v1.14.0 h1:/OfKt8HFw0kh2rj8N0F6C/qPGRESq0BbaNZgcNXXzQQ= github.com/bytedance/sonic v1.14.0/go.mod h1:WoEbx8WTcFJfzCe0hbmyTGrfjt8PzNEBdxlNUO24NhA= +github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= +github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA= github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= +github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= +github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= @@ -16,6 +22,8 @@ github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S 
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY= github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok= +github.com/gabriel-vasile/mimetype v1.4.12 h1:e9hWvmLYvtp846tLHam2o++qitpguFiYCKbn0w9jyqw= +github.com/gabriel-vasile/mimetype v1.4.12/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s= github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w= github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM= github.com/gin-gonic/gin v1.11.0 h1:OW/6PLjyusp2PPXtyxKHU0RbX6I/l28FTdDlae5ueWk= @@ -28,10 +36,14 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4= github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= +github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w= +github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM= github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw= github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= +github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM= +github.com/goccy/go-yaml v1.19.2/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= github.com/golang-jwt/jwt/v5 v5.3.0/go.mod 
h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= @@ -78,6 +90,8 @@ github.com/quic-go/qpack v0.6.0 h1:g7W+BMYynC1LbYLSqRt8PBg5Tgwxn214ZZR34VIOjz8= github.com/quic-go/qpack v0.6.0/go.mod h1:lUpLKChi8njB4ty2bFLX2x4gzDqXwUpaO1DP9qMDZII= github.com/quic-go/quic-go v0.57.0 h1:AsSSrrMs4qI/hLrKlTH/TGQeTMY0ib1pAOX7vA3AdqE= github.com/quic-go/quic-go v0.57.0/go.mod h1:ly4QBAjHA2VhdnxhojRsCUOeJwKYg+taDlos92xb1+s= +github.com/quic-go/quic-go v0.59.0 h1:OLJkp1Mlm/aS7dpKgTc6cnpynnD2Xg7C1pwL6vy/SAw= +github.com/quic-go/quic-go v0.59.0/go.mod h1:upnsH4Ju1YkqpLXC305eW3yDZ4NfnNbmQRCMWS58IKU= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= @@ -87,10 +101,13 @@ github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= 
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 h1:xzABM9let0HLLqFypcxvLmlvEciCHL7+Lv+4vwZqecI= @@ -99,20 +116,29 @@ github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA= github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4= +github.com/ugorji/go/codec v1.3.1 h1:waO7eEiFDwidsBN6agj1vJQ4AG7lh2yqXyOXqhgQuyY= +github.com/ugorji/go/codec v1.3.1/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4= github.com/yuin/goldmark v1.7.16 h1:n+CJdUxaFMiDUNnWC3dMWCIQJSkxH4uz3ZwQBkAlVNE= github.com/yuin/goldmark v1.7.16/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg= go.uber.org/mock v0.5.2 h1:LbtPTcP8A5k9WPXj54PPPbjcI4Y6lhyOZXn+VS7wNko= go.uber.org/mock v0.5.2/go.mod h1:wLlUxC2vVTPTaE3UD51E0BGOAElKrILxhVSDYQLld5o= golang.org/x/arch v0.20.0 h1:dx1zTU0MAE98U+TQ8BLl7XsJbgze2WnNKF/8tGp/Q6c= golang.org/x/arch v0.20.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk= +golang.org/x/arch v0.23.0 h1:lKF64A2jF6Zd8L0knGltUnegD62JMFBiCPBmQpToHhg= +golang.org/x/arch v0.23.0/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A= golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY= golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70= +golang.org/x/exp v0.0.0-20260112195511-716be5621a96 h1:Z/6YuSHTLOHfNFdb8zVZomZr7cqNgTJvA8+Qz75D8gU= +golang.org/x/exp v0.0.0-20260112195511-716be5621a96/go.mod 
h1:nzimsREAkjBCIEFtHiYkrJyT+2uy9YZJB7H1k68CXZU= golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI= golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg= +golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c= golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU= golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY= +golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= +golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -124,8 +150,11 @@ golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA= golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc= +golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc= google.golang.org/protobuf v1.36.9 h1:w2gp2mA27hUeUzj9Ex9FBjsBm40zfaDtEWow293U7Iw= google.golang.org/protobuf v1.36.9/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= +google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= +google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= From 8422718450040b2a904543fcd6da9b291c96e07d Mon Sep 17 00:00:00 2001 From: perber Date: Mon, 19 Jan 2026 
19:52:55 +0100 Subject: [PATCH 07/11] fix: remove frontmatter on search indexing (#610) --- internal/search/sqlite_index.go | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/internal/search/sqlite_index.go b/internal/search/sqlite_index.go index ea5890b2..2d0fe695 100644 --- a/internal/search/sqlite_index.go +++ b/internal/search/sqlite_index.go @@ -9,6 +9,7 @@ import ( "sync" "github.com/microcosm-cc/bluemonday" + "github.com/perber/wiki/internal/core/tree" "github.com/russross/blackfriday/v2" _ "modernc.org/sqlite" // Import SQLite driver ) @@ -158,7 +159,12 @@ func (s *SQLiteIndex) Close() error { return nil } -func (s *SQLiteIndex) IndexPage(path string, filePath string, pageID string, title string, content string) error { +func (s *SQLiteIndex) IndexPage(path string, filePath string, pageID string, title string, raw string) error { + _, content, _, err := tree.ParseFrontmatter(raw) + if err != nil { + return err + } + // Headings extracted from the Markdown headings := extractHeadings(content) From 5a69507d28ebf8eaceb4e33b877afa0a2c99131e Mon Sep 17 00:00:00 2001 From: perber Date: Sun, 25 Jan 2026 20:08:20 +0100 Subject: [PATCH 08/11] feat: add import for markdown files --- go.sum | 29 -- internal/core/tree/page_node.go | 100 +++- internal/core/tree/path_lookup.go | 8 +- internal/core/tree/slug_service.go | 25 + internal/core/tree/slug_service_test.go | 29 ++ internal/core/tree/tree_service.go | 11 + internal/http/api/import.go | 88 ++++ internal/http/router.go | 19 + internal/importer/executor.go | 139 ++++++ internal/importer/executor_test.go | 206 ++++++++ internal/importer/fixtures/fixture-1.zip | Bin 0 -> 1206 bytes internal/importer/importer_service.go | 220 +++++++++ internal/importer/importer_service_test.go | 269 +++++++++++ internal/importer/importer_wiki.go | 10 + internal/importer/plan_store.go | 48 ++ internal/importer/plan_store_test.go | 53 ++ internal/importer/planner.go | 265 ++++++++++ internal/importer/planner_test.go 
| 451 ++++++++++++++++++ internal/importer/zip_extractor.go | 107 +++++ internal/importer/zip_extractor_test.go | 66 +++ internal/importer/zip_workspace.go | 14 + internal/wiki/wiki.go | 8 + ui/leafwiki-ui/src/components/UserToolbar.tsx | 6 + .../features/branding/BrandingSettings.tsx | 54 +-- .../src/features/importer/Importer.tsx | 288 +++++++++++ .../features/importer/useToolbarActions.tsx | 13 + ui/leafwiki-ui/src/features/router/router.tsx | 11 + .../src/features/users/UserManagement.tsx | 58 +-- ui/leafwiki-ui/src/index.css | 167 +++---- ui/leafwiki-ui/src/lib/api/import.ts | 58 +++ ui/leafwiki-ui/src/stores/import.ts | 81 ++++ 31 files changed, 2721 insertions(+), 180 deletions(-) create mode 100644 internal/http/api/import.go create mode 100644 internal/importer/executor.go create mode 100644 internal/importer/executor_test.go create mode 100644 internal/importer/fixtures/fixture-1.zip create mode 100644 internal/importer/importer_service.go create mode 100644 internal/importer/importer_service_test.go create mode 100644 internal/importer/importer_wiki.go create mode 100644 internal/importer/plan_store.go create mode 100644 internal/importer/plan_store_test.go create mode 100644 internal/importer/planner.go create mode 100644 internal/importer/planner_test.go create mode 100644 internal/importer/zip_extractor.go create mode 100644 internal/importer/zip_extractor_test.go create mode 100644 internal/importer/zip_workspace.go create mode 100644 ui/leafwiki-ui/src/features/importer/Importer.tsx create mode 100644 ui/leafwiki-ui/src/features/importer/useToolbarActions.tsx create mode 100644 ui/leafwiki-ui/src/lib/api/import.ts create mode 100644 ui/leafwiki-ui/src/stores/import.ts diff --git a/go.sum b/go.sum index aa137c48..0dfcad92 100644 --- a/go.sum +++ b/go.sum @@ -1,15 +1,9 @@ github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= 
-github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= -github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= github.com/bytedance/sonic v1.14.0 h1:/OfKt8HFw0kh2rj8N0F6C/qPGRESq0BbaNZgcNXXzQQ= github.com/bytedance/sonic v1.14.0/go.mod h1:WoEbx8WTcFJfzCe0hbmyTGrfjt8PzNEBdxlNUO24NhA= -github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE= -github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980= github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA= github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= -github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o= -github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= @@ -22,8 +16,6 @@ github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY= github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok= -github.com/gabriel-vasile/mimetype v1.4.12 h1:e9hWvmLYvtp846tLHam2o++qitpguFiYCKbn0w9jyqw= -github.com/gabriel-vasile/mimetype v1.4.12/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s= github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w= github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM= github.com/gin-gonic/gin v1.11.0 h1:OW/6PLjyusp2PPXtyxKHU0RbX6I/l28FTdDlae5ueWk= @@ -36,14 +28,10 @@ 
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4= github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= -github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w= -github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM= github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw= github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= -github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM= -github.com/goccy/go-yaml v1.19.2/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA= github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= @@ -90,8 +78,6 @@ github.com/quic-go/qpack v0.6.0 h1:g7W+BMYynC1LbYLSqRt8PBg5Tgwxn214ZZR34VIOjz8= github.com/quic-go/qpack v0.6.0/go.mod h1:lUpLKChi8njB4ty2bFLX2x4gzDqXwUpaO1DP9qMDZII= github.com/quic-go/quic-go v0.57.0 h1:AsSSrrMs4qI/hLrKlTH/TGQeTMY0ib1pAOX7vA3AdqE= github.com/quic-go/quic-go v0.57.0/go.mod h1:ly4QBAjHA2VhdnxhojRsCUOeJwKYg+taDlos92xb1+s= -github.com/quic-go/quic-go v0.59.0 h1:OLJkp1Mlm/aS7dpKgTc6cnpynnD2Xg7C1pwL6vy/SAw= -github.com/quic-go/quic-go v0.59.0/go.mod h1:upnsH4Ju1YkqpLXC305eW3yDZ4NfnNbmQRCMWS58IKU= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec 
h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= @@ -101,13 +87,10 @@ github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 h1:xzABM9let0HLLqFypcxvLmlvEciCHL7+Lv+4vwZqecI= @@ -116,29 +99,20 @@ github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA= github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4= -github.com/ugorji/go/codec v1.3.1 h1:waO7eEiFDwidsBN6agj1vJQ4AG7lh2yqXyOXqhgQuyY= 
-github.com/ugorji/go/codec v1.3.1/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4= github.com/yuin/goldmark v1.7.16 h1:n+CJdUxaFMiDUNnWC3dMWCIQJSkxH4uz3ZwQBkAlVNE= github.com/yuin/goldmark v1.7.16/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg= go.uber.org/mock v0.5.2 h1:LbtPTcP8A5k9WPXj54PPPbjcI4Y6lhyOZXn+VS7wNko= go.uber.org/mock v0.5.2/go.mod h1:wLlUxC2vVTPTaE3UD51E0BGOAElKrILxhVSDYQLld5o= golang.org/x/arch v0.20.0 h1:dx1zTU0MAE98U+TQ8BLl7XsJbgze2WnNKF/8tGp/Q6c= golang.org/x/arch v0.20.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk= -golang.org/x/arch v0.23.0 h1:lKF64A2jF6Zd8L0knGltUnegD62JMFBiCPBmQpToHhg= -golang.org/x/arch v0.23.0/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A= golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY= golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70= -golang.org/x/exp v0.0.0-20260112195511-716be5621a96 h1:Z/6YuSHTLOHfNFdb8zVZomZr7cqNgTJvA8+Qz75D8gU= -golang.org/x/exp v0.0.0-20260112195511-716be5621a96/go.mod h1:nzimsREAkjBCIEFtHiYkrJyT+2uy9YZJB7H1k68CXZU= golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI= golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg= -golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c= golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU= golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY= -golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= -golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= 
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -150,11 +124,8 @@ golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA= golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc= -golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc= google.golang.org/protobuf v1.36.9 h1:w2gp2mA27hUeUzj9Ex9FBjsBm40zfaDtEWow293U7Iw= google.golang.org/protobuf v1.36.9/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= -google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= -google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= diff --git a/internal/core/tree/page_node.go b/internal/core/tree/page_node.go index 8ee28633..50f448fb 100644 --- a/internal/core/tree/page_node.go +++ b/internal/core/tree/page_node.go @@ -1,6 +1,13 @@ package tree -import "time" +import ( + "crypto/sha256" + "encoding/binary" + "encoding/hex" + "io" + "sort" + "time" +) // PageMetadata holds simple metadata for a page. type PageMetadata struct { @@ -68,3 +75,94 @@ func (p *PageNode) CalculatePath() string { } return p.Parent.CalculatePath() + "/" + p.Slug } + +// Hash returns a deterministic hash of the node and all descendants. +// Parent is intentionally ignored to avoid cycles. 
+func (p *PageNode) Hash() string { + sum := p.hashSum(true) // includeMetadata = true + return hex.EncodeToString(sum[:]) +} + +func (p *PageNode) hashSum(includeMetadata bool) [32]byte { + h := sha256.New() + + // depth-first, deterministic + // Write directly to hash to avoid buffering entire tree in memory + p.writeHashPayload(h, includeMetadata) + + var out [32]byte + copy(out[:], h.Sum(nil)) + return out +} + +func (p *PageNode) writeHashPayload(w io.Writer, includeMetadata bool) { + // Node fields (parent excluded) + writeString(w, "id") + writeString(w, p.ID) + writeString(w, "title") + writeString(w, p.Title) + writeString(w, "slug") + writeString(w, p.Slug) + writeString(w, "kind") + writeString(w, string(p.Kind)) + writeString(w, "position") + writeInt64(w, int64(p.Position)) + + if includeMetadata { + writeString(w, "meta.createdAt") + writeTime(w, p.Metadata.CreatedAt) + writeString(w, "meta.updatedAt") + writeTime(w, p.Metadata.UpdatedAt) + writeString(w, "meta.creatorId") + writeString(w, p.Metadata.CreatorID) + writeString(w, "meta.lastAuthorId") + writeString(w, p.Metadata.LastAuthorID) + } + + // Children: enforce stable order (Position, then ID as tie-breaker) + children := make([]*PageNode, 0, len(p.Children)) + children = append(children, p.Children...) 
+ + sort.SliceStable(children, func(i, j int) bool { + if children[i] == nil || children[j] == nil { + return children[j] != nil // nils last + } + if children[i].Position != children[j].Position { + return children[i].Position < children[j].Position + } + return children[i].ID < children[j].ID + }) + + writeString(w, "children.count") + writeInt64(w, int64(len(children))) + + for _, ch := range children { + if ch == nil { + writeString(w, "child.nil") + continue + } + // Separator for safety + writeString(w, "child.begin") + ch.writeHashPayload(w, includeMetadata) + writeString(w, "child.end") + } +} + +func writeString(w io.Writer, s string) { + // length-prefixed string (uint32 len + bytes) + _ = binary.Write(w, binary.BigEndian, uint32(len(s))) + _, _ = io.WriteString(w, s) +} + +func writeInt64(w io.Writer, v int64) { + _ = binary.Write(w, binary.BigEndian, v) +} + +func writeTime(w io.Writer, t time.Time) { + // stable: UnixNano in UTC (Zero => 0) + if t.IsZero() { + writeInt64(w, 0) + return + } + writeInt64(w, t.UTC().UnixNano()) +} diff --git a/internal/core/tree/path_lookup.go b/internal/core/tree/path_lookup.go index 93dc8a73..2ac6cd84 100644 --- a/internal/core/tree/path_lookup.go +++ b/internal/core/tree/path_lookup.go @@ -2,9 +2,11 @@ package tree // PathLookup helpers for LookupPath() type PathSegment struct { - Slug string `json:"slug"` - Exists bool `json:"exists"` - ID *string `json:"id,omitempty"` + Slug string `json:"slug"` + Exists bool `json:"exists"` + Kind *NodeKind `json:"kind,omitempty"` + Title *string `json:"title,omitempty"` + ID *string `json:"id,omitempty"` } type PathLookup struct { diff --git a/internal/core/tree/slug_service.go b/internal/core/tree/slug_service.go index a4bdbeca..25bf1bd9 100644 --- a/internal/core/tree/slug_service.go +++ b/internal/core/tree/slug_service.go @@ -82,6 +82,31 @@ func hasSlugConflict(parent *PageNode, currentID string, slug string) bool { return false } +func (s *SlugService) NormalizePath(path 
string, validate bool) (string, error) { + segments := make([]string, 0) + + for _, segment := range strings.Split(path, string("/")) { + + if segment == "" { + continue + } + + if validate { + // normalize first and then validate + // the validation will ensure that the segment is a proper slug + seg := normalizeSlug(segment) + if err := s.IsValidSlug(seg); err != nil { + return "", fmt.Errorf("segment '%s' is not a valid slug: %v", segment, err) + } + segment = seg + } else { + segment = normalizeSlug(segment) + } + segments = append(segments, segment) + } + return strings.Join(segments, string("/")), nil +} + func (s *SlugService) NormalizeFilename(filename string) string { ext := filepath.Ext(filename) base := filename[:len(filename)-len(ext)] diff --git a/internal/core/tree/slug_service_test.go b/internal/core/tree/slug_service_test.go index b4c45aa4..e65df416 100644 --- a/internal/core/tree/slug_service_test.go +++ b/internal/core/tree/slug_service_test.go @@ -74,3 +74,32 @@ func TestGenerateUniqueSlug_SpecialCharacters(t *testing.T) { t.Errorf("Expected 'aepfel-and-baume', got '%s'", result) } } + +func TestNormalizePath(t *testing.T) { + s := NewSlugService() + + tests := []struct { + input string + expected string + }{ + {"folder/subfolder/page.md", "folder/subfolder/page-md"}, + {"My Folder/Another Folder/Page Title.md", "my-folder/another-folder/page-title-md"}, + {"Äpfel & Bäume/Über uns.md", "apfel-and-baume/uber-uns-md"}, + {"folder//subfolder///page.md", "folder/subfolder/page-md"}, + {"/leading/slash/page.md", "leading/slash/page-md"}, + {"only-file.md", "only-file-md"}, + } + + for _, test := range tests { + + result, err := s.NormalizePath(test.input, true) + if err != nil { + t.Errorf("Unexpected error for input %v: %v", test.input, err) + continue + } + + if result != test.expected { + t.Errorf("For input %v, expected %v but got %v", test.input, test.expected, result) + } + } +} diff --git a/internal/core/tree/tree_service.go 
b/internal/core/tree/tree_service.go index 59e8e4aa..f5a487ce 100644 --- a/internal/core/tree/tree_service.go +++ b/internal/core/tree/tree_service.go @@ -324,6 +324,15 @@ func (t *TreeService) SaveTree() error { return t.withLockedTree(t.saveTreeLocked) } +func (t *TreeService) TreeHash() string { + var hash string + _ = t.withRLockedTree(func() error { + hash = t.tree.Hash() + return nil + }) + return hash +} + func (t *TreeService) saveTreeLocked() error { // Save the tree to the storage directory return t.store.SaveTree(t.treeFilename, t.tree) @@ -764,6 +773,8 @@ func (t *TreeService) LookupPagePathLocked(entry []*PageNode, p string) (*PathLo // Segment exists lookup.Segments[i].Exists = true lookup.Segments[i].ID = &e.ID + lookup.Segments[i].Kind = &e.Kind + lookup.Segments[i].Title = &e.Title // Move to the next entry entry = e.Children diff --git a/internal/http/api/import.go b/internal/http/api/import.go new file mode 100644 index 00000000..31911d51 --- /dev/null +++ b/internal/http/api/import.go @@ -0,0 +1,88 @@ +package api + +import ( + "net/http" + + "github.com/gin-gonic/gin" + auth_middleware "github.com/perber/wiki/internal/http/middleware/auth" + "github.com/perber/wiki/internal/importer" + "github.com/perber/wiki/internal/wiki" +) + +func CreateImportPlanHandler(svc *importer.ImporterService) gin.HandlerFunc { + return func(c *gin.Context) { + user := auth_middleware.MustGetUser(c) + if user == nil { + return + } + + const maxUploadSize = 500 << 20 // 500 MiB (~524 MB) + c.Request.Body = http.MaxBytesReader(c.Writer, c.Request.Body, maxUploadSize) + + // Parse form + if err := c.Request.ParseMultipartForm(maxUploadSize); err != nil { + c.JSON(http.StatusRequestEntityTooLarge, gin.H{"error": "upload exceeds maximum size limit of 500 MiB"}) + return + } + + // multipart: file + fh, err := c.FormFile("file") + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing file"}) + return + } + + file, err := fh.Open() + if err != nil { + 
c.JSON(http.StatusBadRequest, gin.H{"error": "failed to open uploaded file"}) + return + } + defer file.Close() + + // optional: targetBasePath from form (defaults to empty string = root) + targetBasePath := c.PostForm("targetBasePath") + + plan, err := svc.CreateImportPlanFromZipUpload(file, targetBasePath) + if err != nil { + respondWithError(c, err) + return + } + + c.JSON(http.StatusOK, plan) + } +} + +func GetImportPlanHandler(svc *importer.ImporterService) gin.HandlerFunc { + return func(c *gin.Context) { + plan, err := svc.GetCurrentPlan() + if err != nil { + respondWithError(c, err) + return + } + c.JSON(http.StatusOK, plan) + } +} + +func ExecuteImportHandler(svc *importer.ImporterService, w *wiki.Wiki) gin.HandlerFunc { + return func(c *gin.Context) { + user := auth_middleware.MustGetUser(c) + if user == nil { + return + } + + res, err := svc.ExecuteCurrentPlan(user.ID) + if err != nil { + respondWithError(c, err) + return + } + + c.JSON(http.StatusOK, res) + } +} + +func ClearImportPlanHandler(svc *importer.ImporterService) gin.HandlerFunc { + return func(c *gin.Context) { + svc.ClearCurrentPlan() + c.JSON(http.StatusOK, gin.H{"ok": true}) + } +} diff --git a/internal/http/router.go b/internal/http/router.go index 194b0362..5bffb70e 100644 --- a/internal/http/router.go +++ b/internal/http/router.go @@ -14,6 +14,7 @@ import ( "github.com/perber/wiki/internal/http/api" auth_middleware "github.com/perber/wiki/internal/http/middleware/auth" "github.com/perber/wiki/internal/http/middleware/security" + "github.com/perber/wiki/internal/importer" "github.com/perber/wiki/internal/wiki" ) @@ -38,6 +39,16 @@ type RouterOptions struct { AuthDisabled bool // Whether authentication is disabled } +// wireImporterService sets up and returns an ImporterService instance +// Parameters: +// - w: the wiki instance to use for importing +func wireImporterService(w *wiki.Wiki) *importer.ImporterService { + slugger := w.GetSlugService() + planner := importer.NewPlanner(w, 
slugger) + store := importer.NewPlanStore() + return importer.NewImporterService(planner, store) +} + // NewRouter creates a new HTTP router for the wiki application. // Parameters: // - wikiInstance: the wiki instance to serve @@ -49,6 +60,8 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { gin.SetMode(gin.DebugMode) } + importerService := wireImporterService(wikiInstance) + router := gin.Default() router.StaticFS("/assets", gin.Dir(wikiInstance.GetAssetService().GetAssetsDir(), true)) @@ -145,6 +158,12 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { requiresAuthGroup.GET("/pages/:id/assets", auth_middleware.RequireEditorOrAdmin(), api.ListAssetsHandler(wikiInstance)) requiresAuthGroup.PUT("/pages/:id/assets/rename", auth_middleware.RequireEditorOrAdmin(), api.RenameAssetHandler(wikiInstance)) requiresAuthGroup.DELETE("/pages/:id/assets/:name", auth_middleware.RequireEditorOrAdmin(), api.DeleteAssetHandler(wikiInstance)) + + // Importer + requiresAuthGroup.POST("/import/plan", auth_middleware.RequireEditorOrAdmin(), api.CreateImportPlanHandler(importerService)) + requiresAuthGroup.GET("/import/plan", auth_middleware.RequireEditorOrAdmin(), api.GetImportPlanHandler(importerService)) + requiresAuthGroup.POST("/import/execute", auth_middleware.RequireEditorOrAdmin(), api.ExecuteImportHandler(importerService, wikiInstance)) + requiresAuthGroup.DELETE("/import/plan", auth_middleware.RequireEditorOrAdmin(), api.ClearImportPlanHandler(importerService)) } // Serve branding assets (logos, favicons) with extension validation diff --git a/internal/importer/executor.go b/internal/importer/executor.go new file mode 100644 index 00000000..6e5a0e97 --- /dev/null +++ b/internal/importer/executor.go @@ -0,0 +1,139 @@ +package importer + +import ( + "fmt" + "log/slog" + "os" + "path/filepath" + + "github.com/perber/wiki/internal/core/tree" +) + +type ExecutionResult struct { + ImportedCount int `json:"imported_count"` + 
UpdatedCount int `json:"updated_count"` + SkippedCount int `json:"skipped_count"` + Items []ExecutionItemResult `json:"items"` + TreeHash string `json:"tree_hash"` // hash of the state of the wiki tree after import + TreeHashBefore string `json:"tree_hash_before"` // hash of the state of the wiki tree before import +} + +type ExecutionAction string + +const ( + ExecutionActionCreated ExecutionAction = "created" + ExecutionActionUpdated ExecutionAction = "updated" + ExecutionActionSkipped ExecutionAction = "skipped" +) + +type ExecutionItemResult struct { + SourcePath string `json:"source_path"` + TargetPath string `json:"target_path"` + Action ExecutionAction `json:"action"` + Error *string `json:"error,omitempty"` + Notes []string `json:"notes,omitempty"` +} + +type Executor struct { + plan *PlanResult + planOptions *PlanOptions + wiki ImporterWiki + logger *slog.Logger +} + +func NewExecutor(plan *PlanResult, planOptions *PlanOptions, wiki ImporterWiki, logger *slog.Logger) *Executor { + return &Executor{ + plan: plan, + planOptions: planOptions, + wiki: wiki, + logger: logger.With("component", "ImporterExecutor"), + } +} + +// Execute runs the import based on the provided plan +func (e *Executor) Execute(userID string) (*ExecutionResult, error) { + beforeExecution := e.wiki.TreeHash() + if e.plan.TreeHash != beforeExecution { + return nil, fmt.Errorf("plan is stale: expected tree_hash %s but got %s", e.plan.TreeHash, beforeExecution) + } + + result := &ExecutionResult{ + TreeHashBefore: beforeExecution, + } + + for _, item := range e.plan.Items { + execItem := ExecutionItemResult{ + SourcePath: item.SourcePath, + TargetPath: item.TargetPath, + Notes: append([]string{}, item.Notes...), + Error: nil, + } + + switch item.Action { + case PlanActionCreate: + // Creates the page or section and also all necessary parent sections + page, err := e.wiki.EnsurePath(userID, item.TargetPath, item.Title, &item.Kind) + if err != nil { + errMsg := err.Error() + execItem.Action 
= ExecutionActionSkipped + execItem.Error = &errMsg + result.SkippedCount++ + result.Items = append(result.Items, execItem) + e.logger.Error("Failed to ensure path", "target_path", item.TargetPath, "error", err) + continue + } + // Read the content from the source path + // And update the page content + if page == nil { + errMsg := "could not create page" + execItem.Action = ExecutionActionSkipped + execItem.Error = &errMsg + result.SkippedCount++ + result.Items = append(result.Items, execItem) + e.logger.Error("Could not create page", "target_path", item.TargetPath, "error", errMsg) + continue + } + sourceAbs := filepath.Join(e.planOptions.SourceBasePath, filepath.FromSlash(item.SourcePath)) + content, err := os.ReadFile(sourceAbs) + if err != nil { + errMsg := err.Error() + execItem.Action = ExecutionActionSkipped + execItem.Error = &errMsg + result.SkippedCount++ + result.Items = append(result.Items, execItem) + e.logger.Error("Failed to read source file", "source_path", sourceAbs, "error", err) + continue + } + // Strip frontmatter if any + _, body, _ := tree.SplitFrontmatter(string(content)) + if _, err := e.wiki.UpdatePage(userID, page.ID, page.Title, page.Slug, &body, &page.Kind); err != nil { + errMsg := err.Error() + execItem.Action = ExecutionActionSkipped + execItem.Error = &errMsg + result.SkippedCount++ + result.Items = append(result.Items, execItem) + e.logger.Error("Failed to update page content", "page_id", page.ID, "error", err) + continue + } + execItem.Action = ExecutionActionCreated + result.ImportedCount++ + e.logger.Info("Imported page", "source_path", item.SourcePath, "target_path", item.TargetPath, "page_id", page.ID) + case PlanActionSkip: + execItem.Action = ExecutionActionSkipped + e.logger.Info("Skipped page", "source_path", item.SourcePath, "target_path", item.TargetPath) + result.SkippedCount++ + default: + errMsg := "unknown action" + execItem.Action = ExecutionActionSkipped + execItem.Error = &errMsg + e.logger.Info("Skipped page 
with unknown action", "source_path", item.SourcePath, "target_path", item.TargetPath) + result.SkippedCount++ + } + + result.Items = append(result.Items, execItem) + } + + result.TreeHash = e.wiki.TreeHash() + + return result, nil +} diff --git a/internal/importer/executor_test.go b/internal/importer/executor_test.go new file mode 100644 index 00000000..eb6af495 --- /dev/null +++ b/internal/importer/executor_test.go @@ -0,0 +1,206 @@ +package importer + +import ( + "errors" + "log/slog" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/perber/wiki/internal/core/tree" +) + +type fakeExecWiki struct { + hash string + + ensureCalls int + updateCalls int + + ensureFn func(userID, targetPath, title string, kind *tree.NodeKind) (*tree.Page, error) + updateFn func(userID, id, title, slug string, content *string, kind *tree.NodeKind) (*tree.Page, error) + + lastUpdatedContent *string +} + +func (f *fakeExecWiki) TreeHash() string { return f.hash } + +func (f *fakeExecWiki) LookupPagePath(path string) (*tree.PathLookup, error) { + panic("not used by Executor") +} + +func (f *fakeExecWiki) EnsurePath(userID string, targetPath string, title string, kind *tree.NodeKind) (*tree.Page, error) { + f.ensureCalls++ + if f.ensureFn != nil { + return f.ensureFn(userID, targetPath, title, kind) + } + return &tree.Page{PageNode: &tree.PageNode{ID: "p1", Title: title, Slug: "slug", Kind: *kind}}, nil +} + +func (f *fakeExecWiki) UpdatePage(userID string, id, title, slug string, content *string, kind *tree.NodeKind) (*tree.Page, error) { + f.updateCalls++ + f.lastUpdatedContent = content + if f.updateFn != nil { + return f.updateFn(userID, id, title, slug, content, kind) + } + // simulate tree change + f.hash = f.hash + "-changed" + return &tree.Page{PageNode: &tree.PageNode{ID: id, Title: title, Slug: slug, Kind: *kind}}, nil +} + +func writeTmp(t *testing.T, dir, rel, content string) { + t.Helper() + abs := filepath.Join(dir, filepath.FromSlash(rel)) + if err := 
os.MkdirAll(filepath.Dir(abs), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + if err := os.WriteFile(abs, []byte(content), 0o644); err != nil { + t.Fatalf("write: %v", err) + } +} + +func TestExecutor_StalePlan(t *testing.T) { + w := &fakeExecWiki{hash: "new"} + plan := &PlanResult{TreeHash: "old"} + opts := &PlanOptions{SourceBasePath: t.TempDir()} + ex := NewExecutor(plan, opts, w, slog.Default()) + + got, err := ex.Execute("user1") + if err == nil { + t.Fatalf("expected stale plan error") + } + if got != nil { + t.Fatalf("expected nil result on stale plan, got %#v", got) + } +} + +func TestExecutor_Create_HappyPath_StripsFrontmatter(t *testing.T) { + tmp := t.TempDir() + writeTmp(t, tmp, "a.md", "---\ntitle: X\n---\n\n# Heading\nBody") + + w := &fakeExecWiki{hash: "h1"} + plan := &PlanResult{ + TreeHash: "h1", + Items: []PlanItem{ + {SourcePath: "a.md", TargetPath: "docs/a", Title: "A", Kind: tree.NodeKindPage, Action: PlanActionCreate}, + }, + } + opts := &PlanOptions{SourceBasePath: tmp} + + ex := NewExecutor(plan, opts, w, slog.Default()) + + res, err := ex.Execute("user1") + if err != nil { + t.Fatalf("Execute err: %v", err) + } + + if res.ImportedCount != 1 || res.SkippedCount != 0 { + t.Fatalf("counts imported=%d skipped=%d", res.ImportedCount, res.SkippedCount) + } + if len(res.Items) != 1 || res.Items[0].Action != ExecutionActionCreated { + t.Fatalf("item result: %#v", res.Items) + } + if w.ensureCalls != 1 || w.updateCalls != 1 { + t.Fatalf("calls ensure=%d update=%d", w.ensureCalls, w.updateCalls) + } + + if w.lastUpdatedContent == nil { + t.Fatalf("expected content to be passed to UpdatePage") + } + if strings.Contains(*w.lastUpdatedContent, "title: X") || strings.Contains(*w.lastUpdatedContent, "---") { + t.Fatalf("frontmatter was not stripped, got: %q", *w.lastUpdatedContent) + } + if !strings.Contains(*w.lastUpdatedContent, "# Heading") { + t.Fatalf("expected body content, got: %q", *w.lastUpdatedContent) + } + + if res.TreeHashBefore != 
"h1" { + t.Fatalf("TreeHashBefore = %q", res.TreeHashBefore) + } + if res.TreeHash == "h1" { + t.Fatalf("expected TreeHash to change (fake changes it), got %q", res.TreeHash) + } +} + +func TestExecutor_Skip_DoesNotCallWiki(t *testing.T) { + tmp := t.TempDir() + w := &fakeExecWiki{hash: "h1"} + plan := &PlanResult{ + TreeHash: "h1", + Items: []PlanItem{ + {SourcePath: "a.md", TargetPath: "docs/a", Action: PlanActionSkip}, + }, + } + opts := &PlanOptions{SourceBasePath: tmp} + + ex := NewExecutor(plan, opts, w, slog.Default()) + res, err := ex.Execute("user1") + if err != nil { + t.Fatalf("Execute err: %v", err) + } + + if res.SkippedCount != 1 || res.ImportedCount != 0 { + t.Fatalf("counts imported=%d skipped=%d", res.ImportedCount, res.SkippedCount) + } + if w.ensureCalls != 0 || w.updateCalls != 0 { + t.Fatalf("expected no wiki calls, got ensure=%d update=%d", w.ensureCalls, w.updateCalls) + } +} + +func TestExecutor_Create_EnsurePathError_SkipsItem(t *testing.T) { + tmp := t.TempDir() + writeTmp(t, tmp, "a.md", "Body") + + w := &fakeExecWiki{ + hash: "h1", + ensureFn: func(userID, targetPath, title string, kind *tree.NodeKind) (*tree.Page, error) { + return nil, errors.New("boom") + }, + } + plan := &PlanResult{ + TreeHash: "h1", + Items: []PlanItem{ + {SourcePath: "a.md", TargetPath: "docs/a", Title: "A", Kind: tree.NodeKindPage, Action: PlanActionCreate}, + }, + } + opts := &PlanOptions{SourceBasePath: tmp} + + ex := NewExecutor(plan, opts, w, slog.Default()) + res, err := ex.Execute("user1") + if err != nil { + t.Fatalf("Execute err: %v", err) + } + if res.SkippedCount != 1 || res.ImportedCount != 0 { + t.Fatalf("counts imported=%d skipped=%d", res.ImportedCount, res.SkippedCount) + } + if res.Items[0].Error == nil || *res.Items[0].Error == "" { + t.Fatalf("expected error message") + } + if w.updateCalls != 0 { + t.Fatalf("UpdatePage should not be called") + } +} + +func TestExecutor_UnknownAction_SkipsItem(t *testing.T) { + tmp := t.TempDir() + w := 
&fakeExecWiki{hash: "h1"} + plan := &PlanResult{ + TreeHash: "h1", + Items: []PlanItem{ + {SourcePath: "a.md", TargetPath: "docs/a", Action: PlanActionUpdate}, // not handled in switch + }, + } + opts := &PlanOptions{SourceBasePath: tmp} + + ex := NewExecutor(plan, opts, w, slog.Default()) + res, err := ex.Execute("user1") + if err != nil { + t.Fatalf("Execute err: %v", err) + } + if res.SkippedCount != 1 { + t.Fatalf("SkippedCount=%d", res.SkippedCount) + } + if res.Items[0].Error == nil || *res.Items[0].Error != "unknown action" { + t.Fatalf("Error=%#v", res.Items[0].Error) + } +} diff --git a/internal/importer/fixtures/fixture-1.zip b/internal/importer/fixtures/fixture-1.zip new file mode 100644 index 0000000000000000000000000000000000000000..d95b4b32f7a4ceef7abb13936b39d395d853bdd6 GIT binary patch literal 1206 zcmWIWW@Zs#0D)y)7BOH3l;C7gU`R_%EGaEYE!GbW;bma2{8*A%1H>S@w1S&~k>v$5 z0|S@{02{->z`?K>$ruYp1_nW7V>0tnQY-XwQ^3X*0gWjJVi1jHoaqEdzas`buHS8s zhTr_omm~1z=7yf2=~0t!OnvOb?$;k|GTWoC{@&)#P3B(rdT*K3zPqzwQ()Ov`4mxq z9bUh)8$UkoN(^p(-x6{`Lep#wr=tT#3gL^NpVUU=HwR=ivP& z`|1=hR6)Uk2&j!n0mTlK5kU^9+|;7nL||ya!m1w_Mlg&PR=)j?d50Z1T)*2LoxW>p zQNLalhsiNRt;0(W94YjlUl^@-@G>YT)xl1DWLW9`K2;u`$@V>B#Rca ztlVZ>u%$0YuHv|$xQpQt36sC92nx3-**9z4hmM(Y7^*W4BH0k~|ncckP}h#$7>2_Wv!p`pl+(cJaxJyS(#~ z*1r!6>^p5@%LodL-LfGIRs#bAbPM$d>!BL_~uDs|)L*C)Ef@fd2bgnsXvHnxOde2R*u%)q "my-guides/intro" (SlugService.NormalizePath + NormalizeFilename) + tmp := t.TempDir() + writeFile(t, tmp, "My Guides/Intro.md", "# Intro") + + wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "My Guides/Intro.md"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "docs", + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + if len(res.Errors) != 0 { + t.Fatalf("Errors = %#v", res.Errors) + } + if res.Items[0].TargetPath != "docs/my-guides/intro" { + t.Fatalf("TargetPath = %q (want 
docs/my-guides/intro)", res.Items[0].TargetPath) + } +} + +func TestPlanner_analyzeEntry_InvalidSourceDirSegment_ReturnsError(t *testing.T) { + // NormalizePath(validate=true) nutzt IsValidSlug() nach slug.Make(). + // Ein Segment wie "!!!" sluggt zu "" => invalid. + tmp := t.TempDir() + writeFile(t, tmp, "!!!/a.md", "# A") + + wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "!!!/a.md"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "docs", + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + if len(res.Items) != 0 { + t.Fatalf("Items len = %d (want 0)", len(res.Items)) + } + if len(res.Errors) != 1 { + t.Fatalf("Errors len = %d (want 1)", len(res.Errors)) + } + // optional: grobe Assertion, dass es ein Validate-Fehler ist + if res.Errors[0] == "" { + t.Fatalf("unexpected error: %v", res.Errors[0]) + } +} diff --git a/internal/importer/zip_extractor.go b/internal/importer/zip_extractor.go new file mode 100644 index 00000000..970b2b30 --- /dev/null +++ b/internal/importer/zip_extractor.go @@ -0,0 +1,107 @@ +package importer + +import ( + "archive/zip" + "fmt" + "io" + "log/slog" + "os" + "path/filepath" + "strings" +) + +type ZipExtractor struct { + log *slog.Logger +} + +func NewZipExtractor() *ZipExtractor { + return &ZipExtractor{ + log: slog.Default().With("component", "ZipExtractor"), + } +} + +func (x *ZipExtractor) ExtractToTemp(zipPath string) (*ZipWorkspace, error) { + r, err := zip.OpenReader(zipPath) + if err != nil { + return nil, fmt.Errorf("open zip: %w", err) + } + defer r.Close() + + root, err := os.MkdirTemp("", "import-*") + if err != nil { + return nil, fmt.Errorf("mkdtemp: %w", err) + } + + ws := &ZipWorkspace{Root: root} + // Helper to clean up and return error + fail := func(e error) (*ZipWorkspace, error) { + if err = ws.Cleanup(); err != nil { + x.log.Error("cleanup failed", "error", err) + } + return nil, e 
+ } + + for _, f := range r.File { + name := strings.TrimSpace(f.Name) + if name == "" { + continue + } + if f.FileInfo().IsDir() { + continue + } + + destPath, err := safeJoin(ws.Root, name) + if err != nil { + return fail(fmt.Errorf("invalid zip entry %q: %w", f.Name, err)) + } + + if err := os.MkdirAll(filepath.Dir(destPath), 0o755); err != nil { + return fail(fmt.Errorf("mkdir: %w", err)) + } + + // Extract single file in inner scope to ensure deterministic cleanup per iteration + if err := func() error { + rc, err := f.Open() + if err != nil { + return fmt.Errorf("open zip entry: %w", err) + } + defer rc.Close() + + out, err := os.OpenFile(destPath, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0o644) + if err != nil { + return fmt.Errorf("create file: %w", err) + } + defer func() { + if err := out.Close(); err != nil { + x.log.Error("close failed", "error", err) + } + }() + + if _, err := io.Copy(out, rc); err != nil { + return fmt.Errorf("write file: %w", err) + } + + return nil + }(); err != nil { + return fail(err) + } + } + + return ws, nil +} + +func safeJoin(baseDir, zipEntryName string) (string, error) { + clean := filepath.Clean(filepath.FromSlash(zipEntryName)) + if filepath.IsAbs(clean) { + return "", fmt.Errorf("absolute path not allowed") + } + dest := filepath.Join(baseDir, clean) + + baseClean := filepath.Clean(baseDir) + string(filepath.Separator) + destClean := filepath.Clean(dest) + + if !strings.HasPrefix(destClean+string(filepath.Separator), baseClean) { + return "", fmt.Errorf("path traversal detected: %q", zipEntryName) + } + return destClean, nil +} diff --git a/internal/importer/zip_extractor_test.go b/internal/importer/zip_extractor_test.go new file mode 100644 index 00000000..e9e89e52 --- /dev/null +++ b/internal/importer/zip_extractor_test.go @@ -0,0 +1,66 @@ +package importer + +import ( + "os" + "path/filepath" + "testing" +) + +func TestZipExtractor_ValidateExtractedFiles(t *testing.T) { + currentDir, err := os.Getwd() + if err != nil { + 
t.Fatalf("Failed to get current directory: %v", err) + } + zipPath := "fixtures/fixture-1.zip" + + extractor := NewZipExtractor() + ws, err := extractor.ExtractToTemp(filepath.Join(currentDir, zipPath)) + if err != nil { + t.Fatalf("ExtractToTemp failed: %v", err) + } + defer func() { + if err := ws.Cleanup(); err != nil { + t.Fatalf("Cleanup failed: %v", err) + } + }() + + // Check if expected files exist + expectedFiles := []string{ + "home.md", + "features/index.md", + "features/mermaind.md", + } + + for _, relPath := range expectedFiles { + fullPath := filepath.Join(ws.Root, relPath) + if _, err := os.Stat(fullPath); os.IsNotExist(err) { + t.Errorf("Expected file %s does not exist", relPath) + } + } +} + +func TestZipExtractor_Cleanup(t *testing.T) { + currentDir, err := os.Getwd() + if err != nil { + t.Fatalf("Failed to get current directory: %v", err) + } + zipPath := "fixtures/fixture-1.zip" + + extractor := NewZipExtractor() + ws, err := extractor.ExtractToTemp(filepath.Join(currentDir, zipPath)) + if err != nil { + t.Fatalf("ExtractToTemp failed: %v", err) + } + + workspaceRoot := ws.Root + + // Cleanup + if err := ws.Cleanup(); err != nil { + t.Fatalf("Cleanup failed: %v", err) + } + + // Verify cleanup + if _, err := os.Stat(workspaceRoot); !os.IsNotExist(err) { + t.Errorf("Workspace root %s still exists after cleanup", workspaceRoot) + } +} diff --git a/internal/importer/zip_workspace.go b/internal/importer/zip_workspace.go new file mode 100644 index 00000000..df880a0a --- /dev/null +++ b/internal/importer/zip_workspace.go @@ -0,0 +1,14 @@ +package importer + +import "os" + +type ZipWorkspace struct { + Root string +} + +func (ws *ZipWorkspace) Cleanup() error { + if ws == nil || ws.Root == "" { + return nil + } + return os.RemoveAll(ws.Root) +} diff --git a/internal/wiki/wiki.go b/internal/wiki/wiki.go index d7832211..025bfd29 100644 --- a/internal/wiki/wiki.go +++ b/internal/wiki/wiki.go @@ -235,6 +235,10 @@ func (w *Wiki) GetTree() *tree.PageNode { 
return w.tree.GetTree() } +func (w *Wiki) TreeHash() string { + return w.tree.TreeHash() +} + func (w *Wiki) CreatePage(userID string, parentID *string, title string, slug string, kind *tree.NodeKind) (*tree.Page, error) { ve := errors.NewValidationErrors() @@ -1019,3 +1023,7 @@ func (w *Wiki) DeleteBrandingFavicon() error { func (w *Wiki) GetBrandingService() *branding.BrandingService { return w.branding } + +func (w *Wiki) GetSlugService() *tree.SlugService { + return w.slug +} diff --git a/ui/leafwiki-ui/src/components/UserToolbar.tsx b/ui/leafwiki-ui/src/components/UserToolbar.tsx index 8d7197b9..06502e66 100644 --- a/ui/leafwiki-ui/src/components/UserToolbar.tsx +++ b/ui/leafwiki-ui/src/components/UserToolbar.tsx @@ -75,6 +75,12 @@ export default function UserToolbar() { > Branding Settings + navigate('/settings/importer')} + > + Import + -
-

Branding Settings

-
-

Site Name

-

+

+

Branding Settings

+
+

Site Name

+

The name displayed in the header, page titles, and login screen.

-
+
-
-

Logo

-

+

+

Logo

+

The logo displayed in the header next to the site name.

-
- - Current Logo: - +
+ Current Logo: {logoFile ? ( <> Logo
-
+
{' '} Upload Image -

+

Accepts {logoExts.map((ext) => ext.toUpperCase()).join(', ')}, max size {(maxLogoSize / (1024 * 1024)).toFixed(1)} MB

-
-

Favicon

-

+

+

Favicon

+

The icon displayed in the browser tab.

-
- - Current Favicon: - +
+ Current Favicon: {faviconFile ? ( <> {' '} Favicon
-
+
Upload Favicon -

+

Accepts {faviconExts.map((ext) => ext.toUpperCase()).join(', ')}, max size {(maxFaviconSize / (1024 * 1024)).toFixed(1)} MB

-
+
+
+ +
+ {importPlan && ( +
+

Import Plan

+ +
+
+ + + + + + + + + + + +
ID:{importPlan.id}
+ Tree Hash: + {importPlan.tree_hash}
+
+
+
+ )} + {importPlan && importPlan.items.length > 0 && ( + <> +
+

+ Items ({importPlan.items.length}) +

+
+
+
+ + + + + + + + + + + + + {importPlan.items.map((item) => ( + + + + + + + + + ))} + +
+ Source Path + + Target Path + TitleKind + Action + Notes
+ {item.source_path} + + {item.target_path} + + {item.title} + + + {item.kind} + + + + {item.action} + + + {item.notes ? item.notes.join(', ') : ''} +
+
+
+
+ +
+ + )} + {importResult && ( +
+

Import Result

+ +
+
+ + + + + + + + + + + + + + + +
+ Imported Count: + {importResult.imported_count}
+ Updated Count: + {importResult.updated_count}
+ Skipped Count: + {importResult.skipped_count}
+
+
+
+ )} + {importResult && importResult.items.length > 0 && ( +
+

+ Result Items ({importResult.items.length}) +

+
+
+ + + + + + + + + + + {importResult.items.map((item) => ( + + + + + + + ))} + +
+ Source Path + + Target Path + ActionError
+ {item.source_path} + + {item.target_path} + + + {item.action} + + + {item.error ?? ''} +
+
+
+
+ )} +
+ + ) +} diff --git a/ui/leafwiki-ui/src/features/importer/useToolbarActions.tsx b/ui/leafwiki-ui/src/features/importer/useToolbarActions.tsx new file mode 100644 index 00000000..6e129321 --- /dev/null +++ b/ui/leafwiki-ui/src/features/importer/useToolbarActions.tsx @@ -0,0 +1,13 @@ +// Hook to provide toolbar actions for the page viewer + +import { useEffect } from 'react' +import { useToolbarStore } from '../toolbar/toolbar' + +// Hook to set up toolbar actions based on app mode and read-only status +export function useToolbarActions() { + const setButtons = useToolbarStore((state) => state.setButtons) + + useEffect(() => { + setButtons([]) + }, [setButtons]) +} diff --git a/ui/leafwiki-ui/src/features/router/router.tsx b/ui/leafwiki-ui/src/features/router/router.tsx index d3cd7a9e..2cf0f0d0 100644 --- a/ui/leafwiki-ui/src/features/router/router.tsx +++ b/ui/leafwiki-ui/src/features/router/router.tsx @@ -2,6 +2,7 @@ import { createBrowserRouter, Navigate, RouteObject } from 'react-router-dom' import LoginForm from '../auth/LoginForm' import BrandingSettings from '../branding/BrandingSettings' import PageEditor from '../editor/PageEditor' +import Importer from '../importer/Importer' import RootRedirect from '../page/RootRedirect' import UserManagement from '../users/UserManagement' import PageViewer from '../viewer/PageViewer' @@ -50,6 +51,16 @@ export const createLeafWikiRouter = ( ), }, + { + path: '/settings/importer', + element: isReadOnlyViewer ? ( + + ) : ( + + + + ), + }, { path: '/e/*', element: isReadOnlyViewer ? ( diff --git a/ui/leafwiki-ui/src/features/users/UserManagement.tsx b/ui/leafwiki-ui/src/features/users/UserManagement.tsx index 899d652a..c512919c 100644 --- a/ui/leafwiki-ui/src/features/users/UserManagement.tsx +++ b/ui/leafwiki-ui/src/features/users/UserManagement.tsx @@ -30,45 +30,35 @@ export default function UserManagement() { return ( <> -
-

User Management

+
+

User Management

-
+
-
-
- - +
+
+
+ - - - - + + + + {loading && ( - )} {!loading && users.length === 0 && ( - @@ -76,26 +66,22 @@ export default function UserManagement() { {!loading && users.length > 0 && users.map((user) => ( - - - - + + + -
- Username - EmailRole - Actions - UsernameEmailRoleActions
+ Loading users...
+ No users found.
- {user.username} - - {user.email} - +
{user.username}{user.email} {user.role} -
+
+
diff --git a/ui/leafwiki-ui/src/index.css b/ui/leafwiki-ui/src/index.css index bea28afe..9c299923 100644 --- a/ui/leafwiki-ui/src/index.css +++ b/ui/leafwiki-ui/src/index.css @@ -935,71 +935,6 @@ @apply hover:bg-error/10; } - /* user management */ - .user-management { - @apply mx-auto max-w-4xl p-6; - } - - .user-management__title { - @apply text-interface-text mb-4 text-2xl font-bold; - } - - .user-management__header-actions { - @apply flex justify-end; - } - - .user-management__table-card { - @apply border-surface-border bg-surface mt-4 rounded-md border shadow-sm; - } - - .user-management__table-scroll { - @apply overflow-x-auto; - } - - .user-management__table { - @apply w-full text-sm; - } - - .user-management__table-head { - @apply bg-surface-alt text-left; - } - - .user-management__table-header-cell { - @apply text-interface-text p-3; - } - - .user-management__table-body-message { - @apply text-muted p-4 text-center; - } - - .user-management__table-row { - @apply border-surface-border border-t; - } - - .user-management__table-cell { - @apply text-interface-text p-3; - } - - .user-management__actions-cell { - @apply p-3; - } - - .user-management__actions { - @apply flex gap-2; - } - - .user-management__role-pill { - @apply rounded px-2 py-1 text-xs font-medium; - } - - .user-management__role-pill--admin { - @apply bg-brand/10 text-brand; - } - - .user-management__role-pill--default { - @apply bg-surface-alt text-interface-text; - } - .breadcrumbs-nav { @apply text-muted mr-2 flex w-full flex-1 grow text-sm; } @@ -1030,8 +965,7 @@ } .page-viewer { - @apply mx-auto w-full max-w-6xl px-8 pt-8 pb-0; - @apply flex min-h-full flex-col; + @apply mx-auto flex min-h-full w-full max-w-6xl flex-col px-8 pt-8 pb-0; } .page-viewer__body { @@ -1278,62 +1212,131 @@ @apply bg-destructive text-destructive-foreground hover:bg-destructive/90 shadow-sm; } - .branding-settings { - @apply mx-auto max-w-2xl p-6; + .settings { + @apply mx-auto w-full max-w-6xl px-8 pt-8 pb-4; } - 
.branding-settings__title { + .settings__title { @apply text-interface-text mb-6 text-2xl font-bold; } - .branding-settings__section { + .settings__section { @apply border-surface-border bg-surface mb-6 rounded-lg border p-6; } - .branding-settings__section-title { + .settings__section-title { @apply text-interface-text mb-2 text-lg font-semibold; } - .branding-settings__section-description { + .settings__section-description { @apply text-muted mb-4 text-sm; } - .branding-settings__field { + .settings__field { @apply mb-4; } - .branding-settings__preview { + .settings__preview { @apply bg-surface-alt mb-4 flex items-center gap-3 rounded-md p-4; } - .branding-settings__preview-label { + .settings__preview-label { @apply text-muted text-sm; } - .branding-settings__preview-emoji { + .settings__preview-emoji { @apply text-3xl; } - .branding-settings__preview-image { + .settings__preview-image { @apply h-10 w-auto; } - .branding-settings__preview-favicon { + .settings__preview-favicon { @apply h-8 w-8; } - .branding-settings__preview-placeholder { + .settings__preview-placeholder { @apply text-muted text-sm italic; } - .branding-settings__hint { + .settings__hint { @apply text-muted mt-1 text-xs; } - .branding-settings__actions { + .settings__actions { @apply flex justify-end gap-2; } + .settings__header-actions { + @apply flex justify-end; + } + + .settings__table-card { + @apply border-surface-border bg-surface mt-4 rounded-md border shadow-sm; + } + + .settings__table-scroll { + @apply overflow-x-auto; + } + + .settings__table { + @apply w-full text-sm; + } + + .importer .settings__table { + @apply text-xs; + } + + .settings__table-head { + @apply bg-surface-alt text-left; + } + + .settings__table-header-cell { + @apply text-interface-text p-3; + } + + .settings__table-body-message { + @apply text-muted p-4 text-center; + } + + .settings__table-row { + @apply border-surface-border border-t; + } + + .settings__table-cell { + @apply text-interface-text p-3; + } + + 
.settings__actions-cell { + @apply p-3; + } + + .settings__role-pill, + .settings__pill { + @apply rounded px-2 py-1 text-xs font-medium; + } + + .settings__pill-success { + @apply bg-success/10; + } + + .settings__pill-warning { + @apply bg-warning/10; + } + + .settings__pill-error { + @apply bg-error/10 text-error; + } + + .settings__role-pill--admin { + @apply bg-brand/10 text-brand; + } + + .settings__role-pill--default { + @apply bg-surface-alt text-interface-text; + } + .child-list__section { @apply border-surface-border bg-surface mb-2 rounded-lg border px-4 py-2; } diff --git a/ui/leafwiki-ui/src/lib/api/import.ts b/ui/leafwiki-ui/src/lib/api/import.ts new file mode 100644 index 00000000..7a244389 --- /dev/null +++ b/ui/leafwiki-ui/src/lib/api/import.ts @@ -0,0 +1,58 @@ +import { fetchWithAuth } from './auth' + +export type ImportPlan = { + id: string + tree_hash: string + items: ImportPlanItem[] + errors: string[] +} + +export type ImportPlanItem = { + source_path: string + target_path: string + title: string + desired_slug: string + kind: 'page' | 'section' + exists: boolean + existing_id: string | null + action: 'create' | 'update' | 'skip' + conflicts: string[] | null + notes: string[] | null +} + +export type ImportResult = { + imported_count: number + updated_count: number + skipped_count: number + items: { + source_path: string + target_path: string + action: 'created' | 'updated' | 'skipped' | 'conflicted' + error?: string + }[] + tree_hash: string + tree_hash_before: string +} + +export async function createImportPlanFromZip(file: File): Promise { + const formData = new FormData() + formData.append('file', file) + + return (await fetchWithAuth('/api/import/plan', { + method: 'POST', + body: formData, + headers: {}, // Let browser set Content-Type for FormData + })) as ImportPlan +} + +export async function getImportPlan(): Promise { + return (await fetchWithAuth('/api/import/plan', { + method: 'GET', + })) as ImportPlan +} + +export async function 
executeImportPlan(): Promise { + return (await fetchWithAuth('/api/import/execute', { + method: 'POST', + })) as ImportResult +} diff --git a/ui/leafwiki-ui/src/stores/import.ts b/ui/leafwiki-ui/src/stores/import.ts new file mode 100644 index 00000000..8c11bfc1 --- /dev/null +++ b/ui/leafwiki-ui/src/stores/import.ts @@ -0,0 +1,81 @@ +import * as importAPI from '@/lib/api/import' +import { toast } from 'sonner' +import { create } from 'zustand' +import { useTreeStore } from './tree' + +// Helper to normalize error messages from various error types +function getErrorMessage(err: unknown): string { + if (err instanceof Error) { + return err.message + } + if (typeof err === 'object' && err !== null) { + const errObj = err as Record + if (typeof errObj.error === 'string') { + return errObj.error + } + if (typeof errObj.message === 'string') { + return errObj.message + } + } + return String(err) +} + +type ImportStore = { + creatingImportPlan: boolean + executingImportPlan: boolean + importPlan: importAPI.ImportPlan | null + importResult: importAPI.ImportResult | null + createImportPlan: (sourcePath: File) => Promise + loadImportPlan: () => Promise + executeImportPlan: () => Promise +} + +export const useImportStore = create((set, get) => ({ + importPlan: null, + creatingImportPlan: false, + executingImportPlan: false, + importResult: null, + createImportPlan: async (sourcePath: File) => { + set({ creatingImportPlan: true, importPlan: null, importResult: null }) + try { + const importPlan = await importAPI.createImportPlanFromZip(sourcePath) + toast.success('Import plan created successfully') + set({ importPlan }) + } catch (err) { + toast.error('Failed to create import plan: ' + getErrorMessage(err)) + } finally { + set({ creatingImportPlan: false }) + } + }, + loadImportPlan: async () => { + set({ creatingImportPlan: true, importPlan: null, importResult: null }) + try { + const importPlan = await importAPI.getImportPlan() + set({ importPlan }) + } catch (err) { + 
toast.error('Failed to load import plan: ' + getErrorMessage(err)) + return + } finally { + set({ creatingImportPlan: false }) + } + }, + executeImportPlan: async () => { + const importPlan = get().importPlan + if (importPlan === null) { + toast.error('No import plan to execute') + return + } + try { + set({ executingImportPlan: true, importResult: null }) + const importResult = await importAPI.executeImportPlan() + toast.success('Import completed successfully') + set({ importPlan: null, importResult }) + } catch (err) { + toast.error('Failed to execute import plan: ' + getErrorMessage(err)) + } finally { + set({ executingImportPlan: false }) + // reload tree + useTreeStore.getState().reloadTree() + } + }, +})) From 6c5cdf52d419c6bce1cc8b04c21d164df3077de3 Mon Sep 17 00:00:00 2001 From: perber Date: Mon, 26 Jan 2026 20:56:13 +0100 Subject: [PATCH 09/11] feat: add reconstruct tree functionality (#632) --- cmd/leafwiki/main.go | 50 ++- internal/core/markdown/errors.go | 5 + .../core/{tree => markdown}/frontmatter.go | 46 ++- .../{tree => markdown}/frontmatter_test.go | 4 +- internal/core/markdown/markdown.go | 114 ++++++ internal/core/markdown/markdown_test.go | 92 +++++ internal/core/tools/reconstruct_tree.go | 10 + internal/core/tree/errors.go | 1 - internal/core/tree/node_store.go | 199 +++++++++- .../core/tree/node_store_reconstruct_test.go | 322 ++++++++++++++++ internal/core/tree/node_store_test.go | 12 +- internal/core/tree/tree_service.go | 79 +++- internal/core/tree/tree_service_test.go | 350 +++++++++++++++++- internal/http/router.go | 25 ++ internal/importer/executor.go | 10 +- internal/importer/planner.go | 97 ++--- internal/importer/planner_test.go | 145 ++++---- internal/search/sqlite_index.go | 4 +- internal/test_utils/common.go | 15 + readme.md | 67 ++++ 20 files changed, 1434 insertions(+), 213 deletions(-) create mode 100644 internal/core/markdown/errors.go rename internal/core/{tree => markdown}/frontmatter.go (73%) rename internal/core/{tree => 
markdown}/frontmatter_test.go (99%) create mode 100644 internal/core/markdown/markdown.go create mode 100644 internal/core/markdown/markdown_test.go create mode 100644 internal/core/tools/reconstruct_tree.go create mode 100644 internal/core/tree/node_store_reconstruct_test.go diff --git a/cmd/leafwiki/main.go b/cmd/leafwiki/main.go index 92e32284..dc3a3921 100644 --- a/cmd/leafwiki/main.go +++ b/cmd/leafwiki/main.go @@ -3,7 +3,6 @@ package main import ( "flag" "fmt" - "log" "log/slog" "os" "strings" @@ -21,6 +20,7 @@ func printUsage() { leafwiki --jwt-secret --admin-password [--host ] [--port ] [--data-dir ] leafwiki --disable-auth [--host ] [--port ] [--data-dir ] leafwiki reset-admin-password + leafwiki [--data-dir ] reconstruct-tree leafwiki --help Options: @@ -43,6 +43,7 @@ func printUsage() { LEAFWIKI_PORT LEAFWIKI_DATA_DIR LEAFWIKI_JWT_SECRET + LEAFWIKI_LOG_LEVEL LEAFWIKI_ADMIN_PASSWORD LEAFWIKI_PUBLIC_ACCESS LEAFWIKI_ALLOW_INSECURE @@ -56,8 +57,12 @@ func printUsage() { func setupLogger() { level := slog.LevelInfo - if os.Getenv("LOG_LEVEL") == "debug" { + if os.Getenv("LEAFWIKI_LOG_LEVEL") == "debug" { level = slog.LevelDebug + } else if (os.Getenv("LEAFWIKI_LOG_LEVEL")) == "error" { + level = slog.LevelError + } else if (os.Getenv("LEAFWIKI_LOG_LEVEL")) == "warn" { + level = slog.LevelWarn } handler := slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{ @@ -68,6 +73,11 @@ func setupLogger() { slog.SetDefault(slog.New(handler)) } +func fail(msg string, args ...any) { + slog.Default().Error(msg, args...) 
+ os.Exit(1) +} + func main() { setupLogger() @@ -110,12 +120,28 @@ func main() { case "reset-admin-password": user, err := tools.ResetAdminPassword(dataDir) if err != nil { - log.Fatalf("Password reset failed: %v", err) + fail("Password reset failed", "error", err) } fmt.Println("Admin password reset successfully.") fmt.Printf("New password for user %s: %s\n", user.Username, user.Password) return + case "reconstruct-tree": + // Ensure data directory exists before reconstruction + if _, err := os.Stat(dataDir); err != nil { + if os.IsNotExist(err) { + if err := os.MkdirAll(dataDir, 0755); err != nil { + fail("Failed to create data directory", "error", err) + } + } else { + fail("Failed to access data directory", "error", err) + } + } + if err := tools.ReconstructTreeFromFS(dataDir); err != nil { + fail("Tree reconstruction failed", "error", err) + } + fmt.Println("Tree reconstructed successfully from filesystem.") + return case "--help", "-h", "help": printUsage() return @@ -128,27 +154,27 @@ func main() { if disableAuth { publicAccess = true - log.Printf("WARNING: Authentication disabled. Wiki is publicly accessible without authentication.") + slog.Default().Warn("Authentication disabled. Wiki is publicly accessible without authentication.") } if allowInsecure { - log.Printf("WARNING: allow-insecure enabled. Auth cookies may be transmitted over plain HTTP (INSECURE).") + slog.Default().Warn("allow-insecure enabled. Auth cookies may be transmitted over plain HTTP (INSECURE).") } // Check if data directory exists if _, err := os.Stat(dataDir); os.IsNotExist(err) { if err := os.MkdirAll(dataDir, 0755); err != nil { - log.Fatalf("Failed to create data directory: %v", err) + fail("Failed to create data directory", "error", err) } } if !disableAuth { if jwtSecret == "" { - log.Fatal("JWT secret is required. Set it using --jwt-secret or LEAFWIKI_JWT_SECRET environment variable.") + fail("JWT secret is required. 
Set it using --jwt-secret or LEAFWIKI_JWT_SECRET environment variable.") } if adminPassword == "" { - log.Fatalf("admin password is required. Set it using --admin-password or LEAFWIKI_ADMIN_PASSWORD environment variable.") + fail("admin password is required. Set it using --admin-password or LEAFWIKI_ADMIN_PASSWORD environment variable.") } } @@ -161,7 +187,7 @@ func main() { AuthDisabled: disableAuth, }) if err != nil { - log.Fatalf("Failed to initialize Wiki: %v", err) + fail("Failed to initialize Wiki", "error", err) } defer w.Close() @@ -180,7 +206,7 @@ func main() { // Start server if err := router.Run(listenAddr); err != nil { - log.Fatalf("Failed to start server: %v", err) + fail("Failed to start server", "error", err) } } @@ -208,7 +234,7 @@ func resolveBool(flagName string, flagVal bool, visited map[string]bool, envVar return b } // If env var is set but invalid, fail fast (helps operators) - log.Fatalf("Invalid value for %s: %q (expected true/false/1/0/yes/no)", envVar, env) + fail("Invalid environment variable value", "variable", envVar, "value", env, "expected", "true/false/1/0/yes/no") } return flagVal // default from flag } @@ -222,7 +248,7 @@ func resolveDuration(flagName string, flagVal time.Duration, visited map[string] return d } // If env var is set but invalid, fail fast (helps operators) - log.Fatalf("Invalid value for %s: %q (expected duration like 24h, 15m)", envVar, env) + fail("Invalid environment variable value", "variable", envVar, "value", env, "expected", "duration like 24h, 15m") } return flagVal // default from flag } diff --git a/internal/core/markdown/errors.go b/internal/core/markdown/errors.go new file mode 100644 index 00000000..7050ea07 --- /dev/null +++ b/internal/core/markdown/errors.go @@ -0,0 +1,5 @@ +package markdown + +import "errors" + +var ErrFrontmatterParse = errors.New("frontmatter parse error") diff --git a/internal/core/tree/frontmatter.go b/internal/core/markdown/frontmatter.go similarity index 73% rename from 
internal/core/tree/frontmatter.go rename to internal/core/markdown/frontmatter.go index defee8b5..7ac94fad 100644 --- a/internal/core/tree/frontmatter.go +++ b/internal/core/markdown/frontmatter.go @@ -1,8 +1,9 @@ -package tree +package markdown import ( "bytes" "errors" + "os" "strings" yaml "gopkg.in/yaml.v3" @@ -13,7 +14,43 @@ type Frontmatter struct { LeafWikiTitle string `yaml:"leafwiki_title,omitempty" json:"title,omitempty"` } -func SplitFrontmatter(md string) (yamlPart string, body string, has bool) { +func (fm *Frontmatter) LoadFrontMatterFromContent(yamlPart string) (has bool, err error) { + if err := yaml.Unmarshal([]byte(yamlPart), fm); err != nil { + return true, errors.Join(ErrFrontmatterParse, err) + } + + /** Check for title also in frontmatter **/ + type titleOnlyStruct struct { + Title string `yaml:"title,omitempty"` + } + var tos titleOnlyStruct + if err := yaml.Unmarshal([]byte(yamlPart), &tos); err == nil { + if tos.Title != "" { + fm.LeafWikiTitle = tos.Title + } + } + + fm.LeafWikiID = fm.stripSingleAndDoubleQuotes(fm.LeafWikiID) + fm.LeafWikiTitle = fm.stripSingleAndDoubleQuotes(fm.LeafWikiTitle) + + return true, nil +} + +func (fm *Frontmatter) stripSingleAndDoubleQuotes(s string) string { + s = strings.Trim(s, `"`) + s = strings.Trim(s, `'`) + return s +} + +func (fm *Frontmatter) LoadFrontMatterFromFile(mdFilePath string) (has bool, err error) { + content, err := os.ReadFile(mdFilePath) + if err != nil { + return false, err + } + return fm.LoadFrontMatterFromContent(string(content)) +} + +func splitFrontmatter(md string) (yamlPart string, body string, has bool) { // BOM-safe + normalize newlines s := strings.TrimPrefix(md, "\ufeff") s = strings.ReplaceAll(s, "\r\n", "\n") @@ -114,7 +151,7 @@ func SplitFrontmatter(md string) (yamlPart string, body string, has bool) { } func ParseFrontmatter(md string) (fm Frontmatter, body string, has bool, err error) { - yamlPart, body, has := SplitFrontmatter(md) + yamlPart, body, has := 
splitFrontmatter(md) if !has { return Frontmatter{}, md, false, nil } @@ -122,6 +159,9 @@ func ParseFrontmatter(md string) (fm Frontmatter, body string, has bool, err err if err := yaml.Unmarshal([]byte(yamlPart), &fm); err != nil { return Frontmatter{}, md, true, errors.Join(ErrFrontmatterParse, err) } + + fm.LeafWikiID = fm.stripSingleAndDoubleQuotes(fm.LeafWikiID) + fm.LeafWikiTitle = fm.stripSingleAndDoubleQuotes(fm.LeafWikiTitle) return fm, body, true, nil } diff --git a/internal/core/tree/frontmatter_test.go b/internal/core/markdown/frontmatter_test.go similarity index 99% rename from internal/core/tree/frontmatter_test.go rename to internal/core/markdown/frontmatter_test.go index 8791f509..bd704cd6 100644 --- a/internal/core/tree/frontmatter_test.go +++ b/internal/core/markdown/frontmatter_test.go @@ -1,4 +1,4 @@ -package tree +package markdown import ( "errors" @@ -94,7 +94,7 @@ func TestSplitFrontmatter(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - fm, body, has := SplitFrontmatter(tt.input) + fm, body, has := splitFrontmatter(tt.input) if has != tt.wantHas { t.Fatalf("has = %v, want %v", has, tt.wantHas) diff --git a/internal/core/markdown/markdown.go b/internal/core/markdown/markdown.go new file mode 100644 index 00000000..d34fde03 --- /dev/null +++ b/internal/core/markdown/markdown.go @@ -0,0 +1,114 @@ +package markdown + +import ( + "errors" + "os" + "path" + "path/filepath" + "strings" + + "github.com/perber/wiki/internal/core/shared" +) + +type MarkdownFile struct { + path string + content string + fm Frontmatter +} + +func LoadMarkdownFile(filePath string) (*MarkdownFile, error) { + if !strings.EqualFold(filepath.Ext(filePath), ".md") { + return nil, errors.New("file is not a markdown file") + } + + raw, err := os.ReadFile(filePath) + if err != nil { + return nil, err + } + + yamlPart, content, has := splitFrontmatter(string(raw)) + + var fm Frontmatter + + if has { + _, err = 
fm.LoadFrontMatterFromContent(string(yamlPart)) + if err != nil { + return nil, err + } + } else { + fm = Frontmatter{} + } + + return &MarkdownFile{ + path: filePath, + content: content, + fm: fm, + }, nil +} + +func NewMarkdownFile(filePath string, content string, fm Frontmatter) *MarkdownFile { + return &MarkdownFile{ + path: filePath, + content: content, + fm: fm, + } +} + +func (mf *MarkdownFile) WriteToFile() error { + fmContent, err := BuildMarkdownWithFrontmatter(mf.fm, string(mf.content)) + if err != nil { + return err + } + + mode := os.FileMode(0o644) + if st, err := os.Stat(mf.path); err == nil { + mode = st.Mode() + } + + return shared.WriteFileAtomic(mf.path, []byte(fmContent), mode) +} + +func (mf *MarkdownFile) GetTitle() (string, error) { + // 1. Frontmatter title + if mf.fm.LeafWikiTitle != "" { + return strings.TrimSpace(mf.fm.LeafWikiTitle), nil + } + + // 2. First heading + title, err := mf.extractTitleFromFirstHeading() + if err == nil && title != "" { + return title, nil + } + + // 3. 
Filename fallback + base := path.Base(mf.path) + name := strings.TrimSuffix(base, path.Ext(base)) + return name, nil +} + +func (mf *MarkdownFile) extractTitleFromFirstHeading() (string, error) { + lines := strings.Split(string(mf.content), "\n") + for _, line := range lines { + line = strings.TrimSpace(line) + if strings.HasPrefix(line, "# ") { + return strings.TrimSpace(strings.TrimPrefix(line, "# ")), nil + } + } + return "", errors.New("no heading found") +} + +func (mf *MarkdownFile) GetContent() string { + return string(mf.content) +} + +func (mf *MarkdownFile) GetPath() string { + return mf.path +} + +func (mf *MarkdownFile) GetFrontmatter() Frontmatter { + return mf.fm +} + +func (mf *MarkdownFile) SetFrontmatterID(id string) { + mf.fm.LeafWikiID = id +} diff --git a/internal/core/markdown/markdown_test.go b/internal/core/markdown/markdown_test.go new file mode 100644 index 00000000..8b669dca --- /dev/null +++ b/internal/core/markdown/markdown_test.go @@ -0,0 +1,92 @@ +package markdown + +import ( + "testing" + + "github.com/perber/wiki/internal/test_utils" +) + +func TestPlanner_extractTitleFromMDFile_FrontmatterTitleWins(t *testing.T) { + tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "t.md", "---\ntitle: FM Title\n---\n\n# Heading") + + mdFile, err := LoadMarkdownFile(abs) + if err != nil { + t.Fatalf("err: %v", err) + } + title, err := mdFile.GetTitle() + if err != nil { + t.Fatalf("err: %v", err) + } + if title != "FM Title" { + t.Fatalf("title = %q", title) + } +} + +func TestPlanner_extractTitleFromMDFile_LeafwikiTitle(t *testing.T) { + tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "t.md", "---\nleafwiki_title: Leaf\n---\n\n# Heading") + + mdFile, err := LoadMarkdownFile(abs) + if err != nil { + t.Fatalf("err: %v", err) + } + title, err := mdFile.GetTitle() + if err != nil { + t.Fatalf("err: %v", err) + } + if title != "Leaf" { + t.Fatalf("title = %q", title) + } +} + +func 
TestPlanner_extractTitleFromMDFile_FirstHeadingFallback(t *testing.T) { + tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "t.md", "no fm\n\n# Heading Only\nx") + + mdFile, err := LoadMarkdownFile(abs) + if err != nil { + t.Fatalf("err: %v", err) + } + title, err := mdFile.GetTitle() + if err != nil { + t.Fatalf("err: %v", err) + } + if title != "Heading Only" { + t.Fatalf("title = %q", title) + } +} + +func TestPlanner_extractTitleFromMDFile_FilenameFallback(t *testing.T) { + tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "some-file.md", "no title") + + mdFile, err := LoadMarkdownFile(abs) + if err != nil { + t.Fatalf("err: %v", err) + } + title, err := mdFile.GetTitle() + if err != nil { + t.Fatalf("err: %v", err) + } + if title != "some-file" { + t.Fatalf("title = %q", title) + } +} + +func TestLoadMarkdownFile_UppercaseExtension(t *testing.T) { + tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "README.MD", "# Uppercase Extension\n\nThis file has .MD extension") + + mdFile, err := LoadMarkdownFile(abs) + if err != nil { + t.Fatalf("expected no error for .MD extension, got: %v", err) + } + title, err := mdFile.GetTitle() + if err != nil { + t.Fatalf("err: %v", err) + } + if title != "Uppercase Extension" { + t.Fatalf("title = %q, want %q", title, "Uppercase Extension") + } +} diff --git a/internal/core/tools/reconstruct_tree.go b/internal/core/tools/reconstruct_tree.go new file mode 100644 index 00000000..726dc9b3 --- /dev/null +++ b/internal/core/tools/reconstruct_tree.go @@ -0,0 +1,10 @@ +package tools + +import ( + "github.com/perber/wiki/internal/core/tree" +) + +func ReconstructTreeFromFS(storageDir string) error { + treeService := tree.NewTreeService(storageDir) + return treeService.ReconstructTreeFromFS() +} diff --git a/internal/core/tree/errors.go b/internal/core/tree/errors.go index bf3a7e5c..63bb9693 100644 --- a/internal/core/tree/errors.go +++ b/internal/core/tree/errors.go @@ -13,7 +13,6 @@ var ErrPageAlreadyExists = 
errors.New("page already exists") var ErrMovePageCircularReference = errors.New("circular reference detected") var ErrPageCannotBeMovedToItself = errors.New("page cannot be moved to itself") var ErrInvalidSortOrder = errors.New("invalid sort order") -var ErrFrontmatterParse = errors.New("frontmatter parse error") var ErrFileNotFound = errors.New("file not found") var ErrDrift = errors.New("drift detected") var ErrInvalidOperation = errors.New("invalid operation") diff --git a/internal/core/tree/node_store.go b/internal/core/tree/node_store.go index f37d3732..f01fdc56 100644 --- a/internal/core/tree/node_store.go +++ b/internal/core/tree/node_store.go @@ -8,8 +8,10 @@ import ( "log/slog" "os" "path/filepath" + "sort" "strings" + "github.com/perber/wiki/internal/core/markdown" "github.com/perber/wiki/internal/core/shared" ) @@ -20,20 +22,30 @@ func fileExists(p string) bool { type ResolvedNode struct { Kind NodeKind - DirPath string // falls folder - FilePath string // falls file (oder folder/index.md) - HasContent bool // bei folder: index.md existiert? 
+ DirPath string + FilePath string + HasContent bool } type NodeStore struct { storageDir string log *slog.Logger + slugger *SlugService } func NewNodeStore(storageDir string) *NodeStore { return &NodeStore{ storageDir: storageDir, log: slog.Default().With("component", "NodeStore"), + slugger: NewSlugService(), + } +} + +// writeIDToMarkdownFile writes a leafwiki_id to a markdown file's frontmatter and logs errors if the write fails +func (f *NodeStore) writeIDToMarkdownFile(mdFile *markdown.MarkdownFile, id string) { + mdFile.SetFrontmatterID(id) + if err := mdFile.WriteToFile(); err != nil { + f.log.Error("could not write leafwiki_id back to file", "path", mdFile.GetPath(), "error", err) } } @@ -79,6 +91,167 @@ func (f *NodeStore) LoadTree(filename string) (*PageNode, error) { return tree, nil } +func (f *NodeStore) ReconstructTreeFromFS() (*PageNode, error) { + root := &PageNode{ + ID: "root", + Slug: "root", + Title: "root", + Parent: nil, + Position: 0, + Children: []*PageNode{}, + Kind: NodeKindSection, + } + + rootDir := filepath.Join(f.storageDir, "root") + + info, err := os.Stat(rootDir) + if err != nil { + if os.IsNotExist(err) { + // No on-disk content yet; return an empty root tree. 
+ return root, nil + } + return nil, fmt.Errorf("stat root dir %s: %w", rootDir, err) + } + + if !info.IsDir() { + return nil, fmt.Errorf("root path %s is not a directory", rootDir) + } + + if err := f.reconstructTreeRecursive(rootDir, root); err != nil { + return nil, fmt.Errorf("reconstruct tree from fs: %w", err) + } + + return root, nil +} +func (f *NodeStore) reconstructTreeRecursive(currentPath string, parent *PageNode) error { + entries, err := os.ReadDir(currentPath) + if err != nil { + return fmt.Errorf("read dir %s: %w", currentPath, err) + } + + // stable, deterministic ordering (case-insensitive, with case-sensitive tie-breaker) + sort.SliceStable(entries, func(i, j int) bool { + li := strings.ToLower(entries[i].Name()) + lj := strings.ToLower(entries[j].Name()) + if li == lj { + return entries[i].Name() < entries[j].Name() + } + return li < lj + }) + + for _, entry := range entries { + name := entry.Name() + + // optional: skip hidden stuff + if strings.HasPrefix(name, ".") { + continue + } + + // defaults + title := name + id, err := shared.GenerateUniqueID() + if err != nil { + return fmt.Errorf("generate unique ID: %w", err) + } + + if entry.IsDir() { + // Normalize and validate the directory name as a slug + normalizedSlug := normalizeSlug(name) + if err := f.slugger.IsValidSlug(normalizedSlug); err != nil { + f.log.Error("skipping directory with invalid slug", "directory", name, "normalized", normalizedSlug, "error", err) + continue + } + + indexPath := filepath.Join(currentPath, name, "index.md") + if fileExists(indexPath) { + mdFile, err := markdown.LoadMarkdownFile(indexPath) + if err != nil { + f.log.Error("could not load index.md", "path", indexPath, "error", err) + // fall back to default title and generated ID, but still add the section and recurse + } else { + title, err = mdFile.GetTitle() + if err != nil { + f.log.Error("could not extract title from index.md", "path", indexPath, "error", err) + // keep default title; still add the 
section and recurse + } + if mdFile.GetFrontmatter().LeafWikiID != "" { + id = mdFile.GetFrontmatter().LeafWikiID + } else { + // Generated ID needs to be written back + f.writeIDToMarkdownFile(mdFile, id) + } + } + } + + child := &PageNode{ + ID: id, + Slug: normalizedSlug, + Title: title, + Parent: parent, + Position: len(parent.Children), + Children: []*PageNode{}, + Kind: NodeKindSection, + } + parent.Children = append(parent.Children, child) + + if err := f.reconstructTreeRecursive(filepath.Join(currentPath, name), child); err != nil { + return err + } + continue + } + + // file + ext := filepath.Ext(name) + if !strings.EqualFold(ext, ".md") { + continue + } + + // Normalize and validate the filename (without .md) as a slug + baseFilename := strings.TrimSuffix(name, ext) + // skip index.md (handled by section case) + if strings.EqualFold(baseFilename, "index") { + continue + } + normalizedSlug := normalizeSlug(baseFilename) + if err := f.slugger.IsValidSlug(normalizedSlug); err != nil { + f.log.Error("skipping file with invalid slug", "file", name, "normalized", normalizedSlug, "error", err) + continue + } + + filePath := filepath.Join(currentPath, name) + + mdFile, err := markdown.LoadMarkdownFile(filePath) + if err != nil { + f.log.Error("could not load markdown file", "path", filePath, "error", err) + continue + } + title, err = mdFile.GetTitle() + if err != nil { + f.log.Error("could not extract title from file", "path", filePath, "error", err) + continue + } + if mdFile.GetFrontmatter().LeafWikiID != "" { + id = mdFile.GetFrontmatter().LeafWikiID + } else { + // Generated ID needs to be written back + f.writeIDToMarkdownFile(mdFile, id) + } + + child := &PageNode{ + ID: id, + Slug: normalizedSlug, + Title: title, + Parent: parent, + Position: len(parent.Children), + Children: nil, + Kind: NodeKindPage, + } + parent.Children = append(parent.Children, child) + } + + return nil +} + func (f *NodeStore) assignParentToChildren(parent *PageNode) { for _, child 
:= range parent.Children { child.Parent = parent @@ -147,8 +320,8 @@ func (f *NodeStore) CreatePage(parentEntry *PageNode, newEntry *PageNode) error } // Build and write file - fm := Frontmatter{LeafWikiID: newEntry.ID} - md, err := BuildMarkdownWithFrontmatter(fm, "# "+newEntry.Title+"\n") + fm := markdown.Frontmatter{LeafWikiID: newEntry.ID} + md, err := markdown.BuildMarkdownWithFrontmatter(fm, "# "+newEntry.Title+"\n") if err != nil { return fmt.Errorf("could not build markdown with frontmatter: %w", err) } @@ -228,8 +401,8 @@ func (f *NodeStore) UpsertContent(entry *PageNode, content string) error { } // Update the file content - fm := Frontmatter{LeafWikiID: strings.TrimSpace(entry.ID), LeafWikiTitle: strings.TrimSpace(entry.Title)} - contentWithFM, err := BuildMarkdownWithFrontmatter(fm, content) + fm := markdown.Frontmatter{LeafWikiID: strings.TrimSpace(entry.ID), LeafWikiTitle: strings.TrimSpace(entry.Title)} + contentWithFM, err := markdown.BuildMarkdownWithFrontmatter(fm, content) if err != nil { return fmt.Errorf("could not build markdown with frontmatter: %w", err) } @@ -519,7 +692,7 @@ func (f *NodeStore) ReadPageContent(entry *PageNode) (string, error) { if err != nil { return "", err } - _, content, _, err := ParseFrontmatter(string(raw)) + _, content, _, err := markdown.ParseFrontmatter(string(raw)) if err != nil { return string(raw), err } @@ -554,19 +727,19 @@ func (f *NodeStore) SyncFrontmatterIfExists(entry *PageNode) error { return fmt.Errorf("read content file: %w", err) } - fm, body, has, err := ParseFrontmatter(string(raw)) + fm, body, has, err := markdown.ParseFrontmatter(string(raw)) if err != nil { return fmt.Errorf("parse frontmatter: %w", err) } if !has { - fm = Frontmatter{} + fm = markdown.Frontmatter{} } // Tree-SoT invariants fm.LeafWikiID = strings.TrimSpace(entry.ID) fm.LeafWikiTitle = strings.TrimSpace(entry.Title) - out, err := BuildMarkdownWithFrontmatter(fm, body) + out, err := markdown.BuildMarkdownWithFrontmatter(fm, body) 
if err != nil { return fmt.Errorf("build markdown: %w", err) } @@ -759,8 +932,8 @@ func (f *NodeStore) ConvertNode(entry *PageNode, target NodeKind) error { return fmt.Errorf("could not move index to page: %w", err) } } else { - fm := Frontmatter{LeafWikiID: entry.ID, LeafWikiTitle: entry.Title} - md, err := BuildMarkdownWithFrontmatter(fm, "") + fm := markdown.Frontmatter{LeafWikiID: entry.ID, LeafWikiTitle: entry.Title} + md, err := markdown.BuildMarkdownWithFrontmatter(fm, "") if err != nil { return err } diff --git a/internal/core/tree/node_store_reconstruct_test.go b/internal/core/tree/node_store_reconstruct_test.go new file mode 100644 index 00000000..6c316a77 --- /dev/null +++ b/internal/core/tree/node_store_reconstruct_test.go @@ -0,0 +1,322 @@ +package tree + +import ( + "path/filepath" + "sort" + "strings" + "testing" + + "github.com/perber/wiki/internal/core/markdown" +) + +func findChildBySlug(t *testing.T, parent *PageNode, slug string) *PageNode { + t.Helper() + for _, ch := range parent.Children { + if ch.Slug == slug { + return ch + } + } + t.Fatalf("child with slug %q not found under %q", slug, parent.Slug) + return nil +} + +func slugs(children []*PageNode) []string { + out := make([]string, 0, len(children)) + for _, c := range children { + out = append(out, c.Slug) + } + return out +} + +// --- tests --- + +func TestNodeStore_ReconstructTreeFromFS_EmptyStorage_ReturnsRoot(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + if tree == nil || tree.ID != "root" || tree.Kind != NodeKindSection { + t.Fatalf("unexpected root: %#v", tree) + } + if tree.Parent != nil { + t.Fatalf("expected root parent nil") + } + if len(tree.Children) != 0 { + t.Fatalf("expected root to have no children, got %d", len(tree.Children)) + } +} + +func TestNodeStore_ReconstructTreeFromFS_BuildsSectionsAndPages_SkipsIndexMdAsPage(t *testing.T) { 
+ tmp := t.TempDir() + store := NewNodeStore(tmp) + + // FS layout: + // /docs/index.md (section content) + // /docs/intro.md (page) + // /readme.md (page at root) + mustMkdir(t, filepath.Join(tmp, "root", "docs")) + + secIndex := `--- +leafwiki_id: sec-docs +leafwiki_title: Documentation +--- +# Section` + mustWriteFile(t, filepath.Join(tmp, "root", "docs", "index.md"), secIndex, 0o644) + + pageIntro := `--- +leafwiki_id: page-intro +leafwiki_title: Introduction +--- +# Intro` + mustWriteFile(t, filepath.Join(tmp, "root", "docs", "intro.md"), pageIntro, 0o644) + + rootPage := `--- +leafwiki_id: page-readme +leafwiki_title: Readme +--- +# Readme` + mustWriteFile(t, filepath.Join(tmp, "root", "readme.md"), rootPage, 0o644) + + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + // root has: docs(section), readme(page) + docs := findChildBySlug(t, tree, "docs") + if docs.Kind != NodeKindSection { + t.Fatalf("expected docs to be section, got %q", docs.Kind) + } + // section title/id from index frontmatter + if docs.ID != "sec-docs" { + t.Fatalf("expected docs.ID=sec-docs, got %q", docs.ID) + } + if docs.Title != "Documentation" { + t.Fatalf("expected docs.Title=Documentation, got %q", docs.Title) + } + + // ensure index.md wasn't turned into a page child + for _, ch := range docs.Children { + if ch.Slug == "index" { + t.Fatalf("index.md must be skipped as page, but found slug index") + } + } + + intro := findChildBySlug(t, docs, "intro") + if intro.Kind != NodeKindPage { + t.Fatalf("expected intro to be page, got %q", intro.Kind) + } + // page title/id from frontmatter + if intro.ID != "page-intro" { + t.Fatalf("expected intro.ID=page-intro, got %q", intro.ID) + } + if intro.Title != "Introduction" { + t.Fatalf("expected intro.Title=Introduction, got %q", intro.Title) + } + + readme := findChildBySlug(t, tree, "readme") + if readme.Kind != NodeKindPage { + t.Fatalf("expected readme to be page, got %q", 
readme.Kind) + } + if readme.ID != "page-readme" { + t.Fatalf("expected readme.ID=page-readme, got %q", readme.ID) + } + if readme.Title != "Readme" { + t.Fatalf("expected readme.Title=Readme, got %q", readme.Title) + } + + // parent pointers + if docs.Parent == nil || docs.Parent.ID != "root" { + t.Fatalf("expected docs parent root, got %#v", docs.Parent) + } + if intro.Parent == nil || intro.Parent.ID != docs.ID { + t.Fatalf("expected intro parent docs, got %#v", intro.Parent) + } +} + +func TestNodeStore_ReconstructTreeFromFS_SectionWithoutIndex_UsesDirNameAsTitle(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + // FS: /emptysec/ (no index.md) + mustMkdir(t, filepath.Join(tmp, "root", "emptysec")) + + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + sec := findChildBySlug(t, tree, "emptysec") + if sec.Kind != NodeKindSection { + t.Fatalf("expected section, got %q", sec.Kind) + } + // title defaults to folder name (per your code) + if sec.Title != "emptysec" { + t.Fatalf("expected title=emptysec, got %q", sec.Title) + } + if strings.TrimSpace(sec.ID) == "" { + t.Fatalf("expected some generated id, got empty") + } +} + +func TestNodeStore_ReconstructTreeFromFS_PageWithoutFrontmatter_FallsBackToHeadlineTitle(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + // FS: /plain.md (no fm) + mustWriteFile(t, filepath.Join(tmp, "root", "plain.md"), "# hello\n", 0o644) + + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + p := findChildBySlug(t, tree, "plain") + if p.Kind != NodeKindPage { + t.Fatalf("expected page, got %q", p.Kind) + } + + // title fallback should be headline + if p.Title != "hello" { + t.Fatalf("expected title fallback to slug 'plain', got %q", p.Title) + } + if strings.TrimSpace(p.ID) == "" { + // should still have generated id (unless you later decide to keep empty) + t.Fatalf("expected 
generated id, got empty") + } +} + +func TestNodeStore_ReconstructTreeFromFS_PositionsAreContiguous(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + // Create several files/dirs + mustWriteFile(t, filepath.Join(tmp, "root", "b.md"), "# b", 0o644) + mustWriteFile(t, filepath.Join(tmp, "root", "a.md"), "# a", 0o644) + mustMkdir(t, filepath.Join(tmp, "root", "zsec")) + + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + // Positions should be 0..n-1 regardless of order + seen := make([]int, 0, len(tree.Children)) + for _, ch := range tree.Children { + seen = append(seen, ch.Position) + } + sort.Ints(seen) + for i := range seen { + if seen[i] != i { + t.Fatalf("expected contiguous positions 0..%d, got %v (slugs=%v)", len(seen)-1, seen, slugs(tree.Children)) + } + } +} + + +func TestNodeStore_ReconstructTreeFromFS_WritesIDsBackToFiles(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + // Create files without leafwiki_id in frontmatter + mustWriteFile(t, filepath.Join(tmp, "root", "no-id.md"), "# No ID", 0o644) + mustMkdir(t, filepath.Join(tmp, "root", "section")) + mustWriteFile(t, filepath.Join(tmp, "root", "section", "index.md"), "# Section No ID", 0o644) + + // Run reconstruction + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + // Get the page and section nodes + page := findChildBySlug(t, tree, "no-id") + section := findChildBySlug(t, tree, "section") + + // Verify that IDs were generated + if page.ID == "" { + t.Fatalf("expected page to have generated ID, got empty") + } + if section.ID == "" { + t.Fatalf("expected section to have generated ID, got empty") + } + + // Now reload the files and check that IDs were written back + pageMd, err := markdown.LoadMarkdownFile(filepath.Join(tmp, "root", "no-id.md")) + if err != nil { + t.Fatalf("failed to reload page: %v", err) + } + if 
pageMd.GetFrontmatter().LeafWikiID != page.ID { + t.Fatalf("expected page frontmatter ID=%q, got %q", page.ID, pageMd.GetFrontmatter().LeafWikiID) + } + + sectionMd, err := markdown.LoadMarkdownFile(filepath.Join(tmp, "root", "section", "index.md")) + if err != nil { + t.Fatalf("failed to reload section index: %v", err) + } + if sectionMd.GetFrontmatter().LeafWikiID != section.ID { + t.Fatalf("expected section frontmatter ID=%q, got %q", section.ID, sectionMd.GetFrontmatter().LeafWikiID) + } + + // Run reconstruction again and verify IDs are stable (deterministic) + tree2, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("second ReconstructTreeFromFS: %v", err) + } + + page2 := findChildBySlug(t, tree2, "no-id") + section2 := findChildBySlug(t, tree2, "section") + + if page2.ID != page.ID { + t.Fatalf("expected deterministic page ID on second run: first=%q, second=%q", page.ID, page2.ID) + } + if section2.ID != section.ID { + t.Fatalf("expected deterministic section ID on second run: first=%q, second=%q", section.ID, section2.ID) + } +} + +func TestNodeStore_ReconstructTreeFromFS_SkipsInvalidSlugs(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + // Create files and directories with invalid slug names + // Uppercase letters should be normalized + mustWriteFile(t, filepath.Join(tmp, "root", "Valid Page.md"), "# Valid", 0o644) + mustWriteFile(t, filepath.Join(tmp, "root", "UPPERCASE.md"), "# Upper", 0o644) + mustMkdir(t, filepath.Join(tmp, "root", "Valid Section")) + mustWriteFile(t, filepath.Join(tmp, "root", "Valid Section", "index.md"), "# Section", 0o644) + + // Create a valid file to ensure the test still works + mustWriteFile(t, filepath.Join(tmp, "root", "valid.md"), "# Valid", 0o644) + + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + // The valid file should be present with normalized slug + valid := findChildBySlug(t, tree, "valid") + if valid == nil { + 
t.Fatalf("expected valid page to be present") + } + + // Files with spaces and uppercase should be normalized + validPage := findChildBySlug(t, tree, "valid-page") + if validPage == nil { + t.Fatalf("expected 'Valid Page.md' to be normalized to 'valid-page'") + } + + uppercase := findChildBySlug(t, tree, "uppercase") + if uppercase == nil { + t.Fatalf("expected 'UPPERCASE.md' to be normalized to 'uppercase'") + } + + validSection := findChildBySlug(t, tree, "valid-section") + if validSection == nil { + t.Fatalf("expected 'Valid Section' directory to be normalized to 'valid-section'") + } +} \ No newline at end of file diff --git a/internal/core/tree/node_store_test.go b/internal/core/tree/node_store_test.go index b4b045da..acb02c32 100644 --- a/internal/core/tree/node_store_test.go +++ b/internal/core/tree/node_store_test.go @@ -7,6 +7,8 @@ import ( "runtime" "strings" "testing" + + "github.com/perber/wiki/internal/core/markdown" ) func mustWriteFile(t *testing.T, path string, data string, perm os.FileMode) { @@ -166,7 +168,7 @@ func TestNodeStore_CreatePage_CreatesMarkdownWithFrontmatter(t *testing.T) { t.Fatalf("read created page: %v", err) } - fm, body, has, err := ParseFrontmatter(string(raw)) + fm, body, has, err := markdown.ParseFrontmatter(string(raw)) if err != nil { t.Fatalf("ParseFrontmatter: %v", err) } @@ -230,7 +232,7 @@ func TestNodeStore_UpsertContent_Page_CreatesOrUpdates_PreservesMode(t *testing. 
} raw, _ := os.ReadFile(path) - fm, body, has, err := ParseFrontmatter(string(raw)) + fm, body, has, err := markdown.ParseFrontmatter(string(raw)) if err != nil { t.Fatalf("ParseFrontmatter: %v", err) } @@ -439,7 +441,7 @@ func TestNodeStore_SyncFrontmatterIfExists_Page_UpdatesOrAddsFM(t *testing.T) { } raw := string(mustRead(t, path)) - fm, body, has, err := ParseFrontmatter(raw) + fm, body, has, err := markdown.ParseFrontmatter(raw) if err != nil { t.Fatalf("ParseFrontmatter: %v", err) } @@ -460,7 +462,7 @@ func TestNodeStore_SyncFrontmatterIfExists_Page_UpdatesOrAddsFM(t *testing.T) { t.Fatalf("SyncFrontmatterIfExists(update): %v", err) } raw2 := string(mustRead(t, path)) - fm2, body2, has2, err := ParseFrontmatter(raw2) + fm2, body2, has2, err := markdown.ParseFrontmatter(raw2) if err != nil { t.Fatalf("ParseFrontmatter: %v", err) } @@ -614,7 +616,7 @@ func TestNodeStore_ConvertNode_SectionToPage_NoIndex_CreatesEmptyPageWithFM(t *t pageFile := filepath.Join(tmp, "root", "docs.md") raw := string(mustRead(t, pageFile)) - fm, _, has, err := ParseFrontmatter(raw) + fm, _, has, err := markdown.ParseFrontmatter(raw) if err != nil { t.Fatalf("ParseFrontmatter: %v", err) } diff --git a/internal/core/tree/tree_service.go b/internal/core/tree/tree_service.go index f5a487ce..07c04161 100644 --- a/internal/core/tree/tree_service.go +++ b/internal/core/tree/tree_service.go @@ -10,6 +10,7 @@ import ( "sync" "time" + "github.com/perber/wiki/internal/core/markdown" "github.com/perber/wiki/internal/core/shared" ) @@ -100,7 +101,16 @@ func (t *TreeService) migrate(fromVersion int) error { } func (t *TreeService) migrateToV1() error { - // Backfill metadata for all pages + if t.tree == nil { + return ErrTreeNotLoaded + } + + return t.backfillMetadataLocked() +} + +// backfillMetadataLocked backfills CreatedAt and UpdatedAt timestamps for all nodes from filesystem +// The caller must ensure that t.tree is not nil and must hold the appropriate lock before calling this method +func 
(t *TreeService) backfillMetadataLocked() error { var backfillMetadata func(node *PageNode) error backfillMetadata = func(node *PageNode) error { // If CreatedAt is already set, assume metadata was backfilled and skip @@ -155,10 +165,6 @@ func (t *TreeService) migrateToV1() error { return nil } - if t.tree == nil { - return ErrTreeNotLoaded - } - return backfillMetadata(t.tree) } @@ -193,7 +199,7 @@ func (t *TreeService) migrateToV2() error { } // Parse the frontmatter - fm, body, has, err := ParseFrontmatter(content) + fm, body, has, err := markdown.ParseFrontmatter(content) if err != nil { t.log.Error("Could not parse frontmatter for node", "nodeID", node.ID, "error", err) return fmt.Errorf("could not parse frontmatter for node %s: %w", node.ID, err) @@ -204,7 +210,7 @@ func (t *TreeService) migrateToV2() error { // If there is no frontmatter, start with a new one if !has { - fm = Frontmatter{} + fm = markdown.Frontmatter{} changed = true } @@ -222,7 +228,7 @@ func (t *TreeService) migrateToV2() error { // Only write if changed if changed { - newContent, err := BuildMarkdownWithFrontmatter(fm, body) + newContent, err := markdown.BuildMarkdownWithFrontmatter(fm, body) if err != nil { t.log.Error("could not build markdown with frontmatter", "nodeID", node.ID, "error", err) return fmt.Errorf("could not build markdown with frontmatter for node %s: %w", node.ID, err) @@ -324,6 +330,12 @@ func (t *TreeService) SaveTree() error { return t.withLockedTree(t.saveTreeLocked) } +// saveTreeLocked saves the tree to the storage directory +func (t *TreeService) saveTreeLocked() error { + return t.store.SaveTree(t.treeFilename, t.tree) +} + +// TreeHash returns the current hash of the tree func (t *TreeService) TreeHash() string { var hash string _ = t.withRLockedTree(func() error { @@ -333,9 +345,54 @@ func (t *TreeService) TreeHash() string { return hash } -func (t *TreeService) saveTreeLocked() error { - // Save the tree to the storage directory - return 
t.store.SaveTree(t.treeFilename, t.tree) +// ReconstructTreeFromFS reconstructs the tree from the filesystem +func (t *TreeService) ReconstructTreeFromFS() error { + return t.withLockedTree(t.reconstructTreeFromFSLocked) +} + +func (t *TreeService) reconstructTreeFromFSLocked() error { + // Reconstruct the tree from the filesystem + // This is a more complex operation and may involve reading the filesystem structure + newTree, err := t.store.ReconstructTreeFromFS() + if err != nil { + t.log.Error("Error reconstructing tree from filesystem", "error", err) + return err + } + + // Defensive check to protect against unexpected nil returns from ReconstructTreeFromFS + if newTree == nil { + return fmt.Errorf("internal error: ReconstructTreeFromFS returned nil tree") + } + + // Save the old tree in case we need to revert + // Note: oldTree may be nil if this is the first reconstruction (which is expected) + oldTree := t.tree + t.tree = newTree + + // Backfill metadata for all nodes + if err := t.backfillMetadataLocked(); err != nil { + t.log.Error("Error backfilling metadata after reconstruction", "error", err) + // Revert tree assignment on failure (may set back to nil, which is fine) + t.tree = oldTree + return err + } + + // Save the tree + if err := t.saveTreeLocked(); err != nil { + t.log.Error("Error saving tree after reconstruction", "error", err) + // Revert tree assignment on failure (may set back to nil, which is fine) + t.tree = oldTree + return err + } + + // Update the schema version to prevent unnecessary migrations on next startup + if err := saveSchema(t.storageDir, CurrentSchemaVersion); err != nil { + t.log.Error("Error saving schema after reconstruction", "error", err) + // Note: We don't revert the tree here since it was already saved successfully + return err + } + + return nil } // Create Node adds a new node to the tree diff --git a/internal/core/tree/tree_service_test.go b/internal/core/tree/tree_service_test.go index 2e0389d9..384a99ad 100644 --- 
a/internal/core/tree/tree_service_test.go +++ b/internal/core/tree/tree_service_test.go @@ -6,6 +6,8 @@ import ( "path/filepath" "strings" "testing" + + "github.com/perber/wiki/internal/core/markdown" ) // --- helpers --- @@ -132,7 +134,7 @@ func TestTreeService_CreateNode_Page_Root_CreatesFileAndFrontmatter(t *testing.T t.Fatalf("read file: %v", err) } - fm, _, has, err := ParseFrontmatter(string(raw)) + fm, _, has, err := markdown.ParseFrontmatter(string(raw)) if err != nil { t.Fatalf("ParseFrontmatter: %v", err) } @@ -206,7 +208,7 @@ func TestTreeService_UpdateNode_TitleOnly_SyncsFrontmatterIfFileExists(t *testin if err != nil { t.Fatalf("read: %v", err) } - fm, _, has, err := ParseFrontmatter(string(raw)) + fm, _, has, err := markdown.ParseFrontmatter(string(raw)) if err != nil { t.Fatalf("ParseFrontmatter: %v", err) } @@ -695,7 +697,7 @@ func TestTreeService_LoadTree_MigratesToV2_AddsFrontmatterAndPreservesBody(t *te t.Fatalf("read migrated file: %v", err) } - fm, migratedBody, has, err := ParseFrontmatter(string(raw)) + fm, migratedBody, has, err := markdown.ParseFrontmatter(string(raw)) if err != nil { t.Fatalf("ParseFrontmatter: %v", err) } @@ -713,6 +715,348 @@ func TestTreeService_LoadTree_MigratesToV2_AddsFrontmatterAndPreservesBody(t *te } } +// TestTreeService_ReconstructTreeFromFS_UpdatesSchemaVersion verifies that +// ReconstructTreeFromFS writes the current schema version to prevent unnecessary migrations +func TestTreeService_ReconstructTreeFromFS_UpdatesSchemaVersion(t *testing.T) { + tmpDir := t.TempDir() + + // Create a minimal file structure for reconstruction + mustMkdir(t, filepath.Join(tmpDir, "root")) + mustWriteFile(t, filepath.Join(tmpDir, "root", "test.md"), "# Test Page", 0o644) + + // Create service WITHOUT schema.json (simulating an old/missing schema) + svc := NewTreeService(tmpDir) + + // Reconstruct the tree (no prior tree loaded) + if err := svc.ReconstructTreeFromFS(); err != nil { + t.Fatalf("ReconstructTreeFromFS failed: %v", 
err) + } + + // Verify schema.json was created with current version + schema, err := loadSchema(tmpDir) + if err != nil { + t.Fatalf("loadSchema failed: %v", err) + } + + if schema.Version != CurrentSchemaVersion { + t.Errorf("expected schema version %d after reconstruction, got %d", CurrentSchemaVersion, schema.Version) + } + + // Verify tree.json was also created + mustStat(t, filepath.Join(tmpDir, "tree.json")) +} + +// --- G) ReconstructTreeFromFS --- + +func TestTreeService_ReconstructTreeFromFS_BackfillsMetadata(t *testing.T) { + svc, tmpDir := newLoadedService(t) + + // Create some files on disk manually (simulating external changes) + mustWriteFile(t, filepath.Join(tmpDir, "root", "page1.md"), `--- +leafwiki_id: page-1 +leafwiki_title: Page One +--- +# Page One`, 0o644) + + mustMkdir(t, filepath.Join(tmpDir, "root", "section1")) + mustWriteFile(t, filepath.Join(tmpDir, "root", "section1", "index.md"), `--- +leafwiki_id: sec-1 +leafwiki_title: Section One +--- +# Section One`, 0o644) + + mustWriteFile(t, filepath.Join(tmpDir, "root", "section1", "page2.md"), `--- +leafwiki_id: page-2 +leafwiki_title: Page Two +--- +# Page Two`, 0o644) + + // Reconstruct the tree from filesystem + err := svc.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS failed: %v", err) + } + + // Verify metadata was backfilled for all nodes + tree := svc.GetTree() + + // Check root metadata + if tree.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected root metadata CreatedAt to be backfilled, got zero") + } + if tree.Metadata.UpdatedAt.IsZero() { + t.Fatalf("expected root metadata UpdatedAt to be backfilled, got zero") + } + + // Find and verify page1 + page1 := findChildBySlug(t, tree, "page1") + if page1.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected page1 metadata CreatedAt to be backfilled, got zero") + } + if page1.Metadata.UpdatedAt.IsZero() { + t.Fatalf("expected page1 metadata UpdatedAt to be backfilled, got zero") + } + + // Find and verify section1 
+ section1 := findChildBySlug(t, tree, "section1") + if section1.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected section1 metadata CreatedAt to be backfilled, got zero") + } + if section1.Metadata.UpdatedAt.IsZero() { + t.Fatalf("expected section1 metadata UpdatedAt to be backfilled, got zero") + } + + // Find and verify page2 (child of section1) + page2 := findChildBySlug(t, section1, "page2") + if page2.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected page2 metadata CreatedAt to be backfilled, got zero") + } + if page2.Metadata.UpdatedAt.IsZero() { + t.Fatalf("expected page2 metadata UpdatedAt to be backfilled, got zero") + } +} + +func TestTreeService_ReconstructTreeFromFS_PersistsTreeJSON(t *testing.T) { + svc, tmpDir := newLoadedService(t) + + // Create some files on disk manually + mustWriteFile(t, filepath.Join(tmpDir, "root", "readme.md"), `--- +leafwiki_id: readme-page +leafwiki_title: README +--- +# README`, 0o644) + + // Path to tree.json; its existence and content are verified after reconstruction + treeJSONPath := filepath.Join(tmpDir, "tree.json") + + // Reconstruct the tree from filesystem + err := svc.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS failed: %v", err) + } + + // Verify tree.json was persisted + info := mustStat(t, treeJSONPath) + if info.Size() == 0 { + t.Fatalf("expected tree.json to have content after reconstruction, got size 0") + } + + // Verify we can reload the tree from the saved tree.json + newSvc := NewTreeService(tmpDir) + if err := newSvc.LoadTree(); err != nil { + t.Fatalf("LoadTree after reconstruction failed: %v", err) + } + + // Verify the tree structure matches + tree := newSvc.GetTree() + if tree == nil || tree.ID != "root" { + t.Fatalf("expected root node after reload, got: %+v", tree) + } + + // Verify the readme page exists + readme := findChildBySlug(t, tree, "readme") + if readme.ID != "readme-page" { + t.Fatalf("expected readme ID to be 'readme-page', got %q", readme.ID) + } + if readme.Title 
!= "README" { + t.Fatalf("expected readme title to be 'README', got %q", readme.Title) + } + + // Verify metadata was persisted + if readme.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected persisted metadata CreatedAt to not be zero") + } + if readme.Metadata.UpdatedAt.IsZero() { + t.Fatalf("expected persisted metadata UpdatedAt to not be zero") + } +} + +func TestTreeService_ReconstructTreeFromFS_ComplexTree_PreservesStructure(t *testing.T) { + svc, tmpDir := newLoadedService(t) + + // Create a complex tree structure on disk + mustWriteFile(t, filepath.Join(tmpDir, "root", "intro.md"), `--- +leafwiki_id: intro +leafwiki_title: Introduction +--- +# Introduction`, 0o644) + + mustMkdir(t, filepath.Join(tmpDir, "root", "docs")) + mustWriteFile(t, filepath.Join(tmpDir, "root", "docs", "index.md"), `--- +leafwiki_id: docs-section +leafwiki_title: Documentation +--- +# Documentation`, 0o644) + + mustWriteFile(t, filepath.Join(tmpDir, "root", "docs", "getting-started.md"), `--- +leafwiki_id: getting-started +leafwiki_title: Getting Started +--- +# Getting Started`, 0o644) + + mustMkdir(t, filepath.Join(tmpDir, "root", "docs", "guides")) + mustWriteFile(t, filepath.Join(tmpDir, "root", "docs", "guides", "index.md"), `--- +leafwiki_id: guides-section +leafwiki_title: Guides +--- +# Guides`, 0o644) + + mustWriteFile(t, filepath.Join(tmpDir, "root", "docs", "guides", "basic.md"), `--- +leafwiki_id: basic-guide +leafwiki_title: Basic Guide +--- +# Basic Guide`, 0o644) + + // Reconstruct + err := svc.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS failed: %v", err) + } + + tree := svc.GetTree() + + // Verify structure + intro := findChildBySlug(t, tree, "intro") + if intro.Kind != NodeKindPage { + t.Fatalf("expected intro to be a page, got %q", intro.Kind) + } + + docs := findChildBySlug(t, tree, "docs") + if docs.Kind != NodeKindSection { + t.Fatalf("expected docs to be a section, got %q", docs.Kind) + } + if docs.ID != "docs-section" { + 
t.Fatalf("expected docs ID to be 'docs-section', got %q", docs.ID) + } + + gettingStarted := findChildBySlug(t, docs, "getting-started") + if gettingStarted.Kind != NodeKindPage { + t.Fatalf("expected getting-started to be a page, got %q", gettingStarted.Kind) + } + + guides := findChildBySlug(t, docs, "guides") + if guides.Kind != NodeKindSection { + t.Fatalf("expected guides to be a section, got %q", guides.Kind) + } + + basic := findChildBySlug(t, guides, "basic") + if basic.Kind != NodeKindPage { + t.Fatalf("expected basic to be a page, got %q", basic.Kind) + } + + // Verify all nodes have metadata + if intro.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected intro to have metadata") + } + if docs.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected docs to have metadata") + } + if guides.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected guides to have metadata") + } + if basic.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected basic to have metadata") + } + + // Verify tree.json was saved and can be reloaded + treeJSONPath := filepath.Join(tmpDir, "tree.json") + mustStat(t, treeJSONPath) + + reloadedSvc := NewTreeService(tmpDir) + if err := reloadedSvc.LoadTree(); err != nil { + t.Fatalf("LoadTree after reconstruction failed: %v", err) + } + + reloadedTree := reloadedSvc.GetTree() + if len(reloadedTree.Children) != len(tree.Children) { + t.Fatalf("expected reloaded tree to have same number of children") + } +} + +func TestTreeService_ReconstructTreeFromFS_EmptyDirectory_CreatesRootAndPersists(t *testing.T) { + svc, tmpDir := newLoadedService(t) + + // Reconstruct from empty directory (should create just root) + err := svc.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS failed: %v", err) + } + + tree := svc.GetTree() + if tree == nil || tree.ID != "root" { + t.Fatalf("expected root node, got: %+v", tree) + } + + // Note: Root metadata may not be backfilled from filesystem when directory is empty + // because there's no corresponding 
 file/directory to stat. This is expected behavior. + // The important thing is that the tree is reconstructed and persisted. + + // Verify tree.json was saved + treeJSONPath := filepath.Join(tmpDir, "tree.json") + mustStat(t, treeJSONPath) + + // Verify we can reload + reloadedSvc := NewTreeService(tmpDir) + if err := reloadedSvc.LoadTree(); err != nil { + t.Fatalf("LoadTree after reconstruction failed: %v", err) + } + + reloadedTree := reloadedSvc.GetTree() + if reloadedTree == nil || reloadedTree.ID != "root" { + t.Fatalf("expected root node after reload") + } +} + +func TestTreeService_ReconstructTreeFromFS_RevertsOnMetadataBackfillError(t *testing.T) { + // This test is harder to trigger without mocking, but we can at least verify + // that the tree state is preserved when a failure scenario occurs. + // For now, we'll test that a successful reconstruction doesn't lose the old tree. + svc, tmpDir := newLoadedService(t) + + // Create initial tree state + initialID, err := svc.CreateNode("system", nil, "Initial", "initial", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("CreateNode failed: %v", err) + } + + // Get initial tree + initialTree := svc.GetTree() + if len(initialTree.Children) != 1 { + t.Fatalf("expected 1 child in initial tree") + } + + // Create a new file on disk + mustWriteFile(t, filepath.Join(tmpDir, "root", "new-page.md"), `--- +leafwiki_id: new-page +leafwiki_title: New Page +--- +# New Page`, 0o644) + + // Reconstruct should succeed + err = svc.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS failed: %v", err) + } + + // Verify new tree has both nodes + newTree := svc.GetTree() + if len(newTree.Children) != 2 { + t.Fatalf("expected 2 children after reconstruction, got %d", len(newTree.Children)) + } + + // Verify initial node still exists + var foundInitial bool + for _, child := range newTree.Children { + if child.ID == *initialID { + foundInitial = true + break + } + } + if !foundInitial { + 
t.Fatalf("expected initial node to still exist after reconstruction") + } +} + // --- small util --- func ptrKind(k NodeKind) *NodeKind { return &k } diff --git a/internal/http/router.go b/internal/http/router.go index 5bffb70e..95a12683 100644 --- a/internal/http/router.go +++ b/internal/http/router.go @@ -4,6 +4,7 @@ import ( "embed" "io/fs" "log" + "log/slog" "net/http" "os" "path/filepath" @@ -29,6 +30,26 @@ var EmbedFrontend = "false" // Environment is a flag to set the environment var Environment = "development" +// Slog Wrapper for Gin (Info level) +type slogWriter struct { + logger *slog.Logger +} + +func (sw *slogWriter) Write(p []byte) (n int, err error) { + sw.logger.Info(strings.TrimSpace(string(p))) + return len(p), nil +} + +// Slog Wrapper for Gin Errors (Error level) +type slogErrorWriter struct { + logger *slog.Logger +} + +func (sew *slogErrorWriter) Write(p []byte) (n int, err error) { + sew.logger.Error(strings.TrimSpace(string(p))) + return len(p), nil +} + type RouterOptions struct { PublicAccess bool // Whether the wiki allows public read access InjectCodeInHeader string // Raw HTML/JS code to inject into the tag @@ -60,6 +81,10 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { gin.SetMode(gin.DebugMode) } + // Set Gin to use slog for logging + gin.DefaultWriter = &slogWriter{logger: slog.Default().With("component", "gin")} + gin.DefaultErrorWriter = &slogErrorWriter{logger: slog.Default().With("component", "gin")} + importerService := wireImporterService(wikiInstance) router := gin.Default() diff --git a/internal/importer/executor.go b/internal/importer/executor.go index 6e5a0e97..6fe3e017 100644 --- a/internal/importer/executor.go +++ b/internal/importer/executor.go @@ -3,10 +3,9 @@ package importer import ( "fmt" "log/slog" - "os" "path/filepath" - "github.com/perber/wiki/internal/core/tree" + "github.com/perber/wiki/internal/core/markdown" ) type ExecutionResult struct { @@ -94,18 +93,17 @@ func (e *Executor) 
Execute(userID string) (*ExecutionResult, error) { continue } sourceAbs := filepath.Join(e.planOptions.SourceBasePath, filepath.FromSlash(item.SourcePath)) - content, err := os.ReadFile(sourceAbs) + mdFile, err := markdown.LoadMarkdownFile(sourceAbs) if err != nil { errMsg := err.Error() execItem.Action = ExecutionActionSkipped execItem.Error = &errMsg result.SkippedCount++ result.Items = append(result.Items, execItem) - e.logger.Error("Failed to read source file", "source_path", sourceAbs, "error", err) + e.logger.Error("Failed to load source file", "source_path", sourceAbs, "error", err) continue } - // Strip frontmatter if any - _, body, _ := tree.SplitFrontmatter(string(content)) + body := mdFile.GetContent() if _, err := e.wiki.UpdatePage(userID, page.ID, page.Title, page.Slug, &body, &page.Kind); err != nil { errMsg := err.Error() execItem.Action = ExecutionActionSkipped diff --git a/internal/importer/planner.go b/internal/importer/planner.go index 895e640f..e4d5bdf5 100644 --- a/internal/importer/planner.go +++ b/internal/importer/planner.go @@ -9,6 +9,7 @@ import ( "path/filepath" "strings" + "github.com/perber/wiki/internal/core/markdown" "github.com/perber/wiki/internal/core/shared" "github.com/perber/wiki/internal/core/tree" ) @@ -149,14 +150,32 @@ func (p *Planner) analyzeEntry(mdFile ImportMDFile, options PlanOptions) (*PlanI return nil, err } - title, titleErr := p.extractTitleFromMDFile(sourcePath) var notes []string - if titleErr != nil { - notes = append(notes, fmt.Sprintf("Failed to extract title from file: %v", titleErr)) + md, err := markdown.LoadMarkdownFile(sourcePath) + if err != nil { + notes = append(notes, fmt.Sprintf("Failed to load markdown file for title extraction: %v", err)) } - if !result.Exists { + // Determine fallback title + title := path.Base(wikiPath) // fallback to last segment of wiki path + if wikiPath == "" { + // For root-level index.md or empty paths, use filename without extension + title = 
strings.TrimSuffix(filenameLower, path.Ext(filenameLower)) + if title == "" { + title = "root" + } + } + if md != nil { + var titleErr error + title, titleErr = md.GetTitle() + if titleErr != nil { + notes = append(notes, fmt.Sprintf("Failed to extract title from file: %v", titleErr)) + title = "unknown" // ensure title is set + } + } + + if !result.Exists { // slug = last segment slug := "" if wikiPath != "" { @@ -193,73 +212,3 @@ func (p *Planner) analyzeEntry(mdFile ImportMDFile, options PlanOptions) (*PlanI Notes: notes, }, nil } - -func (p *Planner) extractTitleFromMDFile(mdFilePath string) (string, error) { - // Helper to get filename-based fallback - filenameFallback := func() string { - base := path.Base(mdFilePath) - return strings.TrimSuffix(base, path.Ext(base)) - } - - // Read the file content - content, err := os.ReadFile(mdFilePath) - if err != nil { - // If we can't read the file, return filename as fallback but keep the error - return filenameFallback(), err - } - - stripSingleAndDoubleQuotes := func(s string, err error) (string, error) { - if err != nil { - return "", err - } - s = strings.Trim(s, `"`) - s = strings.Trim(s, `'`) - return s, nil - } - - // Try to extract title from frontmatter - title, err := stripSingleAndDoubleQuotes(p.extractTitleFromFrontMatter(content)) - if err == nil && title != "" { - return title, nil - } - - // Try to extract title from first heading - title, err = stripSingleAndDoubleQuotes(p.extractTitleFromFirstHeading(content)) - if err == nil && title != "" { - return title, nil - } - - // strip extension from filename - return stripSingleAndDoubleQuotes(filenameFallback(), nil) -} - -func (p *Planner) extractTitleFromFrontMatter(content []byte) (string, error) { - frontMatter, _, has := tree.SplitFrontmatter(string(content)) - if !has { - return "", errors.New("no frontmatter found") - } - - // Look for title or leafwiki_title in the frontmatter - lines := strings.Split(frontMatter, "\n") - for _, line := range lines 
{ - if strings.HasPrefix(line, "title:") { - return strings.TrimSpace(strings.TrimPrefix(line, "title:")), nil - } - if strings.HasPrefix(line, "leafwiki_title:") { - return strings.TrimSpace(strings.TrimPrefix(line, "leafwiki_title:")), nil - } - } - return "", errors.New("no title found in frontmatter") -} - -func (p *Planner) extractTitleFromFirstHeading(content []byte) (string, error) { - // Simple first heading extraction - lines := strings.Split(string(content), "\n") - for _, line := range lines { - line = strings.TrimSpace(line) - if strings.HasPrefix(line, "# ") { - return strings.TrimSpace(strings.TrimPrefix(line, "# ")), nil - } - } - return "", errors.New("no heading found") -} diff --git a/internal/importer/planner_test.go b/internal/importer/planner_test.go index 6e49d48e..74edcb2c 100644 --- a/internal/importer/planner_test.go +++ b/internal/importer/planner_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/perber/wiki/internal/core/tree" + "github.com/perber/wiki/internal/test_utils" ) type fakeWiki struct { @@ -79,25 +80,13 @@ func (f *fakeWiki) UpdatePage(userID string, id, title, slug string, content *st }}, nil } -func writeFile(t *testing.T, base, rel, content string) string { - t.Helper() - abs := filepath.Join(base, filepath.FromSlash(rel)) - if err := os.MkdirAll(filepath.Dir(abs), 0o755); err != nil { - t.Fatalf("mkdir: %v", err) - } - if err := os.WriteFile(abs, []byte(content), 0o644); err != nil { - t.Fatalf("write: %v", err) - } - return abs -} - func newPlannerWithFake(w *fakeWiki) *Planner { return NewPlanner(w, tree.NewSlugService()) } func TestPlanner_CreatePlan_CreateNewPage_NonIndex(t *testing.T) { tmp := t.TempDir() - writeFile(t, tmp, "My Page.md", "# Hello\n\nbody") + test_utils.WriteFile(t, tmp, "My Page.md", "# Hello\n\nbody") wiki := &fakeWiki{ treeHash: "h1", @@ -142,7 +131,7 @@ func TestPlanner_CreatePlan_CreateNewPage_NonIndex(t *testing.T) { func TestPlanner_CreatePlan_CreateNewSection_IndexMd(t *testing.T) { tmp := 
t.TempDir() - writeFile(t, tmp, "Guides/index.md", "---\ntitle: Guides\n---\n\n# Ignored") + test_utils.WriteFile(t, tmp, "Guides/index.md", "---\ntitle: Guides\n---\n\n# Ignored") wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} p := newPlannerWithFake(wiki) @@ -175,7 +164,7 @@ func TestPlanner_CreatePlan_CreateNewSection_IndexMd(t *testing.T) { func TestPlanner_CreatePlan_SkipExisting_UsesLookupLastSegment(t *testing.T) { tmp := t.TempDir() - writeFile(t, tmp, "a.md", "# A") + test_utils.WriteFile(t, tmp, "a.md", "# A") existingID := "id123" existingKind := tree.NodeKindPage @@ -268,7 +257,7 @@ func TestPlanner_CreatePlan_Error_SourceIsDirectory_IsCollected(t *testing.T) { func TestPlanner_CreatePlan_Error_ExistingZeroSegments_IsCollected(t *testing.T) { tmp := t.TempDir() - writeFile(t, tmp, "x.md", "# X") + test_utils.WriteFile(t, tmp, "x.md", "# X") wiki := &fakeWiki{ treeHash: "h", @@ -295,69 +284,9 @@ func TestPlanner_CreatePlan_Error_ExistingZeroSegments_IsCollected(t *testing.T) // ---- Title extraction ------------------------------------------------------- -func TestPlanner_extractTitleFromMDFile_FrontmatterTitleWins(t *testing.T) { - tmp := t.TempDir() - abs := writeFile(t, tmp, "t.md", "---\ntitle: FM Title\n---\n\n# Heading") - - p := newPlannerWithFake(&fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}}) - - title, err := p.extractTitleFromMDFile(abs) - if err != nil { - t.Fatalf("err: %v", err) - } - if title != "FM Title" { - t.Fatalf("title = %q", title) - } -} - -func TestPlanner_extractTitleFromMDFile_LeafwikiTitle(t *testing.T) { - tmp := t.TempDir() - abs := writeFile(t, tmp, "t.md", "---\nleafwiki_title: Leaf\n---\n\n# Heading") - - p := newPlannerWithFake(&fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}}) - - title, err := p.extractTitleFromMDFile(abs) - if err != nil { - t.Fatalf("err: %v", err) - } - if title != "Leaf" { - t.Fatalf("title = %q", title) - } -} - -func 
TestPlanner_extractTitleFromMDFile_FirstHeadingFallback(t *testing.T) { - tmp := t.TempDir() - abs := writeFile(t, tmp, "t.md", "no fm\n\n# Heading Only\nx") - - p := newPlannerWithFake(&fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}}) - - title, err := p.extractTitleFromMDFile(abs) - if err != nil { - t.Fatalf("err: %v", err) - } - if title != "Heading Only" { - t.Fatalf("title = %q", title) - } -} - -func TestPlanner_extractTitleFromMDFile_FilenameFallback(t *testing.T) { - tmp := t.TempDir() - abs := writeFile(t, tmp, "some-file.md", "no title") - - p := newPlannerWithFake(&fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}}) - - title, err := p.extractTitleFromMDFile(abs) - if err != nil { - t.Fatalf("err: %v", err) - } - if title != "some-file" { - t.Fatalf("title = %q", title) - } -} - func TestPlanner_CreatePlan_TitleExtractionError_AddsNote(t *testing.T) { tmp := t.TempDir() - abs := writeFile(t, tmp, "unreadable.md", "# Title") + abs := test_utils.WriteFile(t, tmp, "unreadable.md", "# Title") // Make file unreadable to trigger extraction error if err := os.Chmod(abs, 0o000); err != nil { @@ -390,8 +319,8 @@ func TestPlanner_CreatePlan_TitleExtractionError_AddsNote(t *testing.T) { if len(it.Notes) != 1 { t.Fatalf("Notes len = %d (want 1)", len(it.Notes)) } - if !strings.Contains(it.Notes[0], "Failed to extract title") { - t.Fatalf("Note = %q (should contain 'Failed to extract title')", it.Notes[0]) + if !strings.Contains(it.Notes[0], "Failed to load markdown file for title extraction") { + t.Fatalf("Note = %q (should contain 'Failed to load markdown file for title extraction')", it.Notes[0]) } // Title should still be set (fallback to filename) if it.Title != "unreadable" { @@ -402,7 +331,7 @@ func TestPlanner_CreatePlan_TitleExtractionError_AddsNote(t *testing.T) { func TestPlanner_analyzeEntry_NormalizesSourceDirSegments(t *testing.T) { // "My Guides/Intro.md" -> "my-guides/intro" (SlugService.NormalizePath + NormalizeFilename) 
tmp := t.TempDir() - writeFile(t, tmp, "My Guides/Intro.md", "# Intro") + test_utils.WriteFile(t, tmp, "My Guides/Intro.md", "# Intro") wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} p := newPlannerWithFake(wiki) @@ -426,7 +355,7 @@ func TestPlanner_analyzeEntry_InvalidSourceDirSegment_ReturnsError(t *testing.T) // NormalizePath(validate=true) nutzt IsValidSlug() nach slug.Make(). // Ein Segment wie "!!!" sluggt zu "" => invalid. tmp := t.TempDir() - writeFile(t, tmp, "!!!/a.md", "# A") + test_utils.WriteFile(t, tmp, "!!!/a.md", "# A") wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} p := newPlannerWithFake(wiki) @@ -449,3 +378,57 @@ func TestPlanner_analyzeEntry_InvalidSourceDirSegment_ReturnsError(t *testing.T) t.Fatalf("unexpected error: %v", res.Errors[0]) } } + +func TestPlanner_CreatePlan_RootIndexMd_EmptyWikiPath_UsesFallbackTitle(t *testing.T) { + // Test case for root-level index.md with empty TargetBasePath and markdown loading failure + // When wikiPath is empty, path.Base("") returns ".", which is not meaningful. + // The fix should use filename without extension as fallback. 
+ tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "index.md", "# Title") + + // Make file unreadable to trigger markdown loading failure + if err := os.Chmod(abs, 0o000); err != nil { + t.Fatalf("chmod: %v", err) + } + defer func() { + if err := os.Chmod(abs, 0o644); err != nil { // restore for cleanup + t.Fatalf("chmod restore: %v", err) + } + }() + + wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "index.md"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "", // empty target base path + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + if len(res.Errors) != 0 { + t.Fatalf("Errors = %#v", res.Errors) + } + if len(res.Items) != 1 { + t.Fatalf("Items len = %d (want 1)", len(res.Items)) + } + + it := res.Items[0] + if it.TargetPath != "" { + t.Fatalf("TargetPath = %q (want empty)", it.TargetPath) + } + if it.Kind != tree.NodeKindSection { + t.Fatalf("Kind = %v (want Section)", it.Kind) + } + // The title should fallback to "index" (filename without .md), not "." 
from path.Base("") + if it.Title != "index" { + t.Fatalf("Title = %q (want index as fallback when wikiPath is empty and markdown fails)", it.Title) + } + // Should have a note about failed markdown loading + if len(it.Notes) == 0 { + t.Fatalf("Expected notes about failed markdown loading") + } + if !strings.Contains(it.Notes[0], "Failed to load markdown file for title extraction") { + t.Fatalf("Note = %q (should contain 'Failed to load markdown file for title extraction')", it.Notes[0]) + } +} diff --git a/internal/search/sqlite_index.go b/internal/search/sqlite_index.go index 2d0fe695..87b69445 100644 --- a/internal/search/sqlite_index.go +++ b/internal/search/sqlite_index.go @@ -9,7 +9,7 @@ import ( "sync" "github.com/microcosm-cc/bluemonday" - "github.com/perber/wiki/internal/core/tree" + "github.com/perber/wiki/internal/core/markdown" "github.com/russross/blackfriday/v2" _ "modernc.org/sqlite" // Import SQLite driver ) @@ -160,7 +160,7 @@ func (s *SQLiteIndex) Close() error { } func (s *SQLiteIndex) IndexPage(path string, filePath string, pageID string, title string, raw string) error { - _, content, _, err := tree.ParseFrontmatter(raw) + _, content, _, err := markdown.ParseFrontmatter(raw) if err != nil { return err } diff --git a/internal/test_utils/common.go b/internal/test_utils/common.go index 9218a3f1..0340d7e9 100644 --- a/internal/test_utils/common.go +++ b/internal/test_utils/common.go @@ -4,6 +4,9 @@ import ( "bytes" "fmt" "mime/multipart" + "os" + "path/filepath" + "testing" ) // CreateMultipartFile simulates a real file upload using multipart encoding @@ -34,3 +37,15 @@ func CreateMultipartFile(filename string, content []byte) (multipart.File, strin f, err := files[0].Open() return f, files[0].Filename, err } + +func WriteFile(t *testing.T, base, rel, content string) string { + t.Helper() + abs := filepath.Join(base, filepath.FromSlash(rel)) + if err := os.MkdirAll(filepath.Dir(abs), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + if err := 
os.WriteFile(abs, []byte(content), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + return abs +} diff --git a/readme.md b/readme.md index 8589f8a9..cb5f0c90 100644 --- a/readme.md +++ b/readme.md @@ -392,6 +392,73 @@ python3 tools/generate-tree.py --root data/root --output data/tree.json --- +## Reconstruct Tree from Filesystem + +LeafWiki includes a built-in `reconstruct-tree` command that rebuilds the navigation tree (`tree.json`) by scanning the actual Markdown files and folders on disk. + +**Usage:** + +```bash +leafwiki [--data-dir ] reconstruct-tree +``` + +Or if you installed LeafWiki as a binary: + +```bash +./leafwiki [--data-dir ] reconstruct-tree +``` + +**What it does:** + +The command: +- Scans the `data/root` directory recursively +- Extracts page titles from Markdown files (from H1 headings or frontmatter) +- Preserves `leafwiki_id` values from frontmatter when present +- Generates new IDs for pages without frontmatter IDs +- Rebuilds the complete navigation tree structure +- Saves the new `tree.json` and updates `schema.json` + +**Use cases:** + +1. **Recovery from corrupted tree.json** + If your `tree.json` becomes corrupted or deleted, this command reconstructs it from your existing Markdown files. + +2. **Manual filesystem changes** + If you've added, moved, or renamed Markdown files directly on disk (outside LeafWiki's UI), run this command to sync the navigation tree. + +3. **Migration and import** + When importing existing Markdown content into LeafWiki, use this command to automatically generate the navigation structure. + +4. **Tree structure reset** + If the tree structure becomes inconsistent with the filesystem, this provides a clean rebuild based on actual file layout. + +**Important notes:** + +- ⚠️ This command **replaces the entire tree structure**. Any custom ordering or metadata in `tree.json` will be lost. +- The command creates a **deterministic, alphabetically-sorted** tree based on file and folder names. 
+- Page content (Markdown files) is never modified—only the navigation structure is rebuilt. +- Frontmatter `leafwiki_id` values are preserved when present, maintaining page identity and internal links. +- For folders (sections), the command looks for `index.md` to extract the section title. +- Files and folders starting with `.` (hidden) are automatically skipped. + +**Example:** + +```bash +# Default data directory (./data) +leafwiki reconstruct-tree + +# Custom data directory +leafwiki --data-dir /path/to/data reconstruct-tree +``` + +**Before running this command:** + +- Ensure your data directory exists and contains a `root` folder with your Markdown content +- Consider backing up your current `tree.json` if you need to preserve custom ordering +- The server does not need to be running—this is a standalone command + +--- + ## Quick Start (Dev) ``` From bd532abb6f8d12de494367e68c25e95b42c5b1d1 Mon Sep 17 00:00:00 2001 From: perber Date: Mon, 2 Feb 2026 19:16:35 +0100 Subject: [PATCH 10/11] feat(ux): improve default section design (#660) --- .../viewer/EmptySectionChildrenList.tsx | 85 ++++++++++++++----- 1 file changed, 62 insertions(+), 23 deletions(-) diff --git a/ui/leafwiki-ui/src/features/viewer/EmptySectionChildrenList.tsx b/ui/leafwiki-ui/src/features/viewer/EmptySectionChildrenList.tsx index f6924e71..e593c77d 100644 --- a/ui/leafwiki-ui/src/features/viewer/EmptySectionChildrenList.tsx +++ b/ui/leafwiki-ui/src/features/viewer/EmptySectionChildrenList.tsx @@ -1,11 +1,11 @@ import { Button } from '@/components/ui/button' -import { NODE_KIND_SECTION, Page } from '@/lib/api/pages' +import { NODE_KIND_PAGE, NODE_KIND_SECTION, Page } from '@/lib/api/pages' import { formatRelativeTime } from '@/lib/formatDate' import { DIALOG_ADD_PAGE } from '@/lib/registries' import { useIsReadOnly } from '@/lib/useIsReadOnly' import { useDialogsStore } from '@/stores/dialogs' import { useTreeStore } from '@/stores/tree' -import { FilePlus } from 'lucide-react' +import { 
FilePlus, FolderPlus } from 'lucide-react' import { Link } from 'react-router-dom' type EmptySectionChildrenListProps = { @@ -51,9 +51,16 @@ export default function EmptySectionChildrenList({ aria-label={`Subpages of ${page.title}`} className="child-list__section" > -

- Pages and Sections in '{page.title}' +

+ Overview of the section '{page.title}'

+

+ This page is the default overview of the section and lists its pages + and sections. +
+ When editing this page, you can define a custom start page for the + section. +

    {node.children?.map((n) => { if (!n) return null @@ -79,14 +86,21 @@ export default function EmptySectionChildrenList({ })}
{!isReadOnly && ( - +
+ +
)} )} @@ -96,20 +110,45 @@ export default function EmptySectionChildrenList({ aria-label={`Subpages of ${page.title}`} className="child-list__section" > -
-

- No Pages and Sections in '{page.title}' +
+

+ This section is empty.

+

+ The section {page.title} does not contain any pages or + sections yet. Start by adding a new page or create a new section. +

{!isReadOnly && ( - +
+ + +
)} )} From 8d424d442bfdbe7660c3ef3f2f74b980aa90f9a2 Mon Sep 17 00:00:00 2001 From: perber Date: Mon, 2 Feb 2026 19:48:27 +0100 Subject: [PATCH 11/11] feat: open node when clicking link (#661) --- ui/leafwiki-ui/src/features/tree/TreeNode.tsx | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/ui/leafwiki-ui/src/features/tree/TreeNode.tsx b/ui/leafwiki-ui/src/features/tree/TreeNode.tsx index 860a1ae3..2f4595b9 100644 --- a/ui/leafwiki-ui/src/features/tree/TreeNode.tsx +++ b/ui/leafwiki-ui/src/features/tree/TreeNode.tsx @@ -54,6 +54,22 @@ export const TreeNode = React.memo(function TreeNode({ to={`/${node.path}`} className="tree-node__link" data-testid={`tree-node-link-${node.id}`} + onClick={(e) => { + // Only toggle sections on click + if (node.kind !== NODE_KIND_SECTION) return + + // Prevent toggling when using modifier keys or middle mouse button + if ( + e.metaKey || + e.ctrlKey || + e.shiftKey || + e.altKey || + e.button === 1 + ) { + return + } + toggleNode(node.id) + }} >