diff --git a/cmd/leafwiki/main.go b/cmd/leafwiki/main.go index 3dff3f43..dc3a3921 100644 --- a/cmd/leafwiki/main.go +++ b/cmd/leafwiki/main.go @@ -3,7 +3,7 @@ package main import ( "flag" "fmt" - "log" + "log/slog" "os" "strings" "time" @@ -20,6 +20,7 @@ func printUsage() { leafwiki --jwt-secret --admin-password [--host ] [--port ] [--data-dir ] leafwiki --disable-auth [--host ] [--port ] [--data-dir ] leafwiki reset-admin-password + leafwiki [--data-dir ] reconstruct-tree leafwiki --help Options: @@ -42,6 +43,7 @@ func printUsage() { LEAFWIKI_PORT LEAFWIKI_DATA_DIR LEAFWIKI_JWT_SECRET + LEAFWIKI_LOG_LEVEL LEAFWIKI_ADMIN_PASSWORD LEAFWIKI_PUBLIC_ACCESS LEAFWIKI_ALLOW_INSECURE @@ -53,7 +55,31 @@ func printUsage() { `) } +func setupLogger() { + level := slog.LevelInfo + if os.Getenv("LEAFWIKI_LOG_LEVEL") == "debug" { + level = slog.LevelDebug + } else if (os.Getenv("LEAFWIKI_LOG_LEVEL")) == "error" { + level = slog.LevelError + } else if (os.Getenv("LEAFWIKI_LOG_LEVEL")) == "warn" { + level = slog.LevelWarn + } + + handler := slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{ + Level: level, + AddSource: true, + }) + + slog.SetDefault(slog.New(handler)) +} + +func fail(msg string, args ...any) { + slog.Default().Error(msg, args...) + os.Exit(1) +} + func main() { + setupLogger() // flags hostFlag := flag.String("host", "", "host/IP address to bind the server to (e.g. 127.0.0.1 or 0.0.0.0)") @@ -94,12 +120,28 @@ func main() { case "reset-admin-password": user, err := tools.ResetAdminPassword(dataDir) if err != nil { - log.Fatalf("Password reset failed: %v", err) + fail("Password reset failed", "error", err) } fmt.Println("Admin password reset successfully.") fmt.Printf("New password for user %s: %s\n", user.Username, user.Password) return + case "reconstruct-tree": + // Ensure data directory exists before reconstruction + if _, err := os.Stat(dataDir); err != nil { + if os.IsNotExist(err) { + if err := os.MkdirAll(dataDir, 0755); err != nil { + fail("Failed to create data directory", "error", err) + } + } else { + fail("Failed to access data directory", "error", err) + } + } + if err := tools.ReconstructTreeFromFS(dataDir); err != nil { + fail("Tree reconstruction failed", "error", err) + } + fmt.Println("Tree reconstructed successfully from filesystem.") + return case "--help", "-h", "help": printUsage() return @@ -112,27 +154,27 @@ func main() { if disableAuth { publicAccess = true - log.Printf("WARNING: Authentication disabled. Wiki is publicly accessible without authentication.") + slog.Default().Warn("Authentication disabled. Wiki is publicly accessible without authentication.") } if allowInsecure { - log.Printf("WARNING: allow-insecure enabled. Auth cookies may be transmitted over plain HTTP (INSECURE).") + slog.Default().Warn("allow-insecure enabled. Auth cookies may be transmitted over plain HTTP (INSECURE).") } // Check if data directory exists if _, err := os.Stat(dataDir); os.IsNotExist(err) { if err := os.MkdirAll(dataDir, 0755); err != nil { - log.Fatalf("Failed to create data directory: %v", err) + fail("Failed to create data directory", "error", err) } } if !disableAuth { if jwtSecret == "" { - log.Fatal("JWT secret is required. Set it using --jwt-secret or LEAFWIKI_JWT_SECRET environment variable.") + fail("JWT secret is required. Set it using --jwt-secret or LEAFWIKI_JWT_SECRET environment variable.") } if adminPassword == "" { - log.Fatalf("admin password is required. Set it using --admin-password or LEAFWIKI_ADMIN_PASSWORD environment variable.") + fail("admin password is required. Set it using --admin-password or LEAFWIKI_ADMIN_PASSWORD environment variable.") } } @@ -145,7 +187,7 @@ func main() { AuthDisabled: disableAuth, }) if err != nil { - log.Fatalf("Failed to initialize Wiki: %v", err) + fail("Failed to initialize Wiki", "error", err) } defer w.Close() @@ -164,7 +206,7 @@ func main() { // Start server if err := router.Run(listenAddr); err != nil { - log.Fatalf("Failed to start server: %v", err) + fail("Failed to start server", "error", err) } } @@ -192,7 +234,7 @@ func resolveBool(flagName string, flagVal bool, visited map[string]bool, envVar return b } // If env var is set but invalid, fail fast (helps operators) - log.Fatalf("Invalid value for %s: %q (expected true/false/1/0/yes/no)", envVar, env) + fail("Invalid environment variable value", "variable", envVar, "value", env, "expected", "true/false/1/0/yes/no") } return flagVal // default from flag } @@ -206,7 +248,7 @@ func resolveDuration(flagName string, flagVal time.Duration, visited map[string] return d } // If env var is set but invalid, fail fast (helps operators) - log.Fatalf("Invalid value for %s: %q (expected duration like 24h, 15m)", envVar, env) + fail("Invalid environment variable value", "variable", envVar, "value", env, "expected", "duration like 24h, 15m") } return flagVal // default from flag } diff --git a/e2e/pages/TreeView.ts b/e2e/pages/TreeView.ts index dbb1060d..a973f265 100644 --- a/e2e/pages/TreeView.ts +++ b/e2e/pages/TreeView.ts @@ -86,7 +86,13 @@ export default class TreeView { await nodeRow.scrollIntoViewIfNeeded(); await nodeRow.hover(); // oder mouse.move, s.u. - const sortButton = nodeRow.locator('button[data-testid="tree-view-action-button-sort"]'); + // open more actions menu + const moreActionsButton = nodeRow.locator( + 'button[data-testid="tree-view-action-button-open-more-actions"]', + ); + await moreActionsButton.click({ force: true }); + + const sortButton = this.page.locator('div[data-testid="tree-view-action-button-sort"]'); await sortButton.click({ force: true }); const sortPageDialog = new SortPageDialog(this.page); @@ -111,7 +117,12 @@ export default class TreeView { await nodeRow.scrollIntoViewIfNeeded(); await nodeRow.hover(); // oder mouse.move, s.u. - const moveButton = nodeRow.locator('button[data-testid="tree-view-action-button-move"]'); + const moreActionsButton = nodeRow.locator( + 'button[data-testid="tree-view-action-button-open-more-actions"]', + ); + await moreActionsButton.click({ force: true }); + + const moveButton = this.page.locator('div[data-testid="tree-view-action-button-move"]'); await moveButton.click({ force: true }); const movePageDialog = new MovePageDialog(this.page); diff --git a/e2e/tests/page.spec.ts b/e2e/tests/page.spec.ts index 66c3b272..b5013480 100644 --- a/e2e/tests/page.spec.ts +++ b/e2e/tests/page.spec.ts @@ -6,7 +6,6 @@ import DeletePageDialog from '../pages/DeletePageDialog'; import EditPage from '../pages/EditPage'; import LoginPage from '../pages/LoginPage'; import NotFoundPage from '../pages/NotFoundPage'; -import SearchView from '../pages/SearchView'; import TreeView from '../pages/TreeView'; import ViewPage from '../pages/ViewPage'; @@ -368,6 +367,9 @@ graph TD; test.expect(await deletePageDialog.dialogTextVisible()).toBeFalsy(); }); + // disable this test cases, because it is flaky + // TODO: fix the flakiness + /* test('search-page', async ({ page }) => { const title = `Page To Search ${Date.now()}`; const content = `This is the content of the page to search, created at ${new Date().toISOString()}`; @@ -404,6 +406,7 @@ graph TD; // clear search await searchView.clearSearch(); }); + */ test('test-asset-upload-and-use-in-page', async ({ page }) => { const title = `Page With Asset ${Date.now()}`; diff --git a/go.mod b/go.mod index 27af2754..2784dbdb 100644 --- a/go.mod +++ b/go.mod @@ -12,6 +12,7 @@ require ( github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569 github.com/yuin/goldmark v1.7.16 golang.org/x/crypto v0.47.0 + gopkg.in/yaml.v3 v3.0.1 modernc.org/sqlite v1.44.2 ) @@ -33,6 +34,7 @@ require ( github.com/gosimple/unidecode v1.0.1 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/kr/text v0.2.0 // indirect github.com/leodido/go-urn v1.4.0 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect diff --git a/go.sum b/go.sum index 0b866873..0dfcad92 100644 --- a/go.sum +++ b/go.sum @@ -6,6 +6,7 @@ github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZw github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -52,6 +53,10 @@ github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnr github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= @@ -75,6 +80,8 @@ github.com/quic-go/quic-go v0.57.0 h1:AsSSrrMs4qI/hLrKlTH/TGQeTMY0ib1pAOX7vA3Adq github.com/quic-go/quic-go v0.57.0/go.mod h1:ly4QBAjHA2VhdnxhojRsCUOeJwKYg+taDlos92xb1+s= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= +github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -120,6 +127,8 @@ golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc google.golang.org/protobuf v1.36.9 h1:w2gp2mA27hUeUzj9Ex9FBjsBm40zfaDtEWow293U7Iw= google.golang.org/protobuf v1.36.9/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/core/markdown/errors.go b/internal/core/markdown/errors.go new file mode 100644 index 00000000..7050ea07 --- /dev/null +++ b/internal/core/markdown/errors.go @@ -0,0 +1,5 @@ +package markdown + +import "errors" + +var ErrFrontmatterParse = errors.New("frontmatter parse error") diff --git a/internal/core/markdown/frontmatter.go b/internal/core/markdown/frontmatter.go new file mode 100644 index 00000000..7ac94fad --- /dev/null +++ b/internal/core/markdown/frontmatter.go @@ -0,0 +1,185 @@ +package markdown + +import ( + "bytes" + "errors" + "os" + "strings" + + yaml "gopkg.in/yaml.v3" +) + +type Frontmatter struct { + LeafWikiID string `yaml:"leafwiki_id,omitempty" json:"id,omitempty"` + LeafWikiTitle string `yaml:"leafwiki_title,omitempty" json:"title,omitempty"` +} + +func (fm *Frontmatter) LoadFrontMatterFromContent(yamlPart string) (has bool, err error) { + if err := yaml.Unmarshal([]byte(yamlPart), fm); err != nil { + return true, errors.Join(ErrFrontmatterParse, err) + } + + /** Check for title also in frontmatter **/ + type titleOnlyStruct struct { + Title string `yaml:"title,omitempty"` + } + var tos titleOnlyStruct + if err := yaml.Unmarshal([]byte(yamlPart), &tos); err == nil { + if tos.Title != "" { + fm.LeafWikiTitle = tos.Title + } + } + + fm.LeafWikiID = fm.stripSingleAndDoubleQuotes(fm.LeafWikiID) + fm.LeafWikiTitle = fm.stripSingleAndDoubleQuotes(fm.LeafWikiTitle) + + return true, nil +} + +func (fm *Frontmatter) stripSingleAndDoubleQuotes(s string) string { + s = strings.Trim(s, `"`) + s = strings.Trim(s, `'`) + return s +} + +func (fm *Frontmatter) LoadFrontMatterFromFile(mdFilePath string) (has bool, err error) { + content, err := os.ReadFile(mdFilePath) + if err != nil { + return false, err + } + return fm.LoadFrontMatterFromContent(string(content)) +} + +func splitFrontmatter(md string) (yamlPart string, body string, has bool) { + // BOM-safe + normalize newlines + s := strings.TrimPrefix(md, "\ufeff") + s = strings.ReplaceAll(s, "\r\n", "\n") + s = strings.ReplaceAll(s, "\r", "\n") + + // Must start with '---' on the very first line + if !(s == "---" || strings.HasPrefix(s, "---\n")) { + return "", md, false + } + + // Find end of first line + firstNL := strings.IndexByte(s, '\n') + if firstNL == -1 { + // it's exactly "---" (or a single-line file) + return "", md, false + } + if strings.TrimSpace(s[:firstNL]) != "---" { + return "", md, false + } + + // Find closing delimiter on its own line: "\n---\n" or "\n---" at EOF + // We'll scan line-by-line using indices. + pos := firstNL + 1 + yamlStart := pos + + endDelimLineStart := -1 + endDelimLineEnd := -1 + + looksLikeYAML := false + + for pos <= len(s) { + // find end of current line + nextNL := strings.IndexByte(s[pos:], '\n') + var line string + var lineEnd int + if nextNL == -1 { + // last line + lineEnd = len(s) + line = s[pos:lineEnd] + } else { + lineEnd = pos + nextNL + line = s[pos:lineEnd] + } + + trim := strings.TrimSpace(line) + if trim == "---" { + endDelimLineStart = pos + endDelimLineEnd = lineEnd + break + } + + // Heuristic: at least one "key:" line => treat as YAML frontmatter + // Skip blanks/comments + if trim != "" && !strings.HasPrefix(trim, "#") { + if idx := strings.IndexByte(trim, ':'); idx > 0 { + key := strings.TrimSpace(trim[:idx]) + if key != "" && strings.IndexFunc(key, func(r rune) bool { + return !(r >= 'a' && r <= 'z' || + r >= 'A' && r <= 'Z' || + r >= '0' && r <= '9' || + r == '_' || r == '-') + }) == -1 { + looksLikeYAML = true + } + } + } + + // advance to next line + if nextNL == -1 { + pos = len(s) + 1 + } else { + pos = lineEnd + 1 + } + } + + // No closing delimiter found => treat as no frontmatter + if endDelimLineStart == -1 { + return "", md, false + } + + // If it doesn't look like YAML, treat as plain markdown (separator use-case) + if !looksLikeYAML { + return "", md, false + } + + // YAML is between yamlStart and the start of the closing delimiter line + yamlPart = s[yamlStart:endDelimLineStart] + yamlPart = strings.TrimSuffix(yamlPart, "\n") // nice-to-have + + // Body starts after the closing delimiter line (+ its trailing newline if present) + bodyStart := endDelimLineEnd + if bodyStart < len(s) && s[bodyStart:bodyStart+1] == "\n" { + bodyStart++ + } + body = s[bodyStart:] + + return yamlPart, body, true +} + +func ParseFrontmatter(md string) (fm Frontmatter, body string, has bool, err error) { + yamlPart, body, has := splitFrontmatter(md) + if !has { + return Frontmatter{}, md, false, nil + } + + if err := yaml.Unmarshal([]byte(yamlPart), &fm); err != nil { + return Frontmatter{}, md, true, errors.Join(ErrFrontmatterParse, err) + } + + fm.LeafWikiID = fm.stripSingleAndDoubleQuotes(fm.LeafWikiID) + fm.LeafWikiTitle = fm.stripSingleAndDoubleQuotes(fm.LeafWikiTitle) + return fm, body, true, nil +} + +func BuildMarkdownWithFrontmatter(fm Frontmatter, body string) (string, error) { + // Avoid emitting empty frontmatter like "{}" + if strings.TrimSpace(fm.LeafWikiID) == "" { + return body, nil + } + + b, err := yaml.Marshal(fm) + if err != nil { + return "", err + } + + var out bytes.Buffer + out.WriteString("---\n") + out.Write(b) // yaml.v3 usually ends with \n, which is fine + out.WriteString("---\n") + out.WriteString(body) + return out.String(), nil +} diff --git a/internal/core/markdown/frontmatter_test.go b/internal/core/markdown/frontmatter_test.go new file mode 100644 index 00000000..bd704cd6 --- /dev/null +++ b/internal/core/markdown/frontmatter_test.go @@ -0,0 +1,399 @@ +package markdown + +import ( + "errors" + "testing" +) + +func TestSplitFrontmatter(t *testing.T) { + tests := []struct { + name string + input string + wantFM string + wantBody string + wantHas bool + }{ + { + name: "no frontmatter", + input: "# Hello\nWorld\n", + wantFM: "", + wantBody: "# Hello\nWorld\n", + wantHas: false, + }, + { + name: "simple frontmatter", + input: "---\nleafwiki_id: abc123\n---\n# Title\n", + wantFM: "leafwiki_id: abc123", + wantBody: "# Title\n", + wantHas: true, + }, + { + name: "frontmatter with blank line", + input: "---\nleafwiki_id: abc123\n\n---\nBody\n", + wantFM: "leafwiki_id: abc123\n", + wantBody: "Body\n", + wantHas: true, + }, + { + name: "frontmatter with comments", + input: "---\n# comment\nleafwiki_id: abc123\n---\nBody\n", + wantFM: "# comment\nleafwiki_id: abc123", + wantBody: "Body\n", + wantHas: true, + }, + { + name: "only separator at top (no YAML)", + input: "---\nHello\nWorld\n---\nBody\n", + wantFM: "", + wantBody: "---\nHello\nWorld\n---\nBody\n", + wantHas: false, + }, + { + name: "horizontal rule later in document", + input: "# Title\n\n---\n\nText\n", + wantFM: "", + wantBody: "# Title\n\n---\n\nText\n", + wantHas: false, + }, + { + name: "unclosed frontmatter", + input: "---\nleafwiki_id: abc123\nBody\n", + wantFM: "", + wantBody: "---\nleafwiki_id: abc123\nBody\n", + wantHas: false, + }, + { + name: "empty frontmatter block", + input: "---\n---\nBody\n", + wantFM: "", + wantBody: "---\n---\nBody\n", + wantHas: false, + }, + { + name: "frontmatter with windows line endings", + input: "---\r\nleafwiki_id: abc123\r\n---\r\nBody\r\n", + wantFM: "leafwiki_id: abc123", + wantBody: "Body\n", + wantHas: true, + }, + { + name: "frontmatter with BOM", + input: "\ufeff---\nleafwiki_id: abc123\n---\nBody\n", + wantFM: "leafwiki_id: abc123", + wantBody: "Body\n", + wantHas: true, + }, + { + name: "yaml but no key colon (treated as no frontmatter)", + input: "---\n- item1\n- item2\n---\nBody\n", + wantFM: "", + wantBody: "---\n- item1\n- item2\n---\nBody\n", + wantHas: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fm, body, has := splitFrontmatter(tt.input) + + if has != tt.wantHas { + t.Fatalf("has = %v, want %v", has, tt.wantHas) + } + if fm != tt.wantFM { + t.Fatalf("frontmatter = %q, want %q", fm, tt.wantFM) + } + if body != tt.wantBody { + t.Fatalf("body = %q, want %q", body, tt.wantBody) + } + }) + } +} + +func TestParseFrontmatter(t *testing.T) { + tests := []struct { + name string + input string + wantFM Frontmatter + wantBody string + wantHas bool + wantErr bool + wantErrType error + }{ + { + name: "no frontmatter", + input: "# Hello\nWorld\n", + wantFM: Frontmatter{}, + wantBody: "# Hello\nWorld\n", + wantHas: false, + wantErr: false, + }, + { + name: "valid frontmatter with ID only", + input: "---\nleafwiki_id: abc123\n---\n# Title\nContent", + wantFM: Frontmatter{ + LeafWikiID: "abc123", + }, + wantBody: "# Title\nContent", + wantHas: true, + wantErr: false, + }, + { + name: "valid frontmatter with title only", + input: "---\nleafwiki_title: My Title\n---\n# Title\nContent", + wantFM: Frontmatter{ + LeafWikiTitle: "My Title", + }, + wantBody: "# Title\nContent", + wantHas: true, + wantErr: false, + }, + { + name: "valid frontmatter with both ID and title", + input: "---\nleafwiki_id: abc123\nleafwiki_title: My Title\n---\n# Title\nContent", + wantFM: Frontmatter{ + LeafWikiID: "abc123", + LeafWikiTitle: "My Title", + }, + wantBody: "# Title\nContent", + wantHas: true, + wantErr: false, + }, + { + name: "empty YAML frontmatter", + input: "---\nkey: value\n---\nBody", + wantFM: Frontmatter{}, + wantBody: "Body", + wantHas: true, + wantErr: false, + }, + { + name: "invalid YAML in frontmatter", + input: "---\nleafwiki_id: [invalid: yaml: structure\n---\nBody", + wantFM: Frontmatter{}, + wantBody: "---\nleafwiki_id: [invalid: yaml: structure\n---\nBody", + wantHas: true, + wantErr: true, + wantErrType: ErrFrontmatterParse, + }, + { + name: "malformed YAML - unclosed brackets", + input: "---\nleafwiki_id: {unclosed\n---\nBody", + wantFM: Frontmatter{}, + wantBody: "---\nleafwiki_id: {unclosed\n---\nBody", + wantHas: true, + wantErr: true, + wantErrType: ErrFrontmatterParse, + }, + { + name: "frontmatter with extra fields (ignored)", + input: "---\nleafwiki_id: abc123\nextra_field: ignored\n---\nBody", + wantFM: Frontmatter{ + LeafWikiID: "abc123", + }, + wantBody: "Body", + wantHas: true, + wantErr: false, + }, + { + name: "frontmatter with whitespace in values", + input: "---\nleafwiki_id: \" abc123 \"\nleafwiki_title: \" My Title \"\n---\nBody", + wantFM: Frontmatter{ + LeafWikiID: " abc123 ", + LeafWikiTitle: " My Title ", + }, + wantBody: "Body", + wantHas: true, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fm, body, has, err := ParseFrontmatter(tt.input) + + if (err != nil) != tt.wantErr { + t.Fatalf("ParseFrontmatter() error = %v, wantErr %v", err, tt.wantErr) + } + + if tt.wantErr && tt.wantErrType != nil { + if !errors.Is(err, tt.wantErrType) { + t.Fatalf("ParseFrontmatter() error = %v, want error type %v", err, tt.wantErrType) + } + } + + if has != tt.wantHas { + t.Fatalf("has = %v, want %v", has, tt.wantHas) + } + + if fm != tt.wantFM { + t.Fatalf("frontmatter = %+v, want %+v", fm, tt.wantFM) + } + + if body != tt.wantBody { + t.Fatalf("body = %q, want %q", body, tt.wantBody) + } + }) + } +} + +func TestBuildMarkdownWithFrontmatter(t *testing.T) { + tests := []struct { + name string + fm Frontmatter + body string + want string + wantErr bool + }{ + { + name: "empty frontmatter struct", + fm: Frontmatter{}, + body: "# Title\nContent", + want: "# Title\nContent", + }, + { + name: "frontmatter with empty ID", + fm: Frontmatter{ + LeafWikiID: "", + }, + body: "# Title\nContent", + want: "# Title\nContent", + }, + { + name: "frontmatter with whitespace-only ID", + fm: Frontmatter{ + LeafWikiID: " ", + }, + body: "# Title\nContent", + want: "# Title\nContent", + }, + { + name: "frontmatter with ID only", + fm: Frontmatter{ + LeafWikiID: "abc123", + }, + body: "# Title\nContent", + want: "---\nleafwiki_id: abc123\n---\n# Title\nContent", + }, + { + name: "frontmatter with title only", + fm: Frontmatter{ + LeafWikiTitle: "My Title", + }, + body: "# Title\nContent", + want: "# Title\nContent", + }, + { + name: "frontmatter with both ID and title", + fm: Frontmatter{ + LeafWikiID: "abc123", + LeafWikiTitle: "My Title", + }, + body: "# Title\nContent", + want: "---\nleafwiki_id: abc123\nleafwiki_title: My Title\n---\n# Title\nContent", + }, + { + name: "empty body", + fm: Frontmatter{ + LeafWikiID: "abc123", + }, + body: "", + want: "---\nleafwiki_id: abc123\n---\n", + }, + { + name: "body with newlines", + fm: Frontmatter{ + LeafWikiID: "abc123", + }, + body: "# Title\n\nParagraph 1\n\nParagraph 2\n", + want: "---\nleafwiki_id: abc123\n---\n# Title\n\nParagraph 1\n\nParagraph 2\n", + }, + { + name: "frontmatter with special characters in values", + fm: Frontmatter{ + LeafWikiID: "abc-123_xyz", + LeafWikiTitle: "Title: With Special & Characters", + }, + body: "Content", + want: "---\nleafwiki_id: abc-123_xyz\nleafwiki_title: 'Title: With Special & Characters'\n---\nContent", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := BuildMarkdownWithFrontmatter(tt.fm, tt.body) + + if (err != nil) != tt.wantErr { + t.Fatalf("BuildMarkdownWithFrontmatter() error = %v, wantErr %v", err, tt.wantErr) + } + + if got != tt.want { + t.Fatalf("BuildMarkdownWithFrontmatter() =\n%q\nwant:\n%q", got, tt.want) + } + }) + } +} + +func TestParseFrontmatterAndBuildRoundtrip(t *testing.T) { + tests := []struct { + name string + input string + wantBody string + }{ + { + name: "no frontmatter", + input: "# Title\nContent", + wantBody: "# Title\nContent", + }, + { + name: "with ID only", + input: "---\nleafwiki_id: abc123\n---\n# Title\nContent", + wantBody: "# Title\nContent", + }, + { + name: "with ID and title", + input: "---\nleafwiki_id: abc123\nleafwiki_title: My Title\n---\n# Title\nContent", + wantBody: "# Title\nContent", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Parse the original markdown + fm, body, has, err := ParseFrontmatter(tt.input) + if err != nil { + t.Fatalf("ParseFrontmatter() error = %v", err) + } + + if body != tt.wantBody { + t.Fatalf("body after parse = %q, want %q", body, tt.wantBody) + } + + // Rebuild markdown with frontmatter + rebuilt, err := BuildMarkdownWithFrontmatter(fm, body) + if err != nil { + t.Fatalf("BuildMarkdownWithFrontmatter() error = %v", err) + } + + // Parse again to verify + fm2, body2, has2, err := ParseFrontmatter(rebuilt) + if err != nil { + t.Fatalf("ParseFrontmatter() second parse error = %v", err) + } + + // Check that has flag is consistent + if has != has2 { + t.Fatalf("has flag changed: first=%v, second=%v", has, has2) + } + + // Check frontmatter is preserved + if fm != fm2 { + t.Fatalf("frontmatter changed: first=%+v, second=%+v", fm, fm2) + } + + // Check body is preserved + if body != body2 { + t.Fatalf("body changed: first=%q, second=%q", body, body2) + } + }) + } +} diff --git a/internal/core/markdown/markdown.go b/internal/core/markdown/markdown.go new file mode 100644 index 00000000..d34fde03 --- /dev/null +++ b/internal/core/markdown/markdown.go @@ -0,0 +1,114 @@ +package markdown + +import ( + "errors" + "os" + "path" + "path/filepath" + "strings" + + "github.com/perber/wiki/internal/core/shared" +) + +type MarkdownFile struct { + path string + content string + fm Frontmatter +} + +func LoadMarkdownFile(filePath string) (*MarkdownFile, error) { + if !strings.EqualFold(filepath.Ext(filePath), ".md") { + return nil, errors.New("file is not a markdown file") + } + + raw, err := os.ReadFile(filePath) + if err != nil { + return nil, err + } + + yamlPart, content, has := splitFrontmatter(string(raw)) + + var fm Frontmatter + + if has { + _, err = fm.LoadFrontMatterFromContent(string(yamlPart)) + if err != nil { + return nil, err + } + } else { + fm = Frontmatter{} + } + + return &MarkdownFile{ + path: filePath, + content: content, + fm: fm, + }, nil +} + +func NewMarkdownFile(filePath string, content string, fm Frontmatter) *MarkdownFile { + return &MarkdownFile{ + path: filePath, + content: content, + fm: fm, + } +} + +func (mf *MarkdownFile) WriteToFile() error { + fmContent, err := BuildMarkdownWithFrontmatter(mf.fm, string(mf.content)) + if err != nil { + return err + } + + mode := os.FileMode(0o644) + if st, err := os.Stat(mf.path); err == nil { + mode = st.Mode() + } + + return shared.WriteFileAtomic(mf.path, []byte(fmContent), mode) +} + +func (mf *MarkdownFile) GetTitle() (string, error) { + // 1. Frontmatter title + if mf.fm.LeafWikiTitle != "" { + return strings.TrimSpace(mf.fm.LeafWikiTitle), nil + } + + // 2. First heading + title, err := mf.extractTitleFromFirstHeading() + if err == nil && title != "" { + return title, nil + } + + // 3. Filename fallback + base := path.Base(mf.path) + name := strings.TrimSuffix(base, path.Ext(base)) + return name, nil +} + +func (mf *MarkdownFile) extractTitleFromFirstHeading() (string, error) { + lines := strings.Split(string(mf.content), "\n") + for _, line := range lines { + line = strings.TrimSpace(line) + if strings.HasPrefix(line, "# ") { + return strings.TrimSpace(strings.TrimPrefix(line, "# ")), nil + } + } + return "", errors.New("no heading found") +} + +func (mf *MarkdownFile) GetContent() string { + return string(mf.content) +} + +func (mf *MarkdownFile) GetPath() string { + return mf.path +} + +func (mf *MarkdownFile) GetFrontmatter() Frontmatter { + return mf.fm +} + +func (mf *MarkdownFile) SetFrontmatterID(id string) { + mf.fm.LeafWikiID = id +} diff --git a/internal/core/markdown/markdown_test.go b/internal/core/markdown/markdown_test.go new file mode 100644 index 00000000..8b669dca --- /dev/null +++ b/internal/core/markdown/markdown_test.go @@ -0,0 +1,92 @@ +package markdown + +import ( + "testing" + + "github.com/perber/wiki/internal/test_utils" +) + +func TestPlanner_extractTitleFromMDFile_FrontmatterTitleWins(t *testing.T) { + tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "t.md", "---\ntitle: FM Title\n---\n\n# Heading") + + mdFile, err := LoadMarkdownFile(abs) + if err != nil { + t.Fatalf("err: %v", err) + } + title, err := mdFile.GetTitle() + if err != nil { + t.Fatalf("err: %v", err) + } + if title != "FM Title" { + t.Fatalf("title = %q", title) + } +} + +func TestPlanner_extractTitleFromMDFile_LeafwikiTitle(t *testing.T) { + tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "t.md", "---\nleafwiki_title: Leaf\n---\n\n# Heading") + + mdFile, err := LoadMarkdownFile(abs) + if err != nil { + t.Fatalf("err: %v", err) + } + title, err := mdFile.GetTitle() + if err != nil { + t.Fatalf("err: %v", err) + } + if title != "Leaf" { + t.Fatalf("title = %q", title) + } +} + +func TestPlanner_extractTitleFromMDFile_FirstHeadingFallback(t *testing.T) { + tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "t.md", "no fm\n\n# Heading Only\nx") + + mdFile, err := LoadMarkdownFile(abs) + if err != nil { + t.Fatalf("err: %v", err) + } + title, err := mdFile.GetTitle() + if err != nil { + t.Fatalf("err: %v", err) + } + if title != "Heading Only" { + t.Fatalf("title = %q", title) + } +} + +func TestPlanner_extractTitleFromMDFile_FilenameFallback(t *testing.T) { + tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "some-file.md", "no title") + + mdFile, err := LoadMarkdownFile(abs) + if err != nil { + t.Fatalf("err: %v", err) + } + title, err := mdFile.GetTitle() + if err != nil { + t.Fatalf("err: %v", err) + } + if title != "some-file" { + t.Fatalf("title = %q", title) + } +} + +func TestLoadMarkdownFile_UppercaseExtension(t *testing.T) { + tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "README.MD", "# Uppercase Extension\n\nThis file has .MD extension") + + mdFile, err := LoadMarkdownFile(abs) + if err != nil { + t.Fatalf("expected no error for .MD extension, got: %v", err) + } + title, err := mdFile.GetTitle() + if err != nil { + t.Fatalf("err: %v", err) + } + if title != "Uppercase Extension" { + t.Fatalf("title = %q, want %q", title, "Uppercase Extension") + } +} diff --git a/internal/core/shared/utils.go b/internal/core/shared/utils.go index bddb8f87..f64375d4 100644 --- a/internal/core/shared/utils.go +++ b/internal/core/shared/utils.go @@ -8,6 +8,7 @@ import ( "mime/multipart" "os" "path" + "runtime" "github.com/teris-io/shortid" ) @@ -36,6 +37,17 @@ func GenerateRandomPassword(length int) (string, error) { return string(password), nil } +func atomicReplace(src, dst string) error { + // On Windows, os.Rename fails if dst already exists. + // On Unix, Rename is atomic and replaces dst. + if runtime.GOOS == "windows" { + if err := os.Remove(dst); err != nil && !os.IsNotExist(err) { + return fmt.Errorf("remove existing file: %w", err) + } + } + return os.Rename(src, dst) +} + // WriteFileAtomic writes data to filename atomically by writing to a temp file // in the same directory and then renaming it over the target. func WriteFileAtomic(filename string, data []byte, perm os.FileMode) error { @@ -73,7 +85,7 @@ func WriteFileAtomic(filename string, data []byte, perm os.FileMode) error { return fmt.Errorf("close temp file: %w", err) } - if err := os.Rename(tmpName, filename); err != nil { + if err := atomicReplace(tmpName, filename); err != nil { return fmt.Errorf("rename temp file: %w", err) } diff --git a/internal/core/tools/reconstruct_tree.go b/internal/core/tools/reconstruct_tree.go new file mode 100644 index 00000000..726dc9b3 --- /dev/null +++ b/internal/core/tools/reconstruct_tree.go @@ -0,0 +1,10 @@ +package tools + +import ( + "github.com/perber/wiki/internal/core/tree" +) + +func ReconstructTreeFromFS(storageDir string) error { + treeService := tree.NewTreeService(storageDir) + return treeService.ReconstructTreeFromFS() +} diff --git a/internal/core/tree/errors.go b/internal/core/tree/errors.go index 5572d618..63bb9693 100644 --- a/internal/core/tree/errors.go +++ b/internal/core/tree/errors.go @@ -1,6 +1,9 @@ package tree -import "errors" +import ( + "errors" + "fmt" +) var ErrPageNotFound = errors.New("page not found") var ErrParentNotFound = errors.New("parent not found") @@ -10,3 +13,70 @@ var ErrPageAlreadyExists = errors.New("page already exists") var ErrMovePageCircularReference = errors.New("circular reference detected") var ErrPageCannotBeMovedToItself = errors.New("page cannot be moved to itself") var ErrInvalidSortOrder = errors.New("invalid sort order") +var ErrFileNotFound = errors.New("file not found") +var ErrDrift = errors.New("drift detected") +var ErrInvalidOperation = errors.New("invalid operation") +var ErrConvertNotAllowed = errors.New("convert not allowed") + +// DriftError represents a drift error with detailed information. +type DriftError struct { + NodeID string + Kind NodeKind + Path string + Reason string +} + +func (e *DriftError) Error() string { + return "drift detected: nodeID=" + e.NodeID + ", kind=" + string(e.Kind) + ", path=" + e.Path + ", reason=" + e.Reason +} + +func (e *DriftError) Unwrap() error { + return ErrDrift +} + +// InvalidOpError represents an invalid operation error with details. +type InvalidOpError struct { + Op string + Reason string +} + +func (e *InvalidOpError) Error() string { return fmt.Sprintf("%s: %s", e.Op, e.Reason) } +func (e *InvalidOpError) Unwrap() error { return ErrInvalidOperation } + +// PageAlreadyExistsError: Konflikt bei Create/Move/Rename +type PageAlreadyExistsError struct { + Path string +} + +func (e *PageAlreadyExistsError) Error() string { return fmt.Sprintf("already exists: %s", e.Path) } +func (e *PageAlreadyExistsError) Unwrap() error { return ErrPageAlreadyExists } + +// NotFoundError represents a not found error with details. +type NotFoundError struct { + Resource string + ID string + Path string +} + +func (e *NotFoundError) Error() string { + return fmt.Sprintf("%s not found: %s", e.Resource, e.ID) +} + +func (e *NotFoundError) Unwrap() error { + return ErrPageNotFound +} + +// ConvertNotAllowedError represents a convert not allowed error with details. +type ConvertNotAllowedError struct { + From NodeKind + To NodeKind + Reason string +} + +func (e *ConvertNotAllowedError) Error() string { + return fmt.Sprintf("cannot convert from %s to %s: %s", e.From, e.To, e.Reason) +} + +func (e *ConvertNotAllowedError) Unwrap() error { + return ErrConvertNotAllowed +} diff --git a/internal/core/tree/node_store.go b/internal/core/tree/node_store.go new file mode 100644 index 00000000..f01fdc56 --- /dev/null +++ b/internal/core/tree/node_store.go @@ -0,0 +1,954 @@ +package tree + +import ( + "encoding/json" + "errors" + "fmt" + "io" + "log/slog" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/perber/wiki/internal/core/markdown" + "github.com/perber/wiki/internal/core/shared" +) + +func fileExists(p string) bool { + _, err := os.Stat(p) + return err == nil +} + +type ResolvedNode struct { + Kind NodeKind + DirPath string + FilePath string + HasContent bool +} + +type NodeStore struct { + storageDir string + log *slog.Logger + slugger *SlugService +} + +func NewNodeStore(storageDir string) *NodeStore { + return &NodeStore{ + storageDir: storageDir, + log: slog.Default().With("component", "NodeStore"), + slugger: NewSlugService(), + } +} + +// writeIDToMarkdownFile writes a leafwiki_id to a markdown file's frontmatter and logs errors if the write fails +func (f *NodeStore) writeIDToMarkdownFile(mdFile *markdown.MarkdownFile, id string) { + mdFile.SetFrontmatterID(id) + if err := mdFile.WriteToFile(); err != nil { + f.log.Error("could not write leafwiki_id back to file", "path", mdFile.GetPath(), "error", err) + } +} + +func (f *NodeStore) LoadTree(filename string) (*PageNode, error) { + fullPath := filepath.Join(f.storageDir, filename) + + // check if file exists + if _, err := os.Stat(fullPath); os.IsNotExist(err) { + return &PageNode{ + ID: "root", + Slug: "root", + Title: "root", + Parent: nil, + Position: 0, + Children: []*PageNode{}, + Kind: NodeKindSection, + }, nil + } + + file, err := os.Open(fullPath) + if err != nil { + return nil, fmt.Errorf("open tree file %s: %w", fullPath, err) + } + defer file.Close() + data, err := io.ReadAll(file) + + if err != nil { + return nil, fmt.Errorf("read tree file %s: %w", fullPath, err) + } + + tree := &PageNode{} + if err := json.Unmarshal(data, tree); err != nil { + return nil, fmt.Errorf("unmarshal tree data %s: %w", fullPath, err) + } + + if tree.ID == "root" && tree.Kind == "" { + tree.Kind = NodeKindSection + } + + // assigns parent to children + f.assignParentToChildren(tree) + + return tree, nil +} + +func (f *NodeStore) ReconstructTreeFromFS() (*PageNode, error) { + root := &PageNode{ + ID: "root", + Slug: "root", + Title: "root", + Parent: nil, + Position: 0, + Children: []*PageNode{}, + Kind: NodeKindSection, + } + + rootDir := filepath.Join(f.storageDir, "root") + + info, err := os.Stat(rootDir) + if err != nil { + if os.IsNotExist(err) { + // No on-disk content yet; return an empty root tree. + return root, nil + } + return nil, fmt.Errorf("stat root dir %s: %w", rootDir, err) + } + + if !info.IsDir() { + return nil, fmt.Errorf("root path %s is not a directory", rootDir) + } + + if err := f.reconstructTreeRecursive(rootDir, root); err != nil { + return nil, fmt.Errorf("reconstruct tree from fs: %w", err) + } + + return root, nil +} +func (f *NodeStore) reconstructTreeRecursive(currentPath string, parent *PageNode) error { + entries, err := os.ReadDir(currentPath) + if err != nil { + return fmt.Errorf("read dir %s: %w", currentPath, err) + } + + // stable, deterministic ordering (case-insensitive, with case-sensitive tie-breaker) + sort.SliceStable(entries, func(i, j int) bool { + li := strings.ToLower(entries[i].Name()) + lj := strings.ToLower(entries[j].Name()) + if li == lj { + return entries[i].Name() < entries[j].Name() + } + return li < lj + }) + + for _, entry := range entries { + name := entry.Name() + + // optional: skip hidden stuff + if strings.HasPrefix(name, ".") { + continue + } + + // defaults + title := name + id, err := shared.GenerateUniqueID() + if err != nil { + return fmt.Errorf("generate unique ID: %w", err) + } + + if entry.IsDir() { + // Normalize and validate the directory name as a slug + normalizedSlug := normalizeSlug(name) + if err := f.slugger.IsValidSlug(normalizedSlug); err != nil { + f.log.Error("skipping directory with invalid slug", "directory", name, "normalized", normalizedSlug, "error", err) + continue + } + + indexPath := filepath.Join(currentPath, name, "index.md") + if fileExists(indexPath) { + mdFile, err := markdown.LoadMarkdownFile(indexPath) + if err != nil { + f.log.Error("could not load index.md", "path", indexPath, "error", err) + // fall back to default title and generated ID, but still add the section and recurse + } else { + title, err = mdFile.GetTitle() + if err != nil { + f.log.Error("could not extract title from index.md", "path", indexPath, "error", err) + // keep default title; still add the section and recurse + } + if mdFile.GetFrontmatter().LeafWikiID != "" { + id = mdFile.GetFrontmatter().LeafWikiID + } else { + // Generated ID needs to be written back + f.writeIDToMarkdownFile(mdFile, id) + } + } + } + + child := &PageNode{ + ID: id, + Slug: normalizedSlug, + Title: title, + Parent: parent, + Position: len(parent.Children), + Children: []*PageNode{}, + Kind: NodeKindSection, + } + parent.Children = append(parent.Children, child) + + if err := f.reconstructTreeRecursive(filepath.Join(currentPath, name), child); err != nil { + return err + } + continue + } + + // file + ext := filepath.Ext(name) + if !strings.EqualFold(ext, ".md") { + continue + } + + // Normalize and validate the filename (without .md) as a slug + baseFilename := strings.TrimSuffix(name, ext) + // skip index.md (handled by section case) + if strings.EqualFold(baseFilename, "index") { + continue + } + normalizedSlug := normalizeSlug(baseFilename) + if err := f.slugger.IsValidSlug(normalizedSlug); err != nil { + f.log.Error("skipping file with invalid slug", "file", name, "normalized", normalizedSlug, "error", err) + continue + } + + filePath := filepath.Join(currentPath, name) + + mdFile, err := markdown.LoadMarkdownFile(filePath) + if err != nil { + f.log.Error("could not load markdown file", "path", filePath, "error", err) + continue + } + title, err = mdFile.GetTitle() + if err != nil { + f.log.Error("could not extract title from file", "path", filePath, "error", err) + continue + } + if mdFile.GetFrontmatter().LeafWikiID != "" { + id = mdFile.GetFrontmatter().LeafWikiID + } else { + // Generated ID needs to be written back + f.writeIDToMarkdownFile(mdFile, id) + } + + child := &PageNode{ + ID: id, + Slug: normalizedSlug, + Title: title, + Parent: parent, + Position: len(parent.Children), + Children: nil, + Kind: NodeKindPage, + } + parent.Children = append(parent.Children, child) + } + + return nil +} + +func (f *NodeStore) assignParentToChildren(parent *PageNode) { + for _, child := range parent.Children { + child.Parent = parent + f.assignParentToChildren(child) + } +} + +func (f *NodeStore) SaveTree(filename string, tree *PageNode) error { + if tree == nil { + return errors.New("a tree is required") + } + + fullPath := filepath.Join(f.storageDir, filename) + + data, err := json.Marshal(tree) + if err != nil { + return fmt.Errorf("could not marshal tree: %w", err) + } + + if err := shared.WriteFileAtomic(fullPath, data, 0o644); err != nil { + return fmt.Errorf("could not atomically write tree file: %w", err) + } + + return nil +} + +// CreatePage creates a new page file under the given parent entry +func (f *NodeStore) CreatePage(parentEntry *PageNode, newEntry *PageNode) error { + if parentEntry == nil { + return &InvalidOpError{Op: "CreatePage", Reason: "a parent entry is required"} + } + if newEntry == nil { + return &InvalidOpError{Op: "CreatePage", Reason: "a new entry is required"} + } + if newEntry.ID == "root" { + return &InvalidOpError{Op: "CreatePage", Reason: "cannot create root"} + } + + // Pages can only be created under sections (Option A) + if parentEntry.Kind != NodeKindSection { + return &InvalidOpError{Op: "CreatePage", Reason: "parent entry must be a section"} + } + if newEntry.Kind != NodeKindPage { + return &InvalidOpError{Op: "CreatePage", Reason: "new entry must be a page"} + } + + // Parent directory is determined by the tree path + parentDir, err := f.dirPathForNode(parentEntry) + if err != nil { + return err + } + + // Ensure the parent directory exists (idempotent) + if err := os.MkdirAll(parentDir, 0o755); err != nil { + return fmt.Errorf("could not ensure parent directory exists: %w", err) + } + + // Destination paths + destBase := filepath.Join(parentDir, newEntry.Slug) + destFile := destBase + ".md" + destDir := destBase + + // Reject if either a file OR a directory with same slug exists + if fileExists(destFile) || fileExists(destDir) { + return &PageAlreadyExistsError{Path: destBase} + } + + // Build and write file + fm := markdown.Frontmatter{LeafWikiID: newEntry.ID} + md, err := markdown.BuildMarkdownWithFrontmatter(fm, "# "+newEntry.Title+"\n") + if err != nil { + return fmt.Errorf("could not build markdown with frontmatter: %w", err) + } + + if err := shared.WriteFileAtomic(destFile, []byte(md), 0o644); err != nil { + return fmt.Errorf("could not create file: %w", err) + } + + return nil +} + +// CreateSection creates a new section (folder) under the given parent entry. +func (f *NodeStore) CreateSection(parentEntry *PageNode, newEntry *PageNode) error { + if parentEntry == nil { + return &InvalidOpError{Op: "CreateSection", Reason: "a parent entry is required"} + } + if newEntry == nil { + return &InvalidOpError{Op: "CreateSection", Reason: "a new entry is required"} + } + if newEntry.ID == "root" { + return &InvalidOpError{Op: "CreateSection", Reason: "cannot create root"} + } + + // Sections can only be created under sections (Option A) + if parentEntry.Kind != NodeKindSection { + return &InvalidOpError{Op: "CreateSection", Reason: "parent entry must be a section"} + } + if newEntry.Kind != NodeKindSection { + return &InvalidOpError{Op: "CreateSection", Reason: "new entry must be a section"} + } + + // Parent directory from tree path + parentDir, err := f.dirPathForNode(parentEntry) + if err != nil { + return err + } + + // Ensure parent directory exists (idempotent) + if err := os.MkdirAll(parentDir, 0o755); err != nil { + return fmt.Errorf("could not ensure parent directory exists: %w", err) + } + + // Destination base paths + destBase := filepath.Join(parentDir, newEntry.Slug) + destFile := destBase + ".md" + destDir := destBase + + // Reject if either a file OR a directory with same slug exists + if fileExists(destFile) || fileExists(destDir) { + return &PageAlreadyExistsError{Path: destBase} + } + + // Create the folder for the section (no index.md by default) + if err := os.MkdirAll(destDir, 0o755); err != nil { + return fmt.Errorf("could not create section folder: %w", err) + } + + return nil +} + +// UpsertContent updates the content of a page file on disk +// It creates the file if it does not exist also for sections (index.md) +func (f *NodeStore) UpsertContent(entry *PageNode, content string) error { + if entry == nil { + return &InvalidOpError{Op: "UpsertContent", Reason: "an entry is required"} + } + + // Determine expected write path + filePath, err := f.contentPathForNodeWrite(entry) + if err != nil { + return err + } + + mode := os.FileMode(0o644) + if st, err := os.Stat(filePath); err == nil { + mode = st.Mode() + } + + // Update the file content + fm := markdown.Frontmatter{LeafWikiID: strings.TrimSpace(entry.ID), LeafWikiTitle: strings.TrimSpace(entry.Title)} + contentWithFM, err := markdown.BuildMarkdownWithFrontmatter(fm, content) + if err != nil { + return fmt.Errorf("could not build markdown with frontmatter: %w", err) + } + if err := shared.WriteFileAtomic(filePath, []byte(contentWithFM), mode); err != nil { + return fmt.Errorf("could not write to file atomically: %w", err) + } + + return nil +} + +// MoveNode moves a page to a other node +func (f *NodeStore) MoveNode(entry *PageNode, parentEntry *PageNode) error { + if entry == nil { + return &InvalidOpError{Op: "MoveNode", Reason: "an entry is required"} + } + if parentEntry == nil { + return &InvalidOpError{Op: "MoveNode", Reason: "a parent entry is required"} + } + if entry.ID == "root" { + return &InvalidOpError{Op: "MoveNode", Reason: "cannot move root"} + } + + // Option A: children only under sections (defensive guard) + if parentEntry.Kind != NodeKindSection { + return &InvalidOpError{Op: "MoveNode", Reason: fmt.Sprintf("parent entry must be a section, got %q", parentEntry.Kind)} + } + + // Parent directory path from tree + parentDir, err := f.dirPathForNode(parentEntry) + if err != nil { + return err + } + + if err := os.MkdirAll(parentDir, 0o755); err != nil { + return fmt.Errorf("could not ensure parent directory exists: %w", err) + } + + // Current base path from tree (still at old location; TreeService updates Parent after success) + oldBase, err := f.dirPathForNode(entry) + if err != nil { + return err + } + oldFile := oldBase + ".md" + oldDir := oldBase + + // Destination base path (same slug, under new parent) + destBase := filepath.Join(parentDir, entry.Slug) + destFile := destBase + ".md" + destDir := destBase + + // Collision checks: refuse if destination already exists as file OR dir + if fileExists(destFile) || fileExists(destDir) { + return &PageAlreadyExistsError{Path: destBase} + } + + // STRICT: follow tree.Kind exactly (no disk fallbacks) + switch entry.Kind { + case NodeKindSection: + // src must be a directory + info, err := os.Stat(oldDir) + if err != nil { + if os.IsNotExist(err) { + f.log.Warn("move drift: expected folder missing", "nodeID", entry.ID, "expectedDir", oldDir) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: oldDir, Reason: "expected folder missing"} + } + return fmt.Errorf("stat source dir: %w", err) + } + if !info.IsDir() { + f.log.Warn("move drift: expected folder but found file", "nodeID", entry.ID, "expectedDir", oldDir) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: oldDir, Reason: "expected folder but found file"} + } + + if err := os.Rename(oldDir, destDir); err != nil { + return fmt.Errorf("could not move folder: %w", err) + } + + case NodeKindPage: + // src must be a file + info, err := os.Stat(oldFile) + if err != nil { + if os.IsNotExist(err) { + f.log.Warn("move drift: expected file missing", "nodeID", entry.ID, "expectedFile", oldFile) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: oldFile, Reason: "expected file missing"} + } + return fmt.Errorf("stat source file: %w", err) + } + if info.IsDir() { + f.log.Warn("move drift: expected file but found folder", "nodeID", entry.ID, "expectedFile", oldFile) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: oldFile, Reason: "expected file but found folder"} + } + + if err := os.Rename(oldFile, destFile); err != nil { + return fmt.Errorf("could not move file: %w", err) + } + + default: + return &InvalidOpError{Op: "MoveNode", Reason: fmt.Sprintf("unknown node kind: %q", entry.Kind)} + } + + return nil +} + +// DeletePage deletes a page file from disk +func (f *NodeStore) DeletePage(entry *PageNode) error { + if entry == nil { + return &InvalidOpError{Op: "DeletePage", Reason: "an entry is required"} + } + if entry.ID == "root" { + return &InvalidOpError{Op: "DeletePage", Reason: "cannot delete root"} + } + if entry.Kind != NodeKindPage && entry.Kind != "" { + return &InvalidOpError{Op: "DeletePage", Reason: "entry must be a page"} + } + + base, err := f.dirPathForNode(entry) + if err != nil { + return err + } + file := base + ".md" + + info, err := os.Stat(file) + if err != nil { + if os.IsNotExist(err) { + f.log.Warn("delete drift: expected page file missing", "nodeID", entry.ID, "expectedFile", file) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: file, Reason: "expected file missing"} + } + return fmt.Errorf("stat file: %w", err) + } + if info.IsDir() { + f.log.Warn("delete drift: expected file but found folder", "nodeID", entry.ID, "expectedFile", file) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: file, Reason: "expected file but found folder"} + } + + if err := os.Remove(file); err != nil { + return fmt.Errorf("could not delete file: %w", err) + } + + return nil +} + +// DeleteSection deletes a section folder from disk +func (f *NodeStore) DeleteSection(entry *PageNode) error { + if entry == nil { + return &InvalidOpError{Op: "DeleteSection", Reason: "an entry is required"} + } + if entry.ID == "root" { + return &InvalidOpError{Op: "DeleteSection", Reason: "cannot delete root"} + } + if entry.Kind != NodeKindSection { + return &InvalidOpError{Op: "DeleteSection", Reason: "entry must be a section"} + } + + dir, err := f.dirPathForNode(entry) + if err != nil { + return err + } + + info, err := os.Stat(dir) + if err != nil { + if os.IsNotExist(err) { + f.log.Warn("delete drift: expected section folder missing", "nodeID", entry.ID, "expectedDir", dir) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: dir, Reason: "expected folder missing"} + } + return fmt.Errorf("stat dir: %w", err) + } + if !info.IsDir() { + f.log.Warn("delete drift: expected folder but found file", "nodeID", entry.ID, "expectedDir", dir) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: dir, Reason: "expected folder but found file"} + } + + if err := os.RemoveAll(dir); err != nil { + return fmt.Errorf("could not delete folder: %w", err) + } + + return nil +} + +// RenameNode renames a node's slug on disk +func (f *NodeStore) RenameNode(entry *PageNode, newSlug string) error { + if entry == nil { + return &InvalidOpError{Op: "RenameNode", Reason: "an entry is required"} + } + if strings.TrimSpace(newSlug) == "" { + return &InvalidOpError{Op: "RenameNode", Reason: "new slug must not be empty"} + } + if entry.Slug == newSlug { + return nil + } + if entry.ID == "root" { + return &InvalidOpError{Op: "RenameNode", Reason: "cannot rename root"} + } + + // old base path computed from current entry (still has old slug) + oldBase, err := f.dirPathForNode(entry) + if err != nil { + return err + } + + // new base path: same parent dir, last segment replaced + newBase := filepath.Join(filepath.Dir(oldBase), newSlug) + + // destination collision checks + if fileExists(newBase+".md") || fileExists(newBase) { + return &PageAlreadyExistsError{Path: newBase} + } + // perform rename based on kind + switch entry.Kind { + case NodeKindSection: + srcDir := oldBase + dstDir := newBase + + // strict: source dir must exist and be dir + info, err := os.Stat(srcDir) + if err != nil { + if os.IsNotExist(err) { + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: srcDir, Reason: "expected folder missing"} + } + return fmt.Errorf("stat source dir: %w", err) + } + if !info.IsDir() { + // drift: tree says section but disk is not a folder + f.log.Warn("drift: tree says section but disk is not a folder", "srcDir", srcDir) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: srcDir, Reason: "expected folder but found file"} + } + + if err := os.Rename(srcDir, dstDir); err != nil { + return fmt.Errorf("could not rename folder: %w", err) + } + return nil + case NodeKindPage: + srcFile := oldBase + ".md" + dstFile := newBase + ".md" + + // strict: source file must exist + info, err := os.Stat(srcFile) + if err != nil { + if os.IsNotExist(err) { + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: srcFile, Reason: "expected file missing"} + } + return fmt.Errorf("stat source file: %w", err) + } + if info.IsDir() { + // drift: tree says page but disk is a dir + f.log.Warn("drift: tree says page but disk is a dir", "srcFile", srcFile) + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: srcFile, Reason: "expected file but found folder"} + } + + if err := os.Rename(srcFile, dstFile); err != nil { + return fmt.Errorf("could not rename file: %w", err) + } + return nil + + default: + return &InvalidOpError{Op: "RenameNode", Reason: fmt.Sprintf("unknown node kind: %q", entry.Kind)} + } +} + +// ReadPageRaw returns the raw content of a page including frontmatter +func (f *NodeStore) ReadPageRaw(entry *PageNode) (string, error) { + filePath, err := f.contentPathForNodeRead(entry) + if err != nil { + return "", err + } + + // Sections may legitimately have no content (missing index.md) + if entry.Kind == NodeKindSection { + if !fileExists(filePath) { + return "", nil + } + } else { + // Pages must have a content file + if !fileExists(filePath) { + return "", &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: filePath, Reason: "expected page file missing"} + } + } + + raw, err := os.ReadFile(filePath) + if err != nil { + return "", err + } + return string(raw), nil +} + +// ReadPageContent returns the content of a page +func (f *NodeStore) ReadPageContent(entry *PageNode) (string, error) { + raw, err := f.ReadPageRaw(entry) + if err != nil { + return "", err + } + _, content, _, err := markdown.ParseFrontmatter(string(raw)) + if err != nil { + return string(raw), err + } + return content, nil +} + +// SyncFrontmatterIfExists updates the frontmatter of a page file on disk if it exists +func (f *NodeStore) SyncFrontmatterIfExists(entry *PageNode) error { + if entry == nil { + return &InvalidOpError{Op: "SyncFrontmatterIfExists", Reason: "an entry is required"} + } + + // keine side effects: write-path NICHT verwenden (würde mkdir + bei Section implizit index.md Pfad liefern) + // aber read-path reicht, weil wir nur syncen, wenn Datei existiert + filePath, err := f.contentPathForNodeRead(entry) + if err != nil { + return err + } + + // Datei existiert? + if !fileExists(filePath) { + // Page: muss existieren + if entry.Kind == NodeKindPage || entry.Kind == "" { + return &DriftError{NodeID: entry.ID, Kind: entry.Kind, Path: filePath, Reason: "expected page file missing"} + } + // Section: kein index.md -> NICHT erzeugen + return nil + } + + raw, err := os.ReadFile(filePath) + if err != nil { + return fmt.Errorf("read content file: %w", err) + } + + fm, body, has, err := markdown.ParseFrontmatter(string(raw)) + if err != nil { + return fmt.Errorf("parse frontmatter: %w", err) + } + if !has { + fm = markdown.Frontmatter{} + } + + // Tree-SoT invariants + fm.LeafWikiID = strings.TrimSpace(entry.ID) + fm.LeafWikiTitle = strings.TrimSpace(entry.Title) + + out, err := markdown.BuildMarkdownWithFrontmatter(fm, body) + if err != nil { + return fmt.Errorf("build markdown: %w", err) + } + + mode := os.FileMode(0o644) + if st, err := os.Stat(filePath); err == nil { + mode = st.Mode() + } + + if err := shared.WriteFileAtomic(filePath, []byte(out), mode); err != nil { + return fmt.Errorf("write file atomically: %w", err) + } + return nil +} + +func (f *NodeStore) dirPathForNode(entry *PageNode) (string, error) { + if entry == nil { + return "", &InvalidOpError{Op: "dirPathForNode", Reason: "an entry is required"} + } + return filepath.Join(f.storageDir, GeneratePathFromPageNode(entry)), nil +} + +// contentPathForNodeRead returns the expected content file path for a node +// based purely on the tree Kind (NO side effects, NO mkdir): +// - page => .md +// - section => /index.md +func (f *NodeStore) contentPathForNodeRead(entry *PageNode) (string, error) { + if entry == nil { + return "", &InvalidOpError{Op: "contentPathForNodeRead", Reason: "an entry is required"} + } + + base, err := f.dirPathForNode(entry) + if err != nil { + return "", err + } + switch entry.Kind { + case NodeKindSection: + return filepath.Join(base, "index.md"), nil + case NodeKindPage: + return base + ".md", nil + default: + return "", &InvalidOpError{Op: "contentPathForNodeRead", Reason: fmt.Sprintf("unknown node kind: %q", entry.Kind)} + } +} + +// contentPathForNodeWrite returns the expected content file path for a node +// based purely on the tree Kind (MAY create dirs for sections): +// - page => .md +// - section => /index.md (ensures directory exists) +func (f *NodeStore) contentPathForNodeWrite(entry *PageNode) (string, error) { + if entry == nil { + return "", &InvalidOpError{Op: "contentPathForNodeWrite", Reason: "an entry is required"} + } + + base, err := f.dirPathForNode(entry) + if err != nil { + return "", err + } + switch entry.Kind { + case NodeKindSection: + if err := os.MkdirAll(base, 0o755); err != nil { + return "", fmt.Errorf("could not ensure folder: %w", err) + } + return filepath.Join(base, "index.md"), nil + + case NodeKindPage: + return base + ".md", nil + + default: + return "", &InvalidOpError{Op: "contentPathForNodeWrite", Reason: fmt.Sprintf("unknown node kind: %q", entry.Kind)} + } +} + +// resolveNode inspects the filesystem to determine if the given PageNode +// corresponds to a file or folder, returning a ResolvedNode with details. +// This function is only used for migration. Other parts of the system should rely on contentPathForNodeRead or contentPathForNodeWrite. +// If this function is used outside of migration, it may lead to inconsistencies between the tree and the actual filesystem state. +func (f *NodeStore) resolveNode(entry *PageNode) (*ResolvedNode, error) { + basePath, err := f.dirPathForNode(entry) + if err != nil { + return nil, err + } + + // 1) File? + if _, err := os.Stat(basePath + ".md"); err == nil { + f.log.Debug("resolved as file node", "filePath", basePath+".md") + return &ResolvedNode{ + Kind: NodeKindPage, + FilePath: basePath + ".md", + HasContent: true, + }, nil + } + + // 2) Folder? + if info, err := os.Stat(basePath); err == nil && info.IsDir() { + index := filepath.Join(basePath, "index.md") + if _, err := os.Stat(index); err == nil { + f.log.Debug("resolved as section node with content", "dirPath", basePath, "filePath", index) + return &ResolvedNode{ + Kind: NodeKindSection, + DirPath: basePath, + FilePath: index, + HasContent: true, + }, nil + } + f.log.Debug("resolved as section node without content", "dirPath", basePath) + return &ResolvedNode{ + Kind: NodeKindSection, + DirPath: basePath, + FilePath: "", // no index.md present + HasContent: false, + }, nil + } + + return nil, &NotFoundError{Resource: "node", Path: basePath, ID: entry.ID} +} + +// ConvertNode converts the on-disk representation between page <-> folder. +// NOTE: TreeService must ensure folder->page is allowed (no children). +func (f *NodeStore) ConvertNode(entry *PageNode, target NodeKind) error { + if entry == nil { + return &InvalidOpError{Op: "ConvertNode", Reason: "an entry is required"} + } + + base, err := f.dirPathForNode(entry) + if err != nil { + return err + } + filePath := base + ".md" + folderPath := base + indexPath := filepath.Join(folderPath, "index.md") + + switch target { + case NodeKindSection: + // page -> folder + if _, err := os.Stat(filePath); err == nil { + if err := os.MkdirAll(folderPath, 0o755); err != nil { + return fmt.Errorf("could not create folder: %w", err) + } + // keep content: .md -> /index.md + if err := os.Rename(filePath, indexPath); err != nil { + return fmt.Errorf("could not move page into folder: %w", err) + } + return nil + } + // already folder (or missing) -> ensure dir exists + if err := os.MkdirAll(folderPath, 0o755); err != nil { + return fmt.Errorf("could not ensure folder exists: %w", err) + } + return nil + + case NodeKindPage: + // folder -> page (strict, safe order) + info, err := os.Stat(folderPath) + if err != nil { + if os.IsNotExist(err) { + // nothing to do if folder doesn't exist + return nil + } + return err + } + if !info.IsDir() { + return &DriftError{NodeID: entry.ID, Kind: NodeKindSection, Path: folderPath, Reason: "expected folder but found file"} + } + + entries, err := os.ReadDir(folderPath) + if err != nil { + return err + } + + // allow only: + // - empty folder + // - folder with only index.md + allowed := true + for _, e := range entries { + name := e.Name() + if name == "index.md" { + continue + } + allowed = false + break + } + if !allowed { + return &ConvertNotAllowedError{From: NodeKindSection, To: NodeKindPage, Reason: "folder not empty"} + } + + // now do the move/create + if fileExists(indexPath) { + if err := os.Rename(indexPath, filePath); err != nil { + return fmt.Errorf("could not move index to page: %w", err) + } + } else { + fm := markdown.Frontmatter{LeafWikiID: entry.ID, LeafWikiTitle: entry.Title} + md, err := markdown.BuildMarkdownWithFrontmatter(fm, "") + if err != nil { + return err + } + if err := shared.WriteFileAtomic(filePath, []byte(md), 0o644); err != nil { + return fmt.Errorf("could not write page file: %w", err) + } + } + + // remove folder (must be empty now) + if err := os.Remove(folderPath); err != nil { + return err + } + return nil + + default: + return &InvalidOpError{Op: "ConvertNode", Reason: fmt.Sprintf("unknown target kind: %q", target)} + } +} diff --git a/internal/core/tree/node_store_reconstruct_test.go b/internal/core/tree/node_store_reconstruct_test.go new file mode 100644 index 00000000..6c316a77 --- /dev/null +++ b/internal/core/tree/node_store_reconstruct_test.go @@ -0,0 +1,322 @@ +package tree + +import ( + "path/filepath" + "sort" + "strings" + "testing" + + "github.com/perber/wiki/internal/core/markdown" +) + +func findChildBySlug(t *testing.T, parent *PageNode, slug string) *PageNode { + t.Helper() + for _, ch := range parent.Children { + if ch.Slug == slug { + return ch + } + } + t.Fatalf("child with slug %q not found under %q", slug, parent.Slug) + return nil +} + +func slugs(children []*PageNode) []string { + out := make([]string, 0, len(children)) + for _, c := range children { + out = append(out, c.Slug) + } + return out +} + +// --- tests --- + +func TestNodeStore_ReconstructTreeFromFS_EmptyStorage_ReturnsRoot(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + if tree == nil || tree.ID != "root" || tree.Kind != NodeKindSection { + t.Fatalf("unexpected root: %#v", tree) + } + if tree.Parent != nil { + t.Fatalf("expected root parent nil") + } + if len(tree.Children) != 0 { + t.Fatalf("expected root to have no children, got %d", len(tree.Children)) + } +} + +func TestNodeStore_ReconstructTreeFromFS_BuildsSectionsAndPages_SkipsIndexMdAsPage(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + // FS layout: + // /docs/index.md (section content) + // /docs/intro.md (page) + // /readme.md (page at root) + mustMkdir(t, filepath.Join(tmp, "root", "docs")) + + secIndex := `--- +leafwiki_id: sec-docs +leafwiki_title: Documentation +--- +# Section` + mustWriteFile(t, filepath.Join(tmp, "root", "docs", "index.md"), secIndex, 0o644) + + pageIntro := `--- +leafwiki_id: page-intro +leafwiki_title: Introduction +--- +# Intro` + mustWriteFile(t, filepath.Join(tmp, "root", "docs", "intro.md"), pageIntro, 0o644) + + rootPage := `--- +leafwiki_id: page-readme +leafwiki_title: Readme +--- +# Readme` + mustWriteFile(t, filepath.Join(tmp, "root", "readme.md"), rootPage, 0o644) + + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + // root has: docs(section), readme(page) + docs := findChildBySlug(t, tree, "docs") + if docs.Kind != NodeKindSection { + t.Fatalf("expected docs to be section, got %q", docs.Kind) + } + // section title/id from index frontmatter + if docs.ID != "sec-docs" { + t.Fatalf("expected docs.ID=sec-docs, got %q", docs.ID) + } + if docs.Title != "Documentation" { + t.Fatalf("expected docs.Title=Documentation, got %q", docs.Title) + } + + // ensure index.md wasn't turned into a page child + for _, ch := range docs.Children { + if ch.Slug == "index" { + t.Fatalf("index.md must be skipped as page, but found slug index") + } + } + + intro := findChildBySlug(t, docs, "intro") + if intro.Kind != NodeKindPage { + t.Fatalf("expected intro to be page, got %q", intro.Kind) + } + // page title/id from frontmatter + if intro.ID != "page-intro" { + t.Fatalf("expected intro.ID=page-intro, got %q", intro.ID) + } + if intro.Title != "Introduction" { + t.Fatalf("expected intro.Title=Introduction, got %q", intro.Title) + } + + readme := findChildBySlug(t, tree, "readme") + if readme.Kind != NodeKindPage { + t.Fatalf("expected readme to be page, got %q", readme.Kind) + } + if readme.ID != "page-readme" { + t.Fatalf("expected readme.ID=page-readme, got %q", readme.ID) + } + if readme.Title != "Readme" { + t.Fatalf("expected readme.Title=Readme, got %q", readme.Title) + } + + // parent pointers + if docs.Parent == nil || docs.Parent.ID != "root" { + t.Fatalf("expected docs parent root, got %#v", docs.Parent) + } + if intro.Parent == nil || intro.Parent.ID != docs.ID { + t.Fatalf("expected intro parent docs, got %#v", intro.Parent) + } +} + +func TestNodeStore_ReconstructTreeFromFS_SectionWithoutIndex_UsesDirNameAsTitle(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + // FS: /emptysec/ (no index.md) + mustMkdir(t, filepath.Join(tmp, "root", "emptysec")) + + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + sec := findChildBySlug(t, tree, "emptysec") + if sec.Kind != NodeKindSection { + t.Fatalf("expected section, got %q", sec.Kind) + } + // title defaults to folder name (per your code) + if sec.Title != "emptysec" { + t.Fatalf("expected title=emptysec, got %q", sec.Title) + } + if strings.TrimSpace(sec.ID) == "" { + t.Fatalf("expected some generated id, got empty") + } +} + +func TestNodeStore_ReconstructTreeFromFS_PageWithoutFrontmatter_FallsBackToHeadlineTitle(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + // FS: /plain.md (no fm) + mustWriteFile(t, filepath.Join(tmp, "root", "plain.md"), "# hello\n", 0o644) + + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + p := findChildBySlug(t, tree, "plain") + if p.Kind != NodeKindPage { + t.Fatalf("expected page, got %q", p.Kind) + } + + // title fallback should be headline + if p.Title != "hello" { + t.Fatalf("expected title fallback to slug 'plain', got %q", p.Title) + } + if strings.TrimSpace(p.ID) == "" { + // should still have generated id (unless you later decide to keep empty) + t.Fatalf("expected generated id, got empty") + } +} + +func TestNodeStore_ReconstructTreeFromFS_PositionsAreContiguous(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + // Create several files/dirs + mustWriteFile(t, filepath.Join(tmp, "root", "b.md"), "# b", 0o644) + mustWriteFile(t, filepath.Join(tmp, "root", "a.md"), "# a", 0o644) + mustMkdir(t, filepath.Join(tmp, "root", "zsec")) + + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + // Positions should be 0..n-1 regardless of order + seen := make([]int, 0, len(tree.Children)) + for _, ch := range tree.Children { + seen = append(seen, ch.Position) + } + sort.Ints(seen) + for i := range seen { + if seen[i] != i { + t.Fatalf("expected contiguous positions 0..%d, got %v (slugs=%v)", len(seen)-1, seen, slugs(tree.Children)) + } + } +} + + +func TestNodeStore_ReconstructTreeFromFS_WritesIDsBackToFiles(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + // Create files without leafwiki_id in frontmatter + mustWriteFile(t, filepath.Join(tmp, "root", "no-id.md"), "# No ID", 0o644) + mustMkdir(t, filepath.Join(tmp, "root", "section")) + mustWriteFile(t, filepath.Join(tmp, "root", "section", "index.md"), "# Section No ID", 0o644) + + // Run reconstruction + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + // Get the page and section nodes + page := findChildBySlug(t, tree, "no-id") + section := findChildBySlug(t, tree, "section") + + // Verify that IDs were generated + if page.ID == "" { + t.Fatalf("expected page to have generated ID, got empty") + } + if section.ID == "" { + t.Fatalf("expected section to have generated ID, got empty") + } + + // Now reload the files and check that IDs were written back + pageMd, err := markdown.LoadMarkdownFile(filepath.Join(tmp, "root", "no-id.md")) + if err != nil { + t.Fatalf("failed to reload page: %v", err) + } + if pageMd.GetFrontmatter().LeafWikiID != page.ID { + t.Fatalf("expected page frontmatter ID=%q, got %q", page.ID, pageMd.GetFrontmatter().LeafWikiID) + } + + sectionMd, err := markdown.LoadMarkdownFile(filepath.Join(tmp, "root", "section", "index.md")) + if err != nil { + t.Fatalf("failed to reload section index: %v", err) + } + if sectionMd.GetFrontmatter().LeafWikiID != section.ID { + t.Fatalf("expected section frontmatter ID=%q, got %q", section.ID, sectionMd.GetFrontmatter().LeafWikiID) + } + + // Run reconstruction again and verify IDs are stable (deterministic) + tree2, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("second ReconstructTreeFromFS: %v", err) + } + + page2 := findChildBySlug(t, tree2, "no-id") + section2 := findChildBySlug(t, tree2, "section") + + if page2.ID != page.ID { + t.Fatalf("expected deterministic page ID on second run: first=%q, second=%q", page.ID, page2.ID) + } + if section2.ID != section.ID { + t.Fatalf("expected deterministic section ID on second run: first=%q, second=%q", section.ID, section2.ID) + } +} + +func TestNodeStore_ReconstructTreeFromFS_SkipsInvalidSlugs(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + // Create files and directories with invalid slug names + // Uppercase letters should be normalized + mustWriteFile(t, filepath.Join(tmp, "root", "Valid Page.md"), "# Valid", 0o644) + mustWriteFile(t, filepath.Join(tmp, "root", "UPPERCASE.md"), "# Upper", 0o644) + mustMkdir(t, filepath.Join(tmp, "root", "Valid Section")) + mustWriteFile(t, filepath.Join(tmp, "root", "Valid Section", "index.md"), "# Section", 0o644) + + // Create a valid file to ensure the test still works + mustWriteFile(t, filepath.Join(tmp, "root", "valid.md"), "# Valid", 0o644) + + tree, err := store.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS: %v", err) + } + + // The valid file should be present with normalized slug + valid := findChildBySlug(t, tree, "valid") + if valid == nil { + t.Fatalf("expected valid page to be present") + } + + // Files with spaces and uppercase should be normalized + validPage := findChildBySlug(t, tree, "valid-page") + if validPage == nil { + t.Fatalf("expected 'Valid Page.md' to be normalized to 'valid-page'") + } + + uppercase := findChildBySlug(t, tree, "uppercase") + if uppercase == nil { + t.Fatalf("expected 'UPPERCASE.md' to be normalized to 'uppercase'") + } + + validSection := findChildBySlug(t, tree, "valid-section") + if validSection == nil { + t.Fatalf("expected 'Valid Section' directory to be normalized to 'valid-section'") + } +} \ No newline at end of file diff --git a/internal/core/tree/node_store_test.go b/internal/core/tree/node_store_test.go new file mode 100644 index 00000000..acb02c32 --- /dev/null +++ b/internal/core/tree/node_store_test.go @@ -0,0 +1,638 @@ +package tree + +import ( + "errors" + "os" + "path/filepath" + "runtime" + "strings" + "testing" + + "github.com/perber/wiki/internal/core/markdown" +) + +func mustWriteFile(t *testing.T, path string, data string, perm os.FileMode) { + t.Helper() + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + if err := os.WriteFile(path, []byte(data), perm); err != nil { + t.Fatalf("write file: %v", err) + } +} + +func mustMkdir(t *testing.T, path string) { + t.Helper() + if err := os.MkdirAll(path, 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } +} + +func TestNodeStore_LoadTree_MissingFile_ReturnsDefaultRoot(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + tree, err := store.LoadTree("missing.json") + if err != nil { + t.Fatalf("LoadTree: %v", err) + } + if tree == nil { + t.Fatalf("expected tree, got nil") + } + if tree.ID != "root" || tree.Slug != "root" || tree.Title != "root" { + t.Fatalf("unexpected default root: %#v", tree) + } + if tree.Kind != NodeKindSection { + t.Fatalf("expected root kind %q, got %q", NodeKindSection, tree.Kind) + } + if tree.Parent != nil { + t.Fatalf("expected root parent nil") + } + if len(tree.Children) != 0 { + t.Fatalf("expected no children") + } +} + +func TestNodeStore_SaveTree_ThenLoadTree_AssignsParents(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + tree := &PageNode{ + ID: "root", + Slug: "root", + Title: "root", + Kind: NodeKindSection, + Children: []*PageNode{ + { + ID: "s1", + Slug: "sec", + Title: "Section", + Kind: NodeKindSection, + Children: []*PageNode{ + { + ID: "p1", + Slug: "page", + Title: "Page", + Kind: NodeKindPage, + }, + }, + }, + }, + } + + if err := store.SaveTree("tree.json", tree); err != nil { + t.Fatalf("SaveTree: %v", err) + } + + loaded, err := store.LoadTree("tree.json") + if err != nil { + t.Fatalf("LoadTree: %v", err) + } + + sec := loaded.Children[0] + p := sec.Children[0] + + if sec.Parent == nil || sec.Parent.ID != "root" { + t.Fatalf("expected section parent root, got %#v", sec.Parent) + } + if p.Parent == nil || p.Parent.ID != "s1" { + t.Fatalf("expected page parent s1, got %#v", p.Parent) + } +} + +func TestNodeStore_SaveTree_NilTree_Error(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + if err := store.SaveTree("tree.json", nil); err == nil { + t.Fatalf("expected error, got nil") + } +} + +func TestNodeStore_CreateSection_CreatesFolder_NoIndexByDefault(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + sec := &PageNode{ID: "sec1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + if err := store.CreateSection(root, sec); err != nil { + t.Fatalf("CreateSection: %v", err) + } + + // expected folder: /root/docs + dir := filepath.Join(tmp, "root", "docs") + if st, err := os.Stat(dir); err != nil || !st.IsDir() { + t.Fatalf("expected section folder at %s", dir) + } + + // no index.md by default + index := filepath.Join(dir, "index.md") + if _, err := os.Stat(index); err == nil { + t.Fatalf("did not expect index.md to exist by default: %s", index) + } +} + +func TestNodeStore_CreateSection_KindGuards(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + rootPageWrong := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindPage} + sec := &PageNode{ID: "sec1", Slug: "docs", Title: "Docs", Kind: NodeKindSection} + + if err := store.CreateSection(rootPageWrong, sec); err == nil { + t.Fatalf("expected error when parent is not a section") + } + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + pageWrong := &PageNode{ID: "x", Slug: "x", Title: "X", Kind: NodeKindPage} + if err := store.CreateSection(root, pageWrong); err == nil { + t.Fatalf("expected error when new entry is not a section") + } +} + +func TestNodeStore_CreatePage_CreatesMarkdownWithFrontmatter(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + page := &PageNode{ID: "p1", Slug: "hello", Title: "Hello World", Kind: NodeKindPage, Parent: root} + + if err := store.CreatePage(root, page); err != nil { + t.Fatalf("CreatePage: %v", err) + } + + p := filepath.Join(tmp, "root", "hello.md") + raw, err := os.ReadFile(p) + if err != nil { + t.Fatalf("read created page: %v", err) + } + + fm, body, has, err := markdown.ParseFrontmatter(string(raw)) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) + } + if !has { + t.Fatalf("expected frontmatter") + } + if strings.TrimSpace(fm.LeafWikiID) != "p1" { + t.Fatalf("expected leafwiki_id p1, got %q", fm.LeafWikiID) + } + // CreatePage setzt nur ID im FM, Title kommt in den Body als H1 + if !strings.Contains(body, "# Hello World") { + t.Fatalf("expected H1 title in body, got: %q", body) + } +} + +func TestNodeStore_CreatePage_RejectsCollision_FileOrDir(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + + // collision as file + mustWriteFile(t, filepath.Join(tmp, "root", "dup.md"), "x", 0o644) + page := &PageNode{ID: "p1", Slug: "dup", Title: "Dup", Kind: NodeKindPage, Parent: root} + if err := store.CreatePage(root, page); err == nil { + t.Fatalf("expected PageAlreadyExistsError for existing file") + } + + // collision as dir + mustMkdir(t, filepath.Join(tmp, "root", "dupdir")) + page2 := &PageNode{ID: "p2", Slug: "dupdir", Title: "DupDir", Kind: NodeKindPage, Parent: root} + if err := store.CreatePage(root, page2); err == nil { + t.Fatalf("expected PageAlreadyExistsError for existing dir") + } +} + +func TestNodeStore_UpsertContent_Page_CreatesOrUpdates_PreservesMode(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + page := &PageNode{ID: "p1", Slug: "p", Title: "My Page", Kind: NodeKindPage, Parent: root} + + // create with custom mode + path := filepath.Join(tmp, "root", "p.md") + mustWriteFile(t, path, "# old", 0o600) + + if err := store.UpsertContent(page, "# new"); err != nil { + t.Fatalf("UpsertContent: %v", err) + } + + st, err := os.Stat(path) + if err != nil { + t.Fatalf("stat: %v", err) + } + // permissions should stay (best-effort; Windows behaves differently sometimes) + if runtime.GOOS != "windows" { + if st.Mode().Perm() != 0o600 { + t.Fatalf("expected perm 0600, got %o", st.Mode().Perm()) + } + } + + raw, _ := os.ReadFile(path) + fm, body, has, err := markdown.ParseFrontmatter(string(raw)) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) + } + if !has { + t.Fatalf("expected FM to exist") + } + if fm.LeafWikiID != "p1" { + t.Fatalf("expected id p1, got %q", fm.LeafWikiID) + } + if fm.LeafWikiTitle != "My Page" { + t.Fatalf("expected title 'My Page', got %q", fm.LeafWikiTitle) + } + if strings.TrimSpace(body) != "# new" { + t.Fatalf("expected body '# new', got %q", body) + } +} + +func TestNodeStore_UpsertContent_Section_WritesIndexAndCreatesDir(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + sec := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + if err := store.UpsertContent(sec, "# docs"); err != nil { + t.Fatalf("UpsertContent: %v", err) + } + + index := filepath.Join(tmp, "root", "docs", "index.md") + if _, err := os.Stat(index); err != nil { + t.Fatalf("expected index.md to exist: %v", err) + } +} + +func TestNodeStore_MoveNode_Page_MovesFileStrict(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + secA := &PageNode{ID: "a", Slug: "a", Title: "A", Kind: NodeKindSection, Parent: root} + secB := &PageNode{ID: "b", Slug: "b", Title: "B", Kind: NodeKindSection, Parent: root} + page := &PageNode{ID: "p1", Slug: "p", Title: "P", Kind: NodeKindPage, Parent: secA} + + // create source file at old location (tree-based path) + src := filepath.Join(tmp, "root", "a", "p.md") + mustWriteFile(t, src, "# hi", 0o644) + + if err := store.MoveNode(page, secB); err != nil { + t.Fatalf("MoveNode: %v", err) + } + + dst := filepath.Join(tmp, "root", "b", "p.md") + if _, err := os.Stat(dst); err != nil { + t.Fatalf("expected dest file: %v", err) + } + if _, err := os.Stat(src); !os.IsNotExist(err) { + t.Fatalf("expected src removed") + } +} + +func TestNodeStore_MoveNode_DriftWhenMissingSource(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + sec := &PageNode{ID: "s", Slug: "s", Title: "S", Kind: NodeKindSection, Parent: root} + page := &PageNode{ID: "p1", Slug: "p", Title: "P", Kind: NodeKindPage, Parent: sec} + + err := store.MoveNode(page, root) + if err == nil { + t.Fatalf("expected DriftError, got nil") + } + var de *DriftError + if !errors.As(err, &de) { + t.Fatalf("expected DriftError, got %T: %v", err, err) + } +} + +func TestNodeStore_DeletePage_RemovesFile_OrDriftIfMissing(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + page := &PageNode{ID: "p1", Slug: "p", Title: "P", Kind: NodeKindPage, Parent: root} + + path := filepath.Join(tmp, "root", "p.md") + mustWriteFile(t, path, "# x", 0o644) + + if err := store.DeletePage(page); err != nil { + t.Fatalf("DeletePage: %v", err) + } + if _, err := os.Stat(path); !os.IsNotExist(err) { + t.Fatalf("expected file deleted") + } + + // delete again -> drift + err := store.DeletePage(page) + if err == nil { + t.Fatalf("expected DriftError") + } +} + +func TestNodeStore_DeleteSection_RemovesFolderRecursive_OrDriftIfMissing(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + sec := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + dir := filepath.Join(tmp, "root", "docs") + mustMkdir(t, dir) + mustWriteFile(t, filepath.Join(dir, "index.md"), "# hi", 0o644) + mustWriteFile(t, filepath.Join(dir, "nested.txt"), "x", 0o644) + + if err := store.DeleteSection(sec); err != nil { + t.Fatalf("DeleteSection: %v", err) + } + if _, err := os.Stat(dir); !os.IsNotExist(err) { + t.Fatalf("expected folder deleted") + } + + err := store.DeleteSection(sec) + if err == nil { + t.Fatalf("expected DriftError") + } +} + +func TestNodeStore_RenameNode_PageAndSection(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + + // page rename + page := &PageNode{ID: "p1", Slug: "old", Title: "P", Kind: NodeKindPage, Parent: root} + oldFile := filepath.Join(tmp, "root", "old.md") + mustWriteFile(t, oldFile, "# x", 0o644) + + if err := store.RenameNode(page, "new"); err != nil { + t.Fatalf("RenameNode(page): %v", err) + } + if _, err := os.Stat(filepath.Join(tmp, "root", "new.md")); err != nil { + t.Fatalf("expected new page file") + } + + // section rename + sec := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + secDir := filepath.Join(tmp, "root", "docs") + mustMkdir(t, secDir) + mustWriteFile(t, filepath.Join(secDir, "index.md"), "# y", 0o644) + + if err := store.RenameNode(sec, "docs2"); err != nil { + t.Fatalf("RenameNode(section): %v", err) + } + if st, err := os.Stat(filepath.Join(tmp, "root", "docs2")); err != nil || !st.IsDir() { + t.Fatalf("expected renamed section dir") + } +} + +func TestNodeStore_ReadPageRaw_Section_NoIndex_ReturnsEmptyNil(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + sec := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + // folder exists, but no index.md + mustMkdir(t, filepath.Join(tmp, "root", "docs")) + + raw, err := store.ReadPageRaw(sec) + if err != nil { + t.Fatalf("ReadPageRaw: %v", err) + } + if raw != "" { + t.Fatalf("expected empty raw for section without index, got %q", raw) + } +} + +func TestNodeStore_ReadPageRaw_Page_Missing_IsDrift(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + page := &PageNode{ID: "p1", Slug: "p", Title: "P", Kind: NodeKindPage, Parent: root} + + _, err := store.ReadPageRaw(page) + if err == nil { + t.Fatalf("expected DriftError") + } +} + +func TestNodeStore_SyncFrontmatterIfExists_Page_UpdatesOrAddsFM(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + page := &PageNode{ID: "p1", Slug: "p", Title: "Title A", Kind: NodeKindPage, Parent: root} + + path := filepath.Join(tmp, "root", "p.md") + + // file without FM + mustWriteFile(t, path, "# Body\nHello", 0o644) + + if err := store.SyncFrontmatterIfExists(page); err != nil { + t.Fatalf("SyncFrontmatterIfExists: %v", err) + } + + raw := string(mustRead(t, path)) + fm, body, has, err := markdown.ParseFrontmatter(raw) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) + } + if !has { + t.Fatalf("expected fm after sync") + } + if fm.LeafWikiID != "p1" || fm.LeafWikiTitle != "Title A" { + t.Fatalf("unexpected fm: %#v", fm) + } + if strings.TrimSpace(body) != "# Body\nHello" { + t.Fatalf("body changed unexpectedly: %q", body) + } + + // update title and id + page.Title = "Title B" + page.ID = "p1b" + if err := store.SyncFrontmatterIfExists(page); err != nil { + t.Fatalf("SyncFrontmatterIfExists(update): %v", err) + } + raw2 := string(mustRead(t, path)) + fm2, body2, has2, err := markdown.ParseFrontmatter(raw2) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) + } + if !has2 || fm2.LeafWikiID != "p1b" || fm2.LeafWikiTitle != "Title B" { + t.Fatalf("expected updated fm, got %#v", fm2) + } + if strings.TrimSpace(body2) != "# Body\nHello" { + t.Fatalf("body changed unexpectedly on update: %q", body2) + } +} + +func TestNodeStore_SyncFrontmatterIfExists_Section_NoIndex_NoSideEffects(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + sec := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + // Do NOT create folder: sync must not mkdir via write-path; should return nil. + if err := store.SyncFrontmatterIfExists(sec); err != nil { + t.Fatalf("SyncFrontmatterIfExists(section): %v", err) + } + // Ensure no folder created implicitly + if _, err := os.Stat(filepath.Join(tmp, "root", "docs")); err == nil { + t.Fatalf("expected no side effects (folder created), but folder exists") + } +} + +func TestNodeStore_resolveNode_FileVsFolder(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + + page := &PageNode{ID: "p1", Slug: "p", Title: "P", Kind: NodeKindPage, Parent: root} + mustWriteFile(t, filepath.Join(tmp, "root", "p.md"), "# x", 0o644) + + r1, err := store.resolveNode(page) + if err != nil { + t.Fatalf("resolveNode(page): %v", err) + } + if r1.Kind != NodeKindPage || !r1.HasContent || !strings.HasSuffix(r1.FilePath, "p.md") { + t.Fatalf("unexpected resolved: %#v", r1) + } + + sec := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + secDir := filepath.Join(tmp, "root", "docs") + mustMkdir(t, secDir) + + r2, err := store.resolveNode(sec) + if err != nil { + t.Fatalf("resolveNode(sec without index): %v", err) + } + if r2.Kind != NodeKindSection || r2.HasContent { + t.Fatalf("expected section without content: %#v", r2) + } + + mustWriteFile(t, filepath.Join(secDir, "index.md"), "# idx", 0o644) + r3, err := store.resolveNode(sec) + if err != nil { + t.Fatalf("resolveNode(sec with index): %v", err) + } + if r3.Kind != NodeKindSection || !r3.HasContent || !strings.HasSuffix(r3.FilePath, "index.md") { + t.Fatalf("unexpected resolved: %#v", r3) + } +} + +func TestNodeStore_ConvertNode_PageToSection_MovesToIndex(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + entry := &PageNode{ID: "p1", Slug: "p", Title: "P", Kind: NodeKindPage, Parent: root} + + file := filepath.Join(tmp, "root", "p.md") + mustWriteFile(t, file, "# hi", 0o644) + + if err := store.ConvertNode(entry, NodeKindSection); err != nil { + t.Fatalf("ConvertNode(page->section): %v", err) + } + + index := filepath.Join(tmp, "root", "p", "index.md") + if _, err := os.Stat(index); err != nil { + t.Fatalf("expected index at %s", index) + } + if _, err := os.Stat(file); !os.IsNotExist(err) { + t.Fatalf("expected old file removed") + } +} + +func TestNodeStore_ConvertNode_SectionToPage_RejectsNonEmptyFolder(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + entry := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + dir := filepath.Join(tmp, "root", "docs") + mustMkdir(t, dir) + mustWriteFile(t, filepath.Join(dir, "index.md"), "# idx", 0o644) + mustWriteFile(t, filepath.Join(dir, "other.txt"), "nope", 0o644) + + err := store.ConvertNode(entry, NodeKindPage) + if err == nil { + t.Fatalf("expected ConvertNotAllowedError") + } + var cna *ConvertNotAllowedError + if !errors.As(err, &cna) { + t.Fatalf("expected ConvertNotAllowedError, got %T: %v", err, err) + } +} + +func TestNodeStore_ConvertNode_SectionToPage_WithIndex_MovesAndRemovesFolder(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + entry := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + dir := filepath.Join(tmp, "root", "docs") + mustMkdir(t, dir) + mustWriteFile(t, filepath.Join(dir, "index.md"), "# idx", 0o644) + + if err := store.ConvertNode(entry, NodeKindPage); err != nil { + t.Fatalf("ConvertNode(section->page): %v", err) + } + + pageFile := filepath.Join(tmp, "root", "docs.md") + if _, err := os.Stat(pageFile); err != nil { + t.Fatalf("expected page file: %v", err) + } + if _, err := os.Stat(dir); !os.IsNotExist(err) { + t.Fatalf("expected folder removed") + } +} + +func TestNodeStore_ConvertNode_SectionToPage_NoIndex_CreatesEmptyPageWithFM(t *testing.T) { + tmp := t.TempDir() + store := NewNodeStore(tmp) + + root := &PageNode{ID: "root", Slug: "root", Title: "root", Kind: NodeKindSection} + entry := &PageNode{ID: "s1", Slug: "docs", Title: "Docs", Kind: NodeKindSection, Parent: root} + + dir := filepath.Join(tmp, "root", "docs") + mustMkdir(t, dir) + // empty folder, no index.md + + if err := store.ConvertNode(entry, NodeKindPage); err != nil { + t.Fatalf("ConvertNode(section->page no index): %v", err) + } + + pageFile := filepath.Join(tmp, "root", "docs.md") + raw := string(mustRead(t, pageFile)) + fm, _, has, err := markdown.ParseFrontmatter(raw) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) + } + if !has || fm.LeafWikiID != "s1" || fm.LeafWikiTitle != "Docs" { + t.Fatalf("unexpected fm: %#v", fm) + } + if _, err := os.Stat(dir); !os.IsNotExist(err) { + t.Fatalf("expected folder removed") + } +} + +func mustRead(t *testing.T, path string) []byte { + t.Helper() + b, err := os.ReadFile(path) + if err != nil { + t.Fatalf("read %s: %v", path, err) + } + return b +} diff --git a/internal/core/tree/page_node.go b/internal/core/tree/page_node.go index 20bf0d1d..50f448fb 100644 --- a/internal/core/tree/page_node.go +++ b/internal/core/tree/page_node.go @@ -1,6 +1,13 @@ package tree -import "time" +import ( + "crypto/sha256" + "encoding/binary" + "encoding/hex" + "io" + "sort" + "time" +) // PageMetadata holds simple metadata for a page. type PageMetadata struct { @@ -10,6 +17,13 @@ type PageMetadata struct { LastAuthorID string `json:"lastAuthorId"` } +type NodeKind string + +const ( + NodeKindPage NodeKind = "page" + NodeKindSection NodeKind = "section" +) + // PageNode represents a single node in the tree // It has an ID, a parent, a path, and children // The ID is a unique identifier for the entry @@ -21,6 +35,7 @@ type PageNode struct { Position int `json:"position"` // Position is the position of the entry Parent *PageNode `json:"-"` + Kind NodeKind `json:"kind"` // Kind is the kind of the node (page or folder) Metadata PageMetadata `json:"metadata"` // Metadata holds metadata about the page } @@ -60,3 +75,94 @@ func (p *PageNode) CalculatePath() string { } return p.Parent.CalculatePath() + "/" + p.Slug } + +// Hash returns a deterministic hash of the node and all descendants. +// Parent is intentionally ignored to avoid cycles. +func (p *PageNode) Hash() string { + sum := p.hashSum(true) // includeMetadata = true + return hex.EncodeToString(sum[:]) +} + +func (p *PageNode) hashSum(includeMetadata bool) [32]byte { + h := sha256.New() + + // depth-first, deterministic + // Write directly to hash to avoid buffering entire tree in memory + p.writeHashPayload(h, includeMetadata) + + var out [32]byte + copy(out[:], h.Sum(nil)) + return out +} + +func (p *PageNode) writeHashPayload(w io.Writer, includeMetadata bool) { + // Node fields (parent excluded) + writeString(w, "id") + writeString(w, p.ID) + writeString(w, "title") + writeString(w, p.Title) + writeString(w, "slug") + writeString(w, p.Slug) + writeString(w, "kind") + writeString(w, string(p.Kind)) + writeString(w, "position") + writeInt64(w, int64(p.Position)) + + if includeMetadata { + writeString(w, "meta.createdAt") + writeTime(w, p.Metadata.CreatedAt) + writeString(w, "meta.updatedAt") + writeTime(w, p.Metadata.UpdatedAt) + writeString(w, "meta.creatorId") + writeString(w, p.Metadata.CreatorID) + writeString(w, "meta.lastAuthorId") + writeString(w, p.Metadata.LastAuthorID) + } + + // Children: enforce stable order (Position, then ID as tie-breaker) + children := make([]*PageNode, 0, len(p.Children)) + children = append(children, p.Children...) + + sort.SliceStable(children, func(i, j int) bool { + if children[i] == nil || children[j] == nil { + return children[j] != nil // nils last + } + if children[i].Position != children[j].Position { + return children[i].Position < children[j].Position + } + return children[i].ID < children[j].ID + }) + + writeString(w, "children.count") + writeInt64(w, int64(len(children))) + + for _, ch := range children { + if ch == nil { + writeString(w, "child.nil") + continue + } + // Separator for safety + writeString(w, "child.begin") + ch.writeHashPayload(w, includeMetadata) + writeString(w, "child.end") + } +} + +func writeString(w io.Writer, s string) { + // length-prefixed string (uint32 len + bytes) + _ = binary.Write(w, binary.BigEndian, uint32(len(s))) + _, _ = io.WriteString(w, s) +} + +func writeInt64(w io.Writer, v int64) { + _ = binary.Write(w, binary.BigEndian, v) +} + +func writeTime(w io.Writer, t time.Time) { + // stable: UnixNano in UTC (Zero => 0) + if t.IsZero() { + writeInt64(w, 0) + return + } + writeInt64(w, t.UTC().UnixNano()) +} diff --git a/internal/core/tree/page_store.go b/internal/core/tree/page_store.go deleted file mode 100644 index c412e065..00000000 --- a/internal/core/tree/page_store.go +++ /dev/null @@ -1,305 +0,0 @@ -package tree - -import ( - "encoding/json" - "errors" - "fmt" - "io" - "os" - "path" - "strings" - - "github.com/perber/wiki/internal/core/shared" -) - -type PageStore struct { - storageDir string -} - -func NewPageStore(storageDir string) *PageStore { - return &PageStore{ - storageDir: storageDir, - } -} - -func (f *PageStore) LoadTree(filename string) (*PageNode, error) { - fullPath := path.Join(f.storageDir, filename) - - // check if file exists - if _, err := os.Stat(fullPath); os.IsNotExist(err) { - return &PageNode{ - ID: "root", - Slug: "root", - Title: "root", - Parent: nil, - Position: 0, - Children: []*PageNode{}, - }, nil - } - - file, err := os.Open(fullPath) - if err != nil { - return nil, fmt.Errorf("could not open tree file") - } - defer file.Close() - data, err := io.ReadAll(file) - - if err != nil { - return nil, fmt.Errorf("could not read tree file") - } - - tree := &PageNode{} - if err := json.Unmarshal(data, tree); err != nil { - return nil, fmt.Errorf("could not unmarshal tree data") - } - - // assigns parent to children - f.assignParentToChildren(tree) - - return tree, nil -} - -func (f *PageStore) assignParentToChildren(parent *PageNode) { - for _, child := range parent.Children { - child.Parent = parent - f.assignParentToChildren(child) - } -} - -func (f *PageStore) SaveTree(filename string, tree *PageNode) error { - if tree == nil { - return errors.New("a tree is required") - } - - fullPath := path.Join(f.storageDir, filename) - - data, err := json.Marshal(tree) - if err != nil { - return fmt.Errorf("could not marshal tree: %v", err) - } - - if err := shared.WriteFileAtomic(fullPath, data, 0o644); err != nil { - return fmt.Errorf("could not atomically write tree file: %v", err) - } - - return nil -} - -func (f *PageStore) CreatePage(parentEntry *PageNode, newEntry *PageNode) error { - if parentEntry == nil { - return errors.New("a parent entry is required") - } - - if newEntry == nil { - return errors.New("a new entry is required") - } - - // Retrieving the path of the parent entry - parentPath := path.Join(f.storageDir, GeneratePathFromPageNode(parentEntry)) - - if err := EnsurePageIsFolder(f.storageDir, GeneratePathFromPageNode(parentEntry)); err != nil { - return fmt.Errorf("could not prepare parent folder: %w", err) - } - - // Check if the folder exists - if _, err := os.Stat(parentPath); os.IsNotExist(err) { - if err := os.MkdirAll(parentPath, 0755); err != nil { - return fmt.Errorf("could not create folder: %v", err) - } - // Create an empty index.md file / Fallback! - indexPath := path.Join(parentPath, "index.md") - if err := shared.WriteFileAtomic(indexPath, []byte(""), 0o644); err != nil { - return fmt.Errorf("could not create index file: %v", err) - } - } - - // Now we can create the new entry as a file in the parent folder - newFilename := path.Join(parentPath, newEntry.Slug+".md") - if _, err := os.Stat(newFilename); err == nil { - // The file already exists - return fmt.Errorf("file already exists: %v", err) - } - - // Create the file - content := []byte("# " + newEntry.Title + "\n") - if err := shared.WriteFileAtomic(newFilename, content, 0o644); err != nil { - return fmt.Errorf("could not create file: %v", err) - } - - return nil -} - -func (f *PageStore) DeletePage(entry *PageNode) error { - if entry == nil { - return errors.New("an entry is required") - } - - // Retrieving the path of the entry - entryPath := path.Join(f.storageDir, GeneratePathFromPageNode(entry)) - - // Check if the entry is a folder - if info, err := os.Stat(entryPath); err == nil && info.IsDir() { - // Delete the folder - if err := os.RemoveAll(entryPath); err != nil { - return fmt.Errorf("could not delete folder: %v", err) - } - } - - // Check if the entry is a file - if _, err := os.Stat(entryPath + ".md"); err == nil { - // Delete the file - if err := os.Remove(entryPath + ".md"); err != nil { - return fmt.Errorf("could not delete file: %v", err) - } - } - - if entry.Parent != nil { - _ = FoldPageFolderIfEmpty(f.storageDir, GeneratePathFromPageNode(entry.Parent)) - } - - return nil -} - -func (f *PageStore) UpdatePage(entry *PageNode, slug string, content string) error { - if entry == nil { - return errors.New("an entry is required") - } - - filePath, err := f.getFilePath(entry) - if err != nil { - return fmt.Errorf("could not get file path: %v", err) - } - - // Check if the file exists - file, err := os.Stat(filePath) - if err != nil { - return fmt.Errorf("file not found: %v", err) - } - - mode := file.Mode() - - // Update the file content - if err := shared.WriteFileAtomic(filePath, []byte(content), mode); err != nil { - return fmt.Errorf("could not write to file atomically: %v", err) - } - - // We need to check if the slug has changed - if entry.Slug != slug { - // Get the old path - oldPath := path.Join(f.storageDir, GeneratePathFromPageNode(entry)) - // Split the path - parts := strings.Split(oldPath, "/") - // Create the new path - newPath := strings.Join(parts[:len(parts)-1], "/") + "/" + slug - // Check if the old path is a directory - // If it is a directory, we need to rename the directory - // If it is a file, we need to rename the file - if _, err := os.Stat(oldPath); err == nil { - // Rename the directory - if err := os.Rename(oldPath, newPath); err != nil { - return fmt.Errorf("could not rename directory: %v", err) - } - - return nil - } - // Rename the file - if err := os.Rename(oldPath+".md", newPath+".md"); err != nil { - return fmt.Errorf("could not rename file: %v", err) - } - } - - return nil -} - -// MovePage moves a page to a other node -func (f *PageStore) MovePage(entry *PageNode, parentEntry *PageNode) error { - if entry == nil { - return errors.New("an entry is required") - } - - if parentEntry == nil { - return errors.New("a parent entry is required") - } - - // Retrieving the path of the entry - parentPath := path.Join(f.storageDir, GeneratePathFromPageNode(parentEntry)) - - if err := EnsurePageIsFolder(f.storageDir, GeneratePathFromPageNode(parentEntry)); err != nil { - return fmt.Errorf("could not convert parent to folder: %w", err) - } - - // now we have created the folder, we can move the entry to the new parent - currentPath := path.Join(f.storageDir, GeneratePathFromPageNode(entry)) - - // Check if the entry is a file - var src, dest string - if _, err := os.Stat(currentPath + ".md"); err == nil { - src = currentPath + ".md" - dest = path.Join(parentPath, entry.Slug+".md") - } else { - src = currentPath - dest = path.Join(parentPath, entry.Slug) - } - - // Move the file to the parentPath - if err := os.Rename(src, dest); err != nil { - return fmt.Errorf("could not move file: %v", err) - } - - if entry.Parent != nil { - _ = FoldPageFolderIfEmpty(f.storageDir, GeneratePathFromPageNode(entry.Parent)) - } - - return nil -} - -// ReadPageContent returns the content of a page -func (f *PageStore) ReadPageContent(entry *PageNode) (string, error) { - if entry == nil { - return "", errors.New("an entry is required") - } - - filePath, err := f.getFilePath(entry) - if err != nil { - return "", fmt.Errorf("could not get file path: %v", err) - } - - // Check if the file exists - if _, err := os.Stat(filePath); err != nil { - return "", fmt.Errorf("file not found: %v", err) - } - - // Read the file content - file, err := os.Open(filePath) - if err != nil { - return "", fmt.Errorf("could not open file: %v", err) - } - defer file.Close() - - content, err := io.ReadAll(file) - if err != nil { - return "", fmt.Errorf("could not read file: %v", err) - } - return string(content), nil -} - -func (f *PageStore) getFilePath(entry *PageNode) (string, error) { - if entry == nil { - return "", errors.New("an entry is required") - } - - // Retrieving the path of the entry - entryPath := path.Join(f.storageDir, GeneratePathFromPageNode(entry)) - - // Check if the entry is a file - if _, err := os.Stat(entryPath + ".md"); err == nil { - return entryPath + ".md", nil - } - - // Check if the entry is a folder - if info, err := os.Stat(entryPath); err == nil && info.IsDir() { - return path.Join(entryPath, "index.md"), nil - } - - return "", errors.New("file not found") -} diff --git a/internal/core/tree/page_store_test.go b/internal/core/tree/page_store_test.go deleted file mode 100644 index 90f9129b..00000000 --- a/internal/core/tree/page_store_test.go +++ /dev/null @@ -1,657 +0,0 @@ -package tree - -import ( - "os" - "path/filepath" - "testing" -) - -func TestPageStore_CreatePage(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - root := &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{}, - } - - page := &PageNode{ - ID: "page-1", - Title: "Hello World", - Slug: "hello-world", - Parent: root, - } - - err := store.CreatePage(root, page) - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - - // Prüfen, ob Datei existiert - expectedFile := filepath.Join(tmpDir, "root", "hello-world.md") - if _, err := os.Stat(expectedFile); os.IsNotExist(err) { - t.Errorf("Expected file was not created: %v", expectedFile) - } - - // Optional: Inhalt checken - content, err := os.ReadFile(expectedFile) - if err != nil { - t.Fatalf("Failed to read file: %v", err) - } - - expected := "# Hello World\n" - if string(content) != expected { - t.Errorf("Unexpected file content. Got: %q, Expected: %q", string(content), expected) - } -} - -func TestPageStore_CreatePage_WithFallbackCreatesIndex(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - // Simuliere vorhandene root.md-Datei (die in Folder + index.md migriert werden soll) - rootFile := filepath.Join(tmpDir, "root.md") - if err := os.WriteFile(rootFile, []byte("# Root File"), 0644); err != nil { - t.Fatalf("Failed to create root.md: %v", err) - } - - root := &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{}, - } - - page := &PageNode{ - ID: "page-2", - Title: "Subpage", - Slug: "subpage", - Parent: root, - } - - err := store.CreatePage(root, page) - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - - // Erwartet: root/index.md existiert - indexPath := filepath.Join(tmpDir, "root", "index.md") - if _, err := os.Stat(indexPath); os.IsNotExist(err) { - t.Errorf("Expected fallback index.md file not found: %v", indexPath) - } -} - -func TestPageStore_CreatePage_DeepHierarchy(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - // Baue tiefe Baumstruktur: root → arch → project1 - root := &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{}, - } - arch := &PageNode{ - ID: "arch", - Title: "Architecture", - Slug: "architecture", - Parent: root, - Children: []*PageNode{}, - } - project := &PageNode{ - ID: "project1", - Title: "Project One", - Slug: "project-one", - Parent: arch, - Children: []*PageNode{}, - } - page := &PageNode{ - ID: "final", - Title: "Deep Content", - Slug: "deep-content", - Parent: project, - } - - // Füge Struktur hinzu (simulate parent nodes) - root.Children = []*PageNode{arch} - arch.Children = []*PageNode{project} - project.Children = []*PageNode{} - - // Versuche, Page in tiefem Pfad anzulegen - err := store.CreatePage(project, page) - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - - // Prüfe, ob Datei wirklich existiert - expectedPath := filepath.Join(tmpDir, "root", "architecture", "project-one", "deep-content.md") - if _, err := os.Stat(expectedPath); os.IsNotExist(err) { - t.Errorf("Expected file not found at deep path: %s", expectedPath) - } -} - -func TestPageStore_CreatePage_NilChecks(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - validParent := &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{}, - } - - // Fall 1: Parent ist nil - err := store.CreatePage(nil, &PageNode{ID: "1", Title: "Page", Slug: "page"}) - if err == nil { - t.Error("Expected error when parent is nil, got nil") - } - - // Fall 2: Page ist nil - err = store.CreatePage(validParent, nil) - if err == nil { - t.Error("Expected error when page is nil, got nil") - } -} - -func TestPageStore_DeletePage_File(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "p1", - Title: "Page", - Slug: "page", - } - - // Erstelle Datei manuell - filePath := filepath.Join(tmpDir, "page.md") - if err := os.WriteFile(filePath, []byte("# Page"), 0644); err != nil { - t.Fatalf("Failed to create page file: %v", err) - } - - // DeletePage aufrufen - if err := store.DeletePage(page); err != nil { - t.Fatalf("DeletePage failed: %v", err) - } - - // Prüfen, ob Datei weg ist - if _, err := os.Stat(filePath); !os.IsNotExist(err) { - t.Errorf("Expected file to be deleted: %v", filePath) - } -} - -func TestPageStore_DeletePage_Directory(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - // Seite mit Ordnerstruktur - page := &PageNode{ - ID: "p2", - Title: "Folder Page", - Slug: "folder-page", - } - - dirPath := filepath.Join(tmpDir, "folder-page") - if err := os.MkdirAll(dirPath, 0755); err != nil { - t.Fatalf("Failed to create folder: %v", err) - } - - // Simuliere index.md - indexFile := filepath.Join(dirPath, "index.md") - if err := os.WriteFile(indexFile, []byte("# Index"), 0644); err != nil { - t.Fatalf("Failed to create index.md: %v", err) - } - - // DeletePage aufrufen - if err := store.DeletePage(page); err != nil { - t.Fatalf("DeletePage failed: %v", err) - } - - // Ordner darf nicht mehr existieren - if _, err := os.Stat(dirPath); !os.IsNotExist(err) { - t.Errorf("Expected folder to be deleted: %v", dirPath) - } -} - -func TestPageStore_DeletePage_NilEntry(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - err := store.DeletePage(nil) - if err == nil { - t.Errorf("Expected error when passing nil entry, got none") - } -} - -func TestPageStore_UpdatePage_ContentOnly(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "p1", - Title: "My Page", - Slug: "my-page", - } - - filePath := filepath.Join(tmpDir, "my-page.md") - if err := os.WriteFile(filePath, []byte("# Old Content"), 0644); err != nil { - t.Fatalf("Failed to create page file: %v", err) - } - - newContent := "# New Content" - err := store.UpdatePage(page, "my-page", newContent) - if err != nil { - t.Fatalf("UpdatePage failed: %v", err) - } - - data, err := os.ReadFile(filePath) - if err != nil { - t.Fatalf("Could not read updated file: %v", err) - } - - if string(data) != newContent { - t.Errorf("Expected content %q, got %q", newContent, string(data)) - } -} - -func TestPageStore_UpdatePage_WithSlugChange_File(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "p2", - Title: "Old Page", - Slug: "old-page", - } - - oldPath := filepath.Join(tmpDir, "old-page.md") - if err := os.WriteFile(oldPath, []byte("# Old Page"), 0644); err != nil { - t.Fatalf("Failed to create old page: %v", err) - } - - newSlug := "new-page" - err := store.UpdatePage(page, newSlug, "# Updated Content") - if err != nil { - t.Fatalf("UpdatePage failed: %v", err) - } - - newPath := filepath.Join(tmpDir, "new-page.md") - if _, err := os.Stat(newPath); os.IsNotExist(err) { - t.Errorf("Expected renamed file at: %v", newPath) - } -} - -func TestPageStore_UpdatePage_WithSlugChange_Directory(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "p3", - Title: "Old Dir", - Slug: "old-dir", - } - - oldDir := filepath.Join(tmpDir, "old-dir") - if err := os.MkdirAll(oldDir, 0755); err != nil { - t.Fatalf("Failed to create old directory: %v", err) - } - - indexFile := filepath.Join(oldDir, "index.md") - if err := os.WriteFile(indexFile, []byte("# Index"), 0644); err != nil { - t.Fatalf("Failed to create index.md: %v", err) - } - - newSlug := "new-dir" - err := store.UpdatePage(page, newSlug, "# New Index") - if err != nil { - t.Fatalf("UpdatePage failed: %v", err) - } - - newDir := filepath.Join(tmpDir, "new-dir") - if _, err := os.Stat(newDir); os.IsNotExist(err) { - t.Errorf("Expected renamed directory: %v", newDir) - } -} - -func TestPageStore_UpdatePage_InvalidEntry(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - err := store.UpdatePage(nil, "slug", "content") - if err == nil { - t.Errorf("Expected error when updating nil entry, got none") - } -} - -func TestPageStore_UpdatePage_FileNotFound(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "p4", - Title: "Ghost Page", - Slug: "ghost", - } - - err := store.UpdatePage(page, "ghost", "# Nothing here") - if err == nil { - t.Errorf("Expected error when updating non-existent file, got none") - } -} - -func TestPageStore_MovePage_FileToFolder(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ID: "1", Title: "Page A", Slug: "a"} - pagePath := filepath.Join(tmpDir, "a.md") - if err := os.WriteFile(pagePath, []byte("# Page A"), 0644); err != nil { - t.Fatalf("Setup failed: %v", err) - } - - parent := &PageNode{ID: "root", Title: "Root", Slug: "root"} - parentFile := filepath.Join(tmpDir, "root.md") - if err := os.WriteFile(parentFile, []byte("# Root Page"), 0644); err != nil { - t.Fatalf("Failed to create root.md: %v", err) - } - - err := store.MovePage(page, parent) - if err != nil { - t.Fatalf("MovePage failed: %v", err) - } - - newPath := filepath.Join(tmpDir, "root", "a.md") - if _, err := os.Stat(newPath); os.IsNotExist(err) { - t.Errorf("Expected file to be moved to: %v", newPath) - } -} - -func TestPageStore_MovePage_FolderToFolder(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - // Ordnerstruktur erstellen - page := &PageNode{ID: "2", Title: "Docs", Slug: "docs"} - pagePath := filepath.Join(tmpDir, "docs") - if err := os.MkdirAll(pagePath, 0755); err != nil { - t.Fatalf("Failed to create source folder: %v", err) - } - - // Zielordner - target := &PageNode{ID: "root", Title: "Root", Slug: "root"} - targetPath := filepath.Join(tmpDir, "root") - if err := os.MkdirAll(targetPath, 0755); err != nil { - t.Fatalf("Failed to create target folder: %v", err) - } - - err := store.MovePage(page, target) - if err != nil { - t.Fatalf("MovePage failed: %v", err) - } - - newPath := filepath.Join(targetPath, "docs") - if _, err := os.Stat(newPath); os.IsNotExist(err) { - t.Errorf("Expected moved folder not found at: %v", newPath) - } -} - -func TestPageStore_MovePage_InvalidNilInput(t *testing.T) { - store := NewPageStore(t.TempDir()) - - err := store.MovePage(nil, nil) - if err == nil { - t.Errorf("Expected error on nil inputs, got none") - } -} - -func TestPageStore_MovePage_PreventCircularMove(t *testing.T) { - // Setup - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - // Erzeuge einfache Baumstruktur: root → parent → child - root := &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{}, - } - - parent := &PageNode{ - ID: "parent", - Title: "Parent", - Slug: "parent", - Parent: root, - Children: []*PageNode{}, - } - - child := &PageNode{ - ID: "child", - Title: "Child", - Slug: "child", - Parent: parent, - Children: []*PageNode{}, - } - - root.Children = []*PageNode{parent} - parent.Children = []*PageNode{child} - - // 🧪 Versuch: parent in child verschieben → sollte fehlschlagen (wenn später implementiert) - err := store.MovePage(parent, child) - - // Aktuell kein Check implementiert → nur Hinweis - if err == nil { - t.Log("[TODO] Expected failure when moving parent into child (circular), but got none.") - // Optionale manuelle Fehlerausgabe, damit es sichtbar bleibt - t.Fail() - } -} - -func TestPageStore_ReadPageContent_File(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "read1", - Title: "Read Me", - Slug: "read-me", - } - - filePath := filepath.Join(tmpDir, "read-me.md") - expected := "# Hello from file" - if err := os.WriteFile(filePath, []byte(expected), 0644); err != nil { - t.Fatalf("Failed to write test file: %v", err) - } - - content, err := store.ReadPageContent(page) - if err != nil { - t.Fatalf("ReadPageContent failed: %v", err) - } - - if content != expected { - t.Errorf("Expected content %q, got %q", expected, content) - } -} - -func TestPageStore_ReadPageContent_Index(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "read2", - Title: "Folder Page", - Slug: "folder-page", - } - - folder := filepath.Join(tmpDir, "folder-page") - if err := os.MkdirAll(folder, 0755); err != nil { - t.Fatalf("Failed to create folder: %v", err) - } - - indexPath := filepath.Join(folder, "index.md") - expected := "# Hello from index" - if err := os.WriteFile(indexPath, []byte(expected), 0644); err != nil { - t.Fatalf("Failed to write index file: %v", err) - } - - content, err := store.ReadPageContent(page) - if err != nil { - t.Fatalf("ReadPageContent failed: %v", err) - } - - if content != expected { - t.Errorf("Expected content %q, got %q", expected, content) - } -} - -func TestPageStore_ReadPageContent_NotFound(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - page := &PageNode{ - ID: "read3", - Title: "Missing Page", - Slug: "missing", - } - - _, err := store.ReadPageContent(page) - if err == nil { - t.Errorf("Expected error for missing file, got none") - } -} - -func TestPageStore_SaveAndLoadTree_AssignsParent(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - tree := &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{ - { - ID: "child-1", - Title: "Child 1", - Slug: "child-1", - Children: []*PageNode{ - { - ID: "grandchild-1", - Title: "Grandchild 1", - Slug: "grandchild-1", - }, - }, - }, - }, - } - - if err := store.SaveTree("tree.json", tree); err != nil { - t.Fatalf("SaveTree failed: %v", err) - } - - loaded, err := store.LoadTree("tree.json") - if err != nil { - t.Fatalf("LoadTree failed: %v", err) - } - - child := loaded.Children[0] - grandchild := child.Children[0] - - if child.Parent == nil || child.Parent.ID != loaded.ID { - t.Errorf("Child node's parent not assigned correctly") - } - - if grandchild.Parent == nil || grandchild.Parent.ID != child.ID { - t.Errorf("Grandchild node's parent not assigned correctly") - } -} - -func TestPageStore_LoadTree_MissingFile(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - tree, err := store.LoadTree("nonexistent.json") - if err != nil { - t.Fatalf("Expected default tree, got error: %v", err) - } - - if tree.ID != "root" { - t.Errorf("Expected root ID, got %q", tree.ID) - } -} - -func TestPageStore_LoadTree_InvalidJSON(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - path := filepath.Join(tmpDir, "tree.json") - if err := os.WriteFile(path, []byte("invalid-json"), 0644); err != nil { - t.Fatalf("Failed to write corrupt file: %v", err) - } - - _, err := store.LoadTree("tree.json") - if err == nil { - t.Error("Expected error when loading invalid JSON, got none") - } -} - -func TestPageStore_getFilePath(t *testing.T) { - tmpDir := t.TempDir() - store := NewPageStore(tmpDir) - - // Case 1: .md file exists - fileNode := &PageNode{ - ID: "file1", - Slug: "page", - Title: "Page", - } - filePath := filepath.Join(tmpDir, "page.md") - if err := os.WriteFile(filePath, []byte("Content"), 0644); err != nil { - t.Fatalf("Failed to create .md file: %v", err) - } - - path, err := store.getFilePath(fileNode) - if err != nil { - t.Fatalf("Expected file path for .md file, got error: %v", err) - } - if path != filePath { - t.Errorf("Unexpected path. Got: %s, Expected: %s", path, filePath) - } - - // Case 2: Directory with index.md - dirNode := &PageNode{ - ID: "dir1", - Slug: "folder", - Title: "Folder", - } - dirPath := filepath.Join(tmpDir, "folder") - indexPath := filepath.Join(dirPath, "index.md") - if err := os.MkdirAll(dirPath, 0755); err != nil { - t.Fatalf("Failed to create folder: %v", err) - } - if err := os.WriteFile(indexPath, []byte("Index content"), 0644); err != nil { - t.Fatalf("Failed to write index.md: %v", err) - } - - path, err = store.getFilePath(dirNode) - if err != nil { - t.Fatalf("Expected index.md path, got error: %v", err) - } - if path != indexPath { - t.Errorf("Unexpected path. Got: %s, Expected: %s", path, indexPath) - } - - // Case 3: Not found - invalidNode := &PageNode{ - ID: "missing", - Slug: "does-not-exist", - Title: "Missing", - } - _, err = store.getFilePath(invalidNode) - if err == nil { - t.Errorf("Expected error for missing file, got nil") - } -} diff --git a/internal/core/tree/path_lookup.go b/internal/core/tree/path_lookup.go index 93dc8a73..2ac6cd84 100644 --- a/internal/core/tree/path_lookup.go +++ b/internal/core/tree/path_lookup.go @@ -2,9 +2,11 @@ package tree // PathLookup helpers for LookupPath() type PathSegment struct { - Slug string `json:"slug"` - Exists bool `json:"exists"` - ID *string `json:"id,omitempty"` + Slug string `json:"slug"` + Exists bool `json:"exists"` + Kind *NodeKind `json:"kind,omitempty"` + Title *string `json:"title,omitempty"` + ID *string `json:"id,omitempty"` } type PathLookup struct { diff --git a/internal/core/tree/schema.go b/internal/core/tree/schema.go index ebc7e59d..42fdd8d3 100644 --- a/internal/core/tree/schema.go +++ b/internal/core/tree/schema.go @@ -7,7 +7,7 @@ import ( "path/filepath" ) -const CurrentSchemaVersion = 1 +const CurrentSchemaVersion = 2 type SchemaInfo struct { Version int `json:"version"` diff --git a/internal/core/tree/slug_service.go b/internal/core/tree/slug_service.go index a4bdbeca..25bf1bd9 100644 --- a/internal/core/tree/slug_service.go +++ b/internal/core/tree/slug_service.go @@ -82,6 +82,31 @@ func hasSlugConflict(parent *PageNode, currentID string, slug string) bool { return false } +func (s *SlugService) NormalizePath(path string, validate bool) (string, error) { + segments := make([]string, 0) + + for _, segment := range strings.Split(path, string("/")) { + + if segment == "" { + continue + } + + if validate { + // normalize first and then validate + // the validation will ensure that the segment is a proper slug + seg := normalizeSlug(segment) + if err := s.IsValidSlug(seg); err != nil { + return "", fmt.Errorf("segment '%s' is not a valid slug: %v", segment, err) + } + segment = seg + } else { + segment = normalizeSlug(segment) + } + segments = append(segments, segment) + } + return strings.Join(segments, string("/")), nil +} + func (s *SlugService) NormalizeFilename(filename string) string { ext := filepath.Ext(filename) base := filename[:len(filename)-len(ext)] diff --git a/internal/core/tree/slug_service_test.go b/internal/core/tree/slug_service_test.go index b4c45aa4..e65df416 100644 --- a/internal/core/tree/slug_service_test.go +++ b/internal/core/tree/slug_service_test.go @@ -74,3 +74,32 @@ func TestGenerateUniqueSlug_SpecialCharacters(t *testing.T) { t.Errorf("Expected 'aepfel-and-baume', got '%s'", result) } } + +func TestNormalizePath(t *testing.T) { + s := NewSlugService() + + tests := []struct { + input string + expected string + }{ + {"folder/subfolder/page.md", "folder/subfolder/page-md"}, + {"My Folder/Another Folder/Page Title.md", "my-folder/another-folder/page-title-md"}, + {"Äpfel & Bäume/Über uns.md", "apfel-and-baume/uber-uns-md"}, + {"folder//subfolder///page.md", "folder/subfolder/page-md"}, + {"/leading/slash/page.md", "leading/slash/page-md"}, + {"only-file.md", "only-file-md"}, + } + + for _, test := range tests { + + result, err := s.NormalizePath(test.input, true) + if err != nil { + t.Errorf("Unexpected error for input %v: %v", test.input, err) + continue + } + + if result != test.expected { + t.Errorf("For input %v, expected %v but got %v", test.input, test.expected, result) + } + } +} diff --git a/internal/core/tree/tree_service.go b/internal/core/tree/tree_service.go index 222a20f4..07c04161 100644 --- a/internal/core/tree/tree_service.go +++ b/internal/core/tree/tree_service.go @@ -1,14 +1,16 @@ package tree import ( + "errors" "fmt" - "log" + "log/slog" "os" "sort" "strings" "sync" "time" + "github.com/perber/wiki/internal/core/markdown" "github.com/perber/wiki/internal/core/shared" ) @@ -18,7 +20,8 @@ type TreeService struct { storageDir string treeFilename string tree *PageNode - store *PageStore + store *NodeStore + log *slog.Logger mu sync.RWMutex } @@ -29,7 +32,8 @@ func NewTreeService(storageDir string) *TreeService { storageDir: storageDir, treeFilename: "tree.json", tree: nil, - store: NewPageStore(storageDir), + store: NewNodeStore(storageDir), + log: slog.Default().With("component", "TreeService"), } } @@ -47,43 +51,66 @@ func (t *TreeService) LoadTree() error { } // Load the schema version - log.Printf("Checking schema version...") + t.log.Info("Checking schema version...") schema, err := loadSchema(t.storageDir) if err != nil { - log.Printf("Error loading schema: %v", err) + t.log.Error("Error loading schema", "error", err) return err } if schema.Version < CurrentSchemaVersion { - log.Printf("Migrating schema from version %d to %d...", schema.Version, CurrentSchemaVersion) + t.log.Info("Migrating schema", "fromVersion", schema.Version, "toVersion", CurrentSchemaVersion) if err := t.migrate(schema.Version); err != nil { - log.Printf("Error migrating schema: %v", err) + t.log.Error("Error migrating schema", "error", err) return err } - - // migration was successful, update schema version - if err := saveSchema(t.storageDir, CurrentSchemaVersion); err != nil { - log.Printf("Error saving schema: %v", err) - return err - } - - return t.saveTreeLocked() } return err } func (t *TreeService) migrate(fromVersion int) error { - if fromVersion < 1 { - if err := t.migrateTreeToV1Schema(); err != nil { + + for v := fromVersion; v < CurrentSchemaVersion; v++ { + switch v { + case 0: + if err := t.migrateToV1(); err != nil { + t.log.Error("Error migrating to v1", "error", err) + return err + } + case 1: + if err := t.migrateToV2(); err != nil { + t.log.Error("Error migrating to v2", "error", err) + return err + } + } + + // Save the tree after each migration step + if err := t.saveTreeLocked(); err != nil { + t.log.Error("Error saving tree after migration", "version", v+1, "error", err) + return err + } + + // Update the schema version file + if err := saveSchema(t.storageDir, v+1); err != nil { + t.log.Error("Error saving schema", "version", v+1, "error", err) return err } } return nil } -func (t *TreeService) migrateTreeToV1Schema() error { - // Backfill metadata for all pages +func (t *TreeService) migrateToV1() error { + if t.tree == nil { + return ErrTreeNotLoaded + } + + return t.backfillMetadataLocked() +} + +// backfillMetadataLocked backfills CreatedAt and UpdatedAt timestamps for all nodes from filesystem +// The caller must ensure that t.tree is not nil and must hold the appropriate lock before calling this method +func (t *TreeService) backfillMetadataLocked() error { var backfillMetadata func(node *PageNode) error backfillMetadata = func(node *PageNode) error { // If CreatedAt is already set, assume metadata was backfilled and skip @@ -93,25 +120,34 @@ func (t *TreeService) migrateTreeToV1Schema() error { // Read creation and modification times from the filesystem // and set them in the metadata - filePath, err := t.store.getFilePath(node) + + r, err := t.store.resolveNode(node) if err != nil { - // Log the error and continue - // We still want to backfill metadata for other nodes - // but we cannot do it for this node - log.Printf("could not get file path for node %s: %v", node.ID, err) + // Log and continue (same behavior as before) + t.log.Error("Could not resolve node for metadata backfill", "nodeID", node.ID, "error", err) return nil } + // Prefer the real on-disk object: + // - Page => .md + // - Folder with content => /index.md + // - Folder without content => use folder mtime + statPath := r.FilePath + if r.Kind == NodeKindSection && !r.HasContent { + statPath = r.DirPath + } + // The default value is set to now createdAt := time.Now().UTC() updatedAt := time.Now().UTC() - // Try to read file info; on error, log non-NotExist issues and keep defaults - if info, err := os.Stat(filePath); err == nil { - createdAt = info.ModTime().UTC() - updatedAt = info.ModTime().UTC() - } else if !os.IsNotExist(err) { - log.Printf("could not stat file for node %s at path %s: %v", node.ID, filePath, err) + if statPath != "" { + if info, err := os.Stat(statPath); err == nil { + createdAt = info.ModTime().UTC() + updatedAt = info.ModTime().UTC() + } else if !os.IsNotExist(err) { + t.log.Error("Could not stat node for metadata", "nodeID", node.ID, "path", statPath, "error", err) + } } node.Metadata = PageMetadata{ @@ -132,111 +168,301 @@ func (t *TreeService) migrateTreeToV1Schema() error { return backfillMetadata(t.tree) } -// SaveTree saves the tree to the storage directory -func (t *TreeService) SaveTree() error { +// migrateToV2 migrates the tree to the v2 schema +// Adds frontmatter to all existing pages if missing +// Adds kind to all nodes +func (t *TreeService) migrateToV2() error { + if t.tree == nil { + return ErrTreeNotLoaded + } + t.backfillKindFromFSLocked() + + // Traverse all pages and add frontmatter if missing + var addFrontmatter func(node *PageNode) error + addFrontmatter = func(node *PageNode) error { + // Read the content of the page + content, err := t.store.ReadPageRaw(node) + if err != nil { + if errors.Is(err, os.ErrNotExist) || errors.Is(err, ErrFileNotFound) { + t.log.Warn("Page file does not exist, skipping frontmatter addition", "nodeID", node.ID) + // Recurse into children + for _, child := range node.Children { + if err := addFrontmatter(child); err != nil { + t.log.Error("Error adding frontmatter to child node", "nodeID", child.ID, "error", err) + return err + } + } + return nil + } + t.log.Error("Could not read page content for node", "nodeID", node.ID, "error", err) + return fmt.Errorf("could not read page content for node %s: %w", node.ID, err) + } + + // Parse the frontmatter + fm, body, has, err := markdown.ParseFrontmatter(content) + if err != nil { + t.log.Error("Could not parse frontmatter for node", "nodeID", node.ID, "error", err) + return fmt.Errorf("could not parse frontmatter for node %s: %w", node.ID, err) + } + + // Decide if we need to change anything + changed := false + + // If there is no frontmatter, start with a new one + if !has { + fm = markdown.Frontmatter{} + changed = true + } + + // Ensure required fields exist + if strings.TrimSpace(fm.LeafWikiID) == "" { + fm.LeafWikiID = node.ID + changed = true + } + // Optional but nice: keep title in sync *at least once* + // (you might choose to NOT overwrite existing title) + if strings.TrimSpace(fm.LeafWikiTitle) == "" { + fm.LeafWikiTitle = node.Title + changed = true + } + + // Only write if changed + if changed { + newContent, err := markdown.BuildMarkdownWithFrontmatter(fm, body) + if err != nil { + t.log.Error("could not build markdown with frontmatter", "nodeID", node.ID, "error", err) + return fmt.Errorf("could not build markdown with frontmatter for node %s: %w", node.ID, err) + } + + filePath, err := t.store.contentPathForNodeWrite(node) + if err != nil { + return fmt.Errorf("could not determine content path for node %s: %w", node.ID, err) + } + + if err := shared.WriteFileAtomic(filePath, []byte(newContent), 0o644); err != nil { + t.log.Error("could not write updated page content", "nodeID", node.ID, "filePath", filePath, "error", err) + return fmt.Errorf("could not write updated page content for node %s: %w", node.ID, err) + } + + t.log.Info("frontmatter backfilled", "nodeID", node.ID, "path", filePath) + } + + // Recurse into children + for _, child := range node.Children { + if err := addFrontmatter(child); err != nil { + t.log.Error("Error adding frontmatter to child node", "nodeID", child.ID, "error", err) + return err + } + } + + return nil + } + + // start the recursion from the children of the root + for _, child := range t.tree.Children { + if err := addFrontmatter(child); err != nil { + t.log.Error("Error adding frontmatter to child node", "nodeID", child.ID, "error", err) + return err + } + } + + return nil +} + +func (t *TreeService) backfillKindFromFSLocked() { + if t.tree == nil { + return + } + t.tree.Kind = NodeKindSection + + var walk func(n *PageNode) + walk = func(n *PageNode) { + if n == nil { + return + } + + // Root skip + if n.ID != "root" { + // Nur backfillen, wenn Kind fehlt/unknown + if n.Kind != NodeKindPage && n.Kind != NodeKindSection { + r, err := t.store.resolveNode(n) + if err == nil { + n.Kind = r.Kind + } else { + // Fallback-Heuristik, wenn auf Disk nichts existiert + if n.HasChildren() { + n.Kind = NodeKindSection + } else { + n.Kind = NodeKindPage + } + t.log.Warn("could not resolve node on disk; kind backfilled by heuristic", + "nodeID", n.ID, "slug", n.Slug, "err", err, "kind", n.Kind) + } + } + } + + for _, ch := range n.Children { + walk(ch) + } + } + + for _, ch := range t.tree.Children { + walk(ch) + } +} + +func (t *TreeService) withLockedTree(fn func() error) error { t.mu.Lock() defer t.mu.Unlock() - return t.saveTreeLocked() + return fn() } +func (t *TreeService) withRLockedTree(fn func() error) error { + t.mu.RLock() + defer t.mu.RUnlock() + + return fn() +} + +// SaveTree saves the tree to the storage directory +func (t *TreeService) SaveTree() error { + return t.withLockedTree(t.saveTreeLocked) +} + +// saveTreeLocked saves the tree to the storage directory func (t *TreeService) saveTreeLocked() error { - // Save the tree to the storage directory return t.store.SaveTree(t.treeFilename, t.tree) } -// Create Page adds a new page to the tree -func (t *TreeService) CreatePage(userID string, parentID *string, title string, slug string) (*string, error) { - t.mu.Lock() - defer t.mu.Unlock() +// TreeHash returns the current hash of the tree +func (t *TreeService) TreeHash() string { + var hash string + _ = t.withRLockedTree(func() error { + hash = t.tree.Hash() + return nil + }) + return hash +} - result, err := t.createPageLocked(userID, parentID, title, slug) +// ReconstructTreeFromFS reconstructs the tree from the filesystem +func (t *TreeService) ReconstructTreeFromFS() error { + return t.withLockedTree(t.reconstructTreeFromFSLocked) +} + +func (t *TreeService) reconstructTreeFromFSLocked() error { + // Reconstruct the tree from the filesystem + // This is a more complex operation and may involve reading the filesystem structure + newTree, err := t.store.ReconstructTreeFromFS() if err != nil { - return nil, err + t.log.Error("Error reconstructing tree from filesystem", "error", err) + return err + } + + // Defensive check to protect against unexpected nil returns from ReconstructTreeFromFS + if newTree == nil { + return fmt.Errorf("internal error: ReconstructTreeFromFS returned nil tree") + } + + // Save the old tree in case we need to revert + // Note: oldTree may be nil if this is the first reconstruction (which is expected) + oldTree := t.tree + t.tree = newTree + + // Backfill metadata for all nodes + if err := t.backfillMetadataLocked(); err != nil { + t.log.Error("Error backfilling metadata after reconstruction", "error", err) + // Revert tree assignment on failure (may set back to nil, which is fine) + t.tree = oldTree + return err } + // Save the tree if err := t.saveTreeLocked(); err != nil { - return nil, fmt.Errorf("could not save tree: %v", err) + t.log.Error("Error saving tree after reconstruction", "error", err) + // Revert tree assignment on failure (may set back to nil, which is fine) + t.tree = oldTree + return err + } + + // Update the schema version to prevent unnecessary migrations on next startup + if err := saveSchema(t.storageDir, CurrentSchemaVersion); err != nil { + t.log.Error("Error saving schema after reconstruction", "error", err) + // Note: We don't revert the tree here since it was already saved successfully + return err } - return result, nil + return nil } -// createPageLocked creates a new page under the given parent -// Lock must be held by the caller -func (t *TreeService) createPageLocked(userID string, parentID *string, title string, slug string) (*string, error) { +// Create Node adds a new node to the tree +func (t *TreeService) CreateNode(userID string, parentID *string, title string, slug string, nodeKind *NodeKind) (*string, error) { + var result *string + err := t.withLockedTree(func() error { + var err error + result, err = t.createNodeLocked(userID, parentID, title, slug, nodeKind) + return err + }) + return result, err +} + +// createNodeLocked creates a new node under the given parent +// Lock must be held by the caller +func (t *TreeService) createNodeLocked(userID string, parentID *string, title string, slug string, kind *NodeKind) (*string, error) { if t.tree == nil { return nil, ErrTreeNotLoaded } - if parentID == nil { - // The entry needs to be added to the root - root := t.tree - if root == nil { - return nil, ErrParentNotFound - } - - if root.ChildAlreadyExists(slug) { - return nil, ErrPageAlreadyExists - } + // Decide which kind we create + k := NodeKindPage + if kind != nil { + k = *kind + } - // Generate a unique ID for the new page - id, err := shared.GenerateUniqueID() + // Resolve the parent + parent := t.tree + if parentID != nil && *parentID != "" && *parentID != "root" { + var err error + parent, err = t.findPageByIDLocked(t.tree.Children, *parentID) if err != nil { - return nil, fmt.Errorf("could not generate unique ID: %v", err) - } - - now := time.Now().UTC() - - entry := &PageNode{ - ID: id, - Title: title, - Parent: root, - Slug: slug, - Position: len(root.Children), // Set the position to the end of the list - Children: []*PageNode{}, - Metadata: PageMetadata{ - CreatedAt: now, - UpdatedAt: now, - CreatorID: userID, - LastAuthorID: userID, - }, - } - - if err := t.store.CreatePage(root, entry); err != nil { - return nil, fmt.Errorf("could not create page entry: %v", err) + return nil, ErrParentNotFound } + } - root.Children = append(root.Children, entry) - - // Store Tree after adding page - // (Saving the tree is now the caller's responsibility) - return &entry.ID, nil + // Check if a child with the same slug already exists + if parent.ChildAlreadyExists(slug) { + return nil, ErrPageAlreadyExists } - // Find the parent page - parent, err := t.findPageByIDLocked(t.tree.Children, *parentID) - if err != nil { - return nil, ErrParentNotFound + // Check if the current parent is a section + // if not, we need to convert it to a section + if parent.Kind != NodeKindSection && parent.ID != "root" { + t.log.Info("converting parent to section", "parentID", parent.ID, "oldKind", parent.Kind, "newKind", NodeKindSection) + if err := t.store.ConvertNode(parent, NodeKindSection); err != nil { + return nil, fmt.Errorf("could not convert parent node: %w", err) + } + parent.Kind = NodeKindSection } - if parent.ChildAlreadyExists(slug) { - return nil, ErrPageAlreadyExists + if parent.Kind != NodeKindSection { + return nil, fmt.Errorf("cannot add child to non-section parent, got %q", parent.Kind) } // Generate a unique ID for the new page id, err := shared.GenerateUniqueID() if err != nil { - return nil, fmt.Errorf("could not generate unique ID: %v", err) + return nil, fmt.Errorf("could not generate unique ID: %w", err) } now := time.Now().UTC() + entry := &PageNode{ ID: id, - Slug: slug, Title: title, Parent: parent, + Slug: slug, + Kind: k, Position: len(parent.Children), // Set the position to the end of the list Children: []*PageNode{}, Metadata: PageMetadata{ @@ -247,23 +473,34 @@ func (t *TreeService) createPageLocked(userID string, parentID *string, title st }, } - if err := t.store.CreatePage(parent, entry); err != nil { - return nil, fmt.Errorf("could not create page entry: %v", err) + // Create on disk depending on kind + switch k { + case NodeKindPage: + if err := t.store.CreatePage(parent, entry); err != nil { + return nil, fmt.Errorf("could not create page entry: %w", err) + } + case NodeKindSection: + if err := t.store.CreateSection(parent, entry); err != nil { + return nil, fmt.Errorf("could not create section entry: %w", err) + } } // Add the new page to the parent parent.Children = append(parent.Children, entry) - return &entry.ID, nil } // FindPageByID finds a page in the tree by its ID // If the page is not found, it returns an error func (t *TreeService) FindPageByID(entry []*PageNode, id string) (*PageNode, error) { - t.mu.RLock() - defer t.mu.RUnlock() + var result *PageNode + err := t.withRLockedTree(func() error { + var err error + result, err = t.findPageByIDLocked(entry, id) + return err + }) - return t.findPageByIDLocked(entry, id) + return result, err } // findPageByIDLocked finds a page in the tree by its ID @@ -283,93 +520,187 @@ func (t *TreeService) findPageByIDLocked(entry []*PageNode, id string) (*PageNod return nil, ErrPageNotFound } -// DeletePage deletes a page from the tree -func (t *TreeService) DeletePage(userID string, id string, recursive bool) error { - t.mu.Lock() - defer t.mu.Unlock() +// DeleteNode deletes a node from the tree +func (t *TreeService) DeleteNode(userID string, id string, recursive bool) error { + err := t.withLockedTree(func() error { + if t.tree == nil { + return ErrTreeNotLoaded + } - if t.tree == nil { - return ErrTreeNotLoaded - } + // Find the node to delete + node, err := t.findPageByIDLocked(t.tree.Children, id) + if err != nil { + return ErrPageNotFound + } - // Find the page to delete - page, err := t.findPageByIDLocked(t.tree.Children, id) - if err != nil { - return ErrPageNotFound - } + // Check if node has children + if node.HasChildren() && !recursive { + return ErrPageHasChildren + } - // Check if page has children - if page.HasChildren() && !recursive { - return ErrPageHasChildren - } + // Delete the node from the parent + parent := node.Parent + if parent == nil { + return ErrParentNotFound + } - // Delete the page from the parent - parent := page.Parent - if parent == nil { - return ErrParentNotFound - } + switch node.Kind { + case NodeKindSection: + if err := t.store.DeleteSection(node); err != nil { + return fmt.Errorf("could not delete section entry: %w", err) + } + case NodeKindPage: + if node.HasChildren() { + // This should not happen due to earlier check, but just in case + // Convert to section and delete recursively + t.log.Info("converting page to section for recursive delete", "pageID", node.ID) + if err := t.store.ConvertNode(node, NodeKindSection); err != nil { + return fmt.Errorf("could not convert page to section: %w", err) + } + node.Kind = NodeKindSection + if err := t.store.DeleteSection(node); err != nil { + return fmt.Errorf("could not delete section entry: %w", err) + } + } else { + if err := t.store.DeletePage(node); err != nil { + return fmt.Errorf("could not delete page entry: %w", err) + } + } + default: + return fmt.Errorf("unknown node kind: %v", node.Kind) + } - // Delete the page from the filesystem - if err := t.store.DeletePage(page); err != nil { - return fmt.Errorf("could not delete page entry: %v", err) - } + // Remove the page from the parent + for i, e := range parent.Children { + if e.ID == id { + parent.Children = append(parent.Children[:i], parent.Children[i+1:]...) + break + } + } - // Remove the page from the parent - for i, e := range parent.Children { - if e.ID == id { - parent.Children = append(parent.Children[:i], parent.Children[i+1:]...) - break + t.reindexPositions(parent) + return t.saveTreeLocked() + }) + return err +} + +// UpdateNode updates a node (page/section) in the tree and syncs disk state via NodeStore. +func (t *TreeService) UpdateNode(userID string, id string, title string, slug string, content *string) error { + return t.withLockedTree(func() error { + if t.tree == nil { + return ErrTreeNotLoaded } - } - t.reindexPositions(parent) + // Find node + node, err := t.findPageByIDLocked(t.tree.Children, id) + if err != nil { + return ErrPageNotFound + } + + // Slug must be unique under same parent (when changed) + if slug != node.Slug && node.Parent != nil && node.Parent.ChildAlreadyExists(slug) { + return ErrPageAlreadyExists + } + + // Kind change? + // This operation is currently disabled to avoid complexity with content migration. + // We need to check if we need it later. + // if kind != nil && *kind != node.Kind { + // // Section -> Page only allowed if no children + // if node.Kind == NodeKindSection && *kind == NodeKindPage && node.HasChildren() { + // return ErrPageHasChildren + // } + + // t.log.Info("changing node kind", "nodeID", node.ID, "oldKind", node.Kind, "newKind", *kind) + // if err := t.store.ConvertNode(node, *kind); err != nil { + // return fmt.Errorf("could not convert node: %w", err) + // } + // node.Kind = *kind + // } + + // Content update? + if content != nil { + t.log.Info("updating node content", "nodeID", node.ID) + if err := t.store.UpsertContent(node, *content); err != nil { + return fmt.Errorf("could not upsert content: %w", err) + } + } + + // Rename slug on disk (must happen while node still has old slug) + if slug != node.Slug { + t.log.Info("renaming node slug", "nodeID", node.ID, "oldSlug", node.Slug, "newSlug", slug) + if err := t.store.RenameNode(node, slug); err != nil { + return fmt.Errorf("could not rename node: %w", err) + } + node.Slug = slug + } + + // Update title in tree + node.Title = title + + // Update metadata + node.Metadata.UpdatedAt = time.Now().UTC() + node.Metadata.LastAuthorID = userID + + // Keep frontmatter in sync *if file exists* (important when title changed but content == nil) + if err := t.store.SyncFrontmatterIfExists(node); err != nil { + return fmt.Errorf("could not sync frontmatter: %w", err) + } + + // Save tree + return t.saveTreeLocked() + }) - return t.saveTreeLocked() } -// UpdatePage updates a page in the tree -func (t *TreeService) UpdatePage(userID string, id string, title string, slug string, content string) error { - t.mu.Lock() - defer t.mu.Unlock() +func (t *TreeService) ConvertNode(userID string, id string, kind NodeKind) error { + return t.withLockedTree(func() error { + if t.tree == nil { + return ErrTreeNotLoaded + } - if t.tree == nil { - return ErrTreeNotLoaded - } + // Find node + node, err := t.findPageByIDLocked(t.tree.Children, id) + if err != nil { + return ErrPageNotFound + } - // Find the page to update - page, err := t.findPageByIDLocked(t.tree.Children, id) - if err != nil { - return ErrPageNotFound - } + if node.Kind == kind { + // No change + return nil + } - // Check if the slug is unique when slug changes! - if slug != page.Slug && page.Parent.ChildAlreadyExists(slug) { - return ErrPageAlreadyExists - } + // Section -> Page only allowed if no children + if node.Kind == NodeKindSection && kind == NodeKindPage && node.HasChildren() { + return ErrPageHasChildren + } - // Update the entry in the filesystem! - if err := t.store.UpdatePage(page, slug, content); err != nil { - return fmt.Errorf("could not update page entry: %v", err) - } + t.log.Info("changing node kind", "nodeID", node.ID, "oldKind", node.Kind, "newKind", kind) - // Update the page - page.Title = title - page.Slug = slug - // Update metadata - page.Metadata.UpdatedAt = time.Now().UTC() - page.Metadata.LastAuthorID = userID - // Save the tree - return t.saveTreeLocked() + if err := t.store.ConvertNode(node, kind); err != nil { + return fmt.Errorf("could not convert node: %w", err) + } + node.Kind = kind + + // Update metadata + node.Metadata.UpdatedAt = time.Now().UTC() + node.Metadata.LastAuthorID = userID + + // Keep frontmatter in sync *if file exists* (important when kind changed but content == nil) + if err := t.store.SyncFrontmatterIfExists(node); err != nil { + return fmt.Errorf("could not sync frontmatter: %w", err) + } + + // Save tree + return t.saveTreeLocked() + }) } // GetTree returns the tree func (t *TreeService) GetTree() *PageNode { - t.mu.Lock() - defer t.mu.Unlock() + t.mu.RLock() + defer t.mu.RUnlock() - if t.tree != nil { - t.sortTreeByPosition(t.tree) - } return t.tree } @@ -391,7 +722,7 @@ func (t *TreeService) GetPage(id string) (*Page, error) { // Get the content of the page content, err := t.store.ReadPageContent(page) if err != nil { - return nil, fmt.Errorf("could not get page content: %v", err) + return nil, fmt.Errorf("could not get page content: %w", err) } return &Page{ @@ -416,7 +747,7 @@ func (t *TreeService) FindPageByRoutePath(entry []*PageNode, routePath string) ( // Get the content of the entry content, err := t.store.ReadPageContent(e) if err != nil { - return nil, fmt.Errorf("could not get page content: %v", err) + return nil, fmt.Errorf("could not get page content: %w", err) } return &Page{ @@ -499,6 +830,8 @@ func (t *TreeService) LookupPagePathLocked(entry []*PageNode, p string) (*PathLo // Segment exists lookup.Segments[i].Exists = true lookup.Segments[i].ID = &e.ID + lookup.Segments[i].Kind = &e.Kind + lookup.Segments[i].Title = &e.Title // Move to the next entry entry = e.Children @@ -527,7 +860,10 @@ func (t *TreeService) LookupPagePathLocked(entry []*PageNode, p string) (*PathLo return lookup, nil } -func (t *TreeService) EnsurePagePath(userID string, p string, targetTitle string) (*EnsurePathResult, error) { +// EnsurePagePath ensures that a given path exists in the tree +// It creates any missing segments as needed +// Returns the final page node and a list of created nodes +func (t *TreeService) EnsurePagePath(userID string, p string, targetTitle string, kind *NodeKind) (*EnsurePathResult, error) { t.mu.Lock() defer t.mu.Unlock() @@ -537,81 +873,78 @@ func (t *TreeService) EnsurePagePath(userID string, p string, targetTitle string created := []*PageNode{} - // Lookup the path lookup, err := t.LookupPagePathLocked(t.tree.Children, p) if err != nil { - return nil, fmt.Errorf("could not lookup page path: %v", err) + return nil, fmt.Errorf("could not lookup page path: %w", err) } - // If the path exists, return the existing page + // Path exists -> return existing if lookup.Exists { - page, err := t.findPageByIDLocked(t.tree.Children, *lookup.Segments[len(lookup.Segments)-1].ID) + last := lookup.Segments[len(lookup.Segments)-1] + page, err := t.findPageByIDLocked(t.tree.Children, *last.ID) if err != nil { - return nil, fmt.Errorf("could not find existing page by ID: %v", err) + return nil, fmt.Errorf("could not find existing page by ID: %w", err) } - return &EnsurePathResult{ - Exists: true, - Page: page, - }, nil + return &EnsurePathResult{Exists: true, Page: page}, nil } - // If the path does not exist, create it - var currentID *string + // Create missing segments + var currentID *string // nil means root for i, segment := range lookup.Segments { - if segment.Exists { - // If the segment exists, use it currentID = segment.ID continue } - // Create the segment - title := segment.Slug + // Title + segTitle := segment.Slug if i == len(lookup.Segments)-1 { - // If this is the last segment, use the targetTitle - title = targetTitle + segTitle = targetTitle } - // If the segment does not exist, create it - newPageID, err := t.createPageLocked(userID, currentID, title, segment.Slug) - if err != nil { - return nil, fmt.Errorf("could not create page: %v", err) + // Kind: intermediate segments are sections, last segment uses provided kind (or page/section default) + kindToUse := NodeKindSection + if i == len(lookup.Segments)-1 && kind != nil { + kindToUse = *kind } - currentID = newPageID - // Append the newly created page node to the created slice - // It is a synthetic PageNode with only ID, Slug and Title set - created = append(created, &PageNode{ID: *currentID, Slug: segment.Slug, Title: title}) - // If this is the last segment, return the current page - if i == len(lookup.Segments)-1 { - page, err := t.findPageByIDLocked(t.tree.Children, *currentID) - if err != nil { - return nil, fmt.Errorf("could not find created page by ID: %v", err) - } - - // Save the tree - if err := t.saveTreeLocked(); err != nil { - return nil, fmt.Errorf("could not save tree: %v", err) - } - - return &EnsurePathResult{ - Exists: true, - Page: page, - Created: created, - }, nil + newID, err := t.createNodeLocked(userID, currentID, segTitle, segment.Slug, &kindToUse) + if err != nil { + return nil, fmt.Errorf("could not create segment %q: %w", segment.Slug, err) } + currentID = newID + + created = append(created, &PageNode{ + ID: *newID, + Slug: segment.Slug, + Title: segTitle, + Kind: kindToUse, + }) } - // Save the tree + // Resolve final page + if currentID == nil { + return nil, fmt.Errorf("could not ensure page path") + } + page, err := t.findPageByIDLocked(t.tree.Children, *currentID) + if err != nil { + return nil, fmt.Errorf("could not find created page by ID: %w", err) + } + + // Save once if err := t.saveTreeLocked(); err != nil { - return nil, fmt.Errorf("could not save tree: %v", err) + return nil, fmt.Errorf("could not save tree: %w", err) } - return nil, fmt.Errorf("could not ensure page path") + return &EnsurePathResult{ + Exists: true, + Page: page, + Created: created, + }, nil } -// MovePage moves a page to another parent -func (t *TreeService) MovePage(userID string, id string, parentID string) error { +// MoveNode moves a node to another parent (root if parentID is empty/"root") +func (t *TreeService) MoveNode(userID string, id string, parentID string) error { t.mu.Lock() defer t.mu.Unlock() @@ -619,52 +952,59 @@ func (t *TreeService) MovePage(userID string, id string, parentID string) error return ErrTreeNotLoaded } - // Find the page to move - page, err := t.findPageByIDLocked(t.tree.Children, id) + // Find node to move + node, err := t.findPageByIDLocked(t.tree.Children, id) if err != nil { return ErrPageNotFound } - // We think that the page is moved to the root + // Resolve destination parent (default root) newParent := t.tree - - // Check if a parentID is provided if parentID != "" && parentID != "root" { - // Find the new parent newParent, err = t.findPageByIDLocked(t.tree.Children, parentID) if err != nil { return fmt.Errorf("new parent not found: %w", ErrParentNotFound) } } - // Child with the same slug already exists - if newParent.ChildAlreadyExists(page.Slug) { + // Same slug collision under new parent + if newParent.ChildAlreadyExists(node.Slug) { return fmt.Errorf("child with the same slug already exists: %w", ErrPageAlreadyExists) } - // Check if the page is not moved to itself - if page.ID == newParent.ID { + // Can't move into itself + if node.ID == newParent.ID { return fmt.Errorf("page cannot be moved to itself: %w", ErrPageCannotBeMovedToItself) } - // Check if a circular reference is created - if page.IsChildOf(newParent.ID, true) { + // Circular reference guard: node cannot be moved under its own descendants + if node.IsChildOf(newParent.ID, true) { return fmt.Errorf("circular reference detected: %w", ErrMovePageCircularReference) } - // Move the page in the filesystem - if err := t.store.MovePage(page, newParent); err != nil { - return fmt.Errorf("could not move page entry: %w", err) + // If destination parent is a PAGE, auto-convert it to SECTION so it can host children + if newParent.ID != "root" && newParent.Kind == NodeKindPage { + if err := t.store.ConvertNode(newParent, NodeKindSection); err != nil { + return fmt.Errorf("could not auto-convert new parent page to section: %w", err) + } + newParent.Kind = NodeKindSection + } + + // Defensive: after possible conversion, destination must be a section + if newParent.Kind != NodeKindSection { + return fmt.Errorf("destination parent must be a section, got %q", newParent.Kind) + } + + // Move on disk (strict by node.Kind inside NodeStore) + if err := t.store.MoveNode(node, newParent); err != nil { + return fmt.Errorf("could not move node on disk: %w", err) } - // Move the page to the new parent - // Remove the page from the old parent - oldParent := page.Parent + // Unlink from old parent in tree + oldParent := node.Parent if oldParent == nil { return fmt.Errorf("old parent not found: %w", ErrParentNotFound) } - - // Remove the page from the old parent for i, e := range oldParent.Children { if e.ID == id { oldParent.Children = append(oldParent.Children[:i], oldParent.Children[i+1:]...) @@ -672,20 +1012,20 @@ func (t *TreeService) MovePage(userID string, id string, parentID string) error } } - // Add the page to the new parent - page.Position = len(newParent.Children) - newParent.Children = append(newParent.Children, page) - page.Parent = newParent + // Link under new parent + node.Position = len(newParent.Children) + newParent.Children = append(newParent.Children, node) + node.Parent = newParent - // Update Metadata of the moved page - page.Metadata.UpdatedAt = time.Now().UTC() - page.Metadata.LastAuthorID = userID + // Update metadata + node.Metadata.UpdatedAt = time.Now().UTC() + node.Metadata.LastAuthorID = userID - // Reindex the positions of the old parent + // Reindex positions t.reindexPositions(newParent) t.reindexPositions(oldParent) - // Save the tree + // Persist tree return t.saveTreeLocked() } @@ -755,6 +1095,39 @@ func (t *TreeService) SortPages(parentID string, orderedIDs []string) error { return t.saveTreeLocked() } +// maybeCollapseSectionToPageLocked tries to collapse a section node into a page node +// It is not used currently, but after testing the user flow we might want to integrate it +// into UpdateNode or MoveNode operations +// Lock must be held by the caller +// func (t *TreeService) maybeCollapseSectionToPageLocked(node *PageNode) { +// if node == nil || node.ID == "root" { +// return +// } +// if node.Kind != NodeKindSection { +// return +// } +// if node.HasChildren() { +// return +// } + +// // Only collapse if index.md exists +// indexPath, err := t.store.contentPathForNodeRead(node) +// if err != nil { +// return +// } +// if _, err := os.Stat(indexPath); err != nil { +// // no index.md => keep as section +// return +// } + +// // Try collapse (will refuse if folder has other files) +// if err := t.store.ConvertNode(node, NodeKindPage); err != nil { +// // not allowed (e.g. folder not empty) -> keep section +// return +// } +// node.Kind = NodeKindPage +// } + func (t *TreeService) reindexPositions(parent *PageNode) { sort.SliceStable(parent.Children, func(i, j int) bool { return parent.Children[i].Position < parent.Children[j].Position @@ -764,11 +1137,11 @@ func (t *TreeService) reindexPositions(parent *PageNode) { } } -func (t *TreeService) sortTreeByPosition(node *PageNode) { - sort.SliceStable(node.Children, func(i, j int) bool { - return node.Children[i].Position < node.Children[j].Position - }) - for _, child := range node.Children { - t.sortTreeByPosition(child) - } -} +// func (t *TreeService) sortTreeByPosition(node *PageNode) { +// sort.SliceStable(node.Children, func(i, j int) bool { +// return node.Children[i].Position < node.Children[j].Position +// }) +// for _, child := range node.Children { +// t.sortTreeByPosition(child) +// } +// } diff --git a/internal/core/tree/tree_service_test.go b/internal/core/tree/tree_service_test.go index 9a63ab64..384a99ad 100644 --- a/internal/core/tree/tree_service_test.go +++ b/internal/core/tree/tree_service_test.go @@ -6,800 +6,1057 @@ import ( "path/filepath" "strings" "testing" + + "github.com/perber/wiki/internal/core/markdown" ) -func TestTreeService_SaveAndLoadTree(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - - // Initialen Tree manuell setzen - service.tree = &PageNode{ - ID: "root", - Title: "Root", - Slug: "root", - Children: []*PageNode{ - { - ID: "child1", - Title: "Child 1", - Slug: "child-1", - Children: []*PageNode{ - { - ID: "child1a", - Title: "Child 1a", - Slug: "child-1a", - }, - }, - }, - }, - } - - // SaveTree ausführen - if err := service.SaveTree(); err != nil { - t.Fatalf("SaveTree failed: %v", err) - } +// --- helpers --- - // Neue Instanz zum Laden - loaded := NewTreeService(tmpDir) - if err := loaded.LoadTree(); err != nil { - t.Fatalf("LoadTree failed: %v", err) - } +func newLoadedService(t *testing.T) (*TreeService, string) { + t.Helper() + tmpDir := t.TempDir() - // Verifikation der Struktur - root := loaded.GetTree() - if root.ID != "root" || root.Title != "Root" { - t.Errorf("Expected root node not loaded correctly") + // Ensure schema is current so LoadTree doesn't try to migrate unless a test wants it. + if err := saveSchema(tmpDir, CurrentSchemaVersion); err != nil { + t.Fatalf("saveSchema failed: %v", err) } - if len(root.Children) != 1 || root.Children[0].ID != "child1" { - t.Errorf("Child not loaded correctly") + svc := NewTreeService(tmpDir) + if err := svc.LoadTree(); err != nil { + t.Fatalf("LoadTree failed: %v", err) } + return svc, tmpDir +} - grandchild := root.Children[0].Children[0] - if grandchild == nil || grandchild.ID != "child1a" { - t.Errorf("Grandchild not loaded correctly") +func mustStat(t *testing.T, path string) os.FileInfo { + t.Helper() + info, err := os.Stat(path) + if err != nil { + t.Fatalf("expected %q to exist, stat error: %v", path, err) } + return info +} - // Verifiziere Parent-Zuweisung - if root.Children[0].Parent == nil || root.Children[0].Parent.ID != "root" { - t.Errorf("Parent not assigned to child node") +func mustNotExist(t *testing.T, path string) { + t.Helper() + _, err := os.Stat(path) + if err == nil { + t.Fatalf("expected %q to not exist, but it exists", path) } - if grandchild.Parent == nil || grandchild.Parent.ID != "child1" { - t.Errorf("Parent not assigned to grandchild node") + if !errors.Is(err, os.ErrNotExist) { + t.Fatalf("expected os.ErrNotExist for %q, got: %v", path, err) } } -func TestTreeService_LoadTree_DefaultOnMissing(t *testing.T) { +// --- A) Load/Save basics --- + +func TestTreeService_LoadTree_DefaultRootWhenMissing(t *testing.T) { tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - // Kein tree.json vorhanden → Default-Root - err := service.LoadTree() - if err != nil { - t.Fatalf("Expected to load default tree, got error: %v", err) + // schema current to prevent migration from failing due to missing schema file + if err := saveSchema(tmpDir, CurrentSchemaVersion); err != nil { + t.Fatalf("saveSchema failed: %v", err) + } + + svc := NewTreeService(tmpDir) + if err := svc.LoadTree(); err != nil { + t.Fatalf("LoadTree failed: %v", err) } - tree := service.GetTree() + tree := svc.GetTree() if tree == nil || tree.ID != "root" { - t.Errorf("Expected default root node, got: %+v", tree) + t.Fatalf("expected default root, got: %+v", tree) + } + if tree.Kind != NodeKindSection { + t.Fatalf("expected root to be section, got %q", tree.Kind) } } -func TestTreeService_CreatePage_RootLevel(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_SaveAndLoad_RoundtripParents(t *testing.T) { + svc, tmpDir := newLoadedService(t) - _, err := service.CreatePage("system", nil, "Welcome", "welcome") + // Create a small tree through public API (exercises disk + tree) + idA, err := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("CreateNode A failed: %v", err) + } + _, err = svc.CreateNode("system", idA, "B", "b", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("CreateNode B failed: %v", err) } - tree := service.GetTree() - if len(tree.Children) != 1 { - t.Errorf("Expected 1 child at root level, got %d", len(tree.Children)) + if err := svc.SaveTree(); err != nil { + t.Fatalf("SaveTree failed: %v", err) } - child := tree.Children[0] - if child.Title != "Welcome" || child.Slug != "welcome" { - t.Errorf("Child has incorrect data: %+v", child) + // Reload in a new service instance + if err := saveSchema(tmpDir, CurrentSchemaVersion); err != nil { + t.Fatalf("saveSchema failed: %v", err) + } + loaded := NewTreeService(tmpDir) + if err := loaded.LoadTree(); err != nil { + t.Fatalf("LoadTree failed: %v", err) } - // Datei muss existieren - expectedPath := filepath.Join(tmpDir, "root", "welcome.md") - if _, err := os.Stat(expectedPath); os.IsNotExist(err) { - t.Errorf("Expected file not found: %s", expectedPath) + root := loaded.GetTree() + if len(root.Children) != 1 { + t.Fatalf("expected 1 child at root, got %d", len(root.Children)) + } + a := root.Children[0] + if a.Parent == nil || a.Parent.ID != "root" { + t.Fatalf("expected parent pointer on A") + } + if len(a.Children) != 1 { + t.Fatalf("expected A to have 1 child, got %d", len(a.Children)) + } + b := a.Children[0] + if b.Parent == nil || b.Parent.ID != a.ID { + t.Fatalf("expected parent pointer on B") } } -func TestTreeService_CreatePage_Nested(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +// --- B) Create/Update/Delete disk sync --- + +func TestTreeService_CreateNode_Page_Root_CreatesFileAndFrontmatter(t *testing.T) { + svc, tmpDir := newLoadedService(t) - // Zuerst einen Parent anlegen - _, err := service.CreatePage("system", nil, "Docs", "docs") + id, err := svc.CreateNode("system", nil, "Welcome", "welcome", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("Failed to create parent page: %v", err) + t.Fatalf("CreateNode failed: %v", err) } - // ID des Elternteils holen - parent := service.GetTree().Children[0] + // file path: /root/welcome.md (based on your existing tests + GeneratePath convention) + p := filepath.Join(tmpDir, "root", "welcome.md") + mustStat(t, p) - // Jetzt Subpage erstellen - _, err = service.CreatePage("system", &parent.ID, "Getting Started", "getting-started") + raw, err := os.ReadFile(p) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("read file: %v", err) } - if len(parent.Children) != 1 { - t.Errorf("Expected 1 child under parent, got %d", len(parent.Children)) + fm, _, has, err := markdown.ParseFrontmatter(string(raw)) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) } - - sub := parent.Children[0] - if sub.Slug != "getting-started" { - t.Errorf("Unexpected slug: %s", sub.Slug) + if !has { + t.Fatalf("expected frontmatter to exist") } - - expected := filepath.Join(tmpDir, "root", "docs", "getting-started.md") - if _, err := os.Stat(expected); os.IsNotExist(err) { - t.Errorf("Expected nested file not found: %s", expected) + if strings.TrimSpace(fm.LeafWikiID) != *id { + t.Fatalf("expected leafwiki_id=%q, got %q", *id, fm.LeafWikiID) } } -func TestTreeService_CreatePage_InvalidParent(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_CreateChild_UnderPage_AutoConvertsParentToSection(t *testing.T) { + svc, tmpDir := newLoadedService(t) - invalidID := "does-not-exist" - _, err := service.CreatePage("system", &invalidID, "Broken", "broken") - if err == nil { - t.Errorf("Expected error for invalid parent ID, got none") + // Create parent as page + parentID, err := svc.CreateNode("system", nil, "Docs", "docs", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("Create parent failed: %v", err) } -} -func TestTreeService_UpdatePage_ContentAndSlug(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() + // Should exist as file initially + parentFile := filepath.Join(tmpDir, "root", "docs.md") + mustStat(t, parentFile) - // Seite anlegen - _, err := service.CreatePage("system", nil, "Docs", "docs") + // Create child under parent: must convert parent to section + _, err = svc.CreateNode("system", parentID, "Getting Started", "getting-started", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("Create child failed: %v", err) } - page := service.GetTree().Children[0] - // Inhalt + Slug ändern - newSlug := "documentation" - newContent := "# Updated Docs" - err = service.UpdatePage("system", page.ID, "Documentation", newSlug, newContent) - if err != nil { - t.Fatalf("UpdatePage failed: %v", err) - } + // Parent should now be a folder with index.md (converted from docs.md) + parentDir := filepath.Join(tmpDir, "root", "docs") + mustStat(t, parentDir) + index := filepath.Join(parentDir, "index.md") + mustStat(t, index) - // Neuer Pfad sollte existieren - newPath := filepath.Join(tmpDir, "root", newSlug+".md") - if _, err := os.Stat(newPath); os.IsNotExist(err) { - t.Errorf("Expected updated file at %s not found", newPath) - } + // Old file should be gone + mustNotExist(t, parentFile) - // Inhalt prüfen - data, err := os.ReadFile(newPath) + // Child file should be inside folder + childFile := filepath.Join(parentDir, "getting-started.md") + mustStat(t, childFile) + + // Tree kind updated + parentNode, err := svc.FindPageByID(svc.GetTree().Children, *parentID) if err != nil { - t.Fatalf("Failed to read file: %v", err) + t.Fatalf("FindPageByID: %v", err) } - if string(data) != newContent { - t.Errorf("Expected content %q, got %q", newContent, string(data)) + if parentNode.Kind != NodeKindSection { + t.Fatalf("expected parent kind section, got %q", parentNode.Kind) } } -func TestTreeService_UpdatePage_FileNotFound(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_UpdateNode_TitleOnly_SyncsFrontmatterIfFileExists(t *testing.T) { + svc, tmpDir := newLoadedService(t) - // Seite im Baum erzeugen, aber Datei nicht schreiben - id := "ghost" - page := &PageNode{ - ID: id, - Title: "Ghost", - Slug: "ghost", - Parent: service.tree, + id, err := svc.CreateNode("system", nil, "Docs", "docs", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("CreateNode failed: %v", err) } - service.tree.Children = append(service.tree.Children, page) - // Versuch zu aktualisieren - err := service.UpdatePage("system", id, "Still Ghost", "still-ghost", "# Boo") - if err == nil { - t.Error("Expected error when file does not exist") - } -} + p := filepath.Join(tmpDir, "root", "docs.md") + mustStat(t, p) -func TestTreeService_UpdatePage_InvalidID(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() + // Update title only: content=nil, slug unchanged + if err := svc.UpdateNode("system", *id, "Documentation", "docs", nil); err != nil { + t.Fatalf("UpdateNode failed: %v", err) + } - err := service.UpdatePage("system", "unknown", "Nope", "nope", "# nope") - if err == nil { - t.Error("Expected error for invalid ID, got none") + raw, err := os.ReadFile(p) + if err != nil { + t.Fatalf("read: %v", err) + } + fm, _, has, err := markdown.ParseFrontmatter(string(raw)) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) + } + if !has { + t.Fatalf("expected frontmatter") + } + if fm.LeafWikiTitle != "Documentation" { + t.Fatalf("expected leafwiki_title to be updated, got %q", fm.LeafWikiTitle) } } -func TestTreeService_DeletePage_Success(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_UpdateNode_SlugRename_RenamesOnDisk(t *testing.T) { + svc, tmpDir := newLoadedService(t) - // Seite erstellen - _, err := service.CreatePage("system", nil, "DeleteMe", "delete-me") + id, err := svc.CreateNode("system", nil, "Docs", "docs", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("CreateNode failed: %v", err) } - page := service.GetTree().Children[0] - // Löschen - err = service.DeletePage("system", page.ID, false) - if err != nil { - t.Fatalf("DeletePage failed: %v", err) - } + oldPath := filepath.Join(tmpDir, "root", "docs.md") + mustStat(t, oldPath) - // Datei darf nicht mehr existieren - path := filepath.Join(tmpDir, "root", "delete-me.md") - if _, err := os.Stat(path); !os.IsNotExist(err) { - t.Errorf("Expected file to be deleted: %s", path) + newSlug := "documentation" + if err := svc.UpdateNode("system", *id, "Docs", newSlug, nil); err != nil { + t.Fatalf("UpdateNode failed: %v", err) } - // Seite sollte aus Tree entfernt worden sein - if len(service.GetTree().Children) != 0 { - t.Errorf("Expected page to be removed from tree") - } + newPath := filepath.Join(tmpDir, "root", newSlug+".md") + mustStat(t, newPath) + mustNotExist(t, oldPath) } -func TestTreeService_DeletePage_HasChildrenWithoutRecursive(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +/* +Disable this test for now as we are not enforcing to pass the kinds yet. +func TestTreeService_UpdateNode_SectionToPage_DisallowedWithChildren(t *testing.T) { + svc, _ := newLoadedService(t) - // Parent + Child - _, err := service.CreatePage("system", nil, "Parent", "parent") + // Create parent page, then child to force parent to section + parentID, err := svc.CreateNode("system", nil, "Docs", "docs", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("Create parent failed: %v", err) } - parent := service.GetTree().Children[0] - - _, err = service.CreatePage("system", &parent.ID, "Child", "child") + _, err = svc.CreateNode("system", parentID, "Child", "child", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage (child) failed: %v", err) + t.Fatalf("Create child failed: %v", err) } - // Versuch ohne Rekursion - err = service.DeletePage("system", parent.ID, false) + // Now parent is section with children, attempt to convert back to page + err = svc.UpdateNode("system", *parentID, "Docs", "docs", nil) if err == nil { - t.Error("Expected error when deleting parent with children without recursive flag") + t.Fatalf("expected error converting section->page with children") + } + if !errors.Is(err, ErrPageHasChildren) { + t.Fatalf("expected ErrPageHasChildren, got: %v", err) } } +*/ -func TestTreeService_DeletePage_InvalidID(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_DeleteNode_NonRecursiveErrorsWhenHasChildren(t *testing.T) { + svc, _ := newLoadedService(t) + + parentID, _ := svc.CreateNode("system", nil, "Parent", "parent", ptrKind(NodeKindPage)) + _, _ = svc.CreateNode("system", parentID, "Child", "child", ptrKind(NodeKindPage)) - err := service.DeletePage("system", "nonexistent", false) + err := svc.DeleteNode("system", *parentID, false) if err == nil { - t.Error("Expected error for unknown ID") + t.Fatalf("expected error") + } + if !errors.Is(err, ErrPageHasChildren) { + t.Fatalf("expected ErrPageHasChildren, got: %v", err) } } -func TestTreeService_DeletePage_Recursive(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_DeleteNode_RecursiveDeletesDiskAndTree(t *testing.T) { + svc, tmpDir := newLoadedService(t) - // Parent → Child - _, err := service.CreatePage("system", nil, "Parent", "parent") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) - } - parent := service.GetTree().Children[0] + parentID, _ := svc.CreateNode("system", nil, "Parent", "parent", ptrKind(NodeKindPage)) + _, _ = svc.CreateNode("system", parentID, "Child", "child", ptrKind(NodeKindPage)) - _, err = service.CreatePage("system", &parent.ID, "Child", "child") - if err != nil { - t.Fatalf("CreatePage (child) failed: %v", err) - } + // Parent should now be a folder + parentDir := filepath.Join(tmpDir, "root", "parent") + mustStat(t, parentDir) - // Rekursiv löschen - err = service.DeletePage("system", parent.ID, true) + err := svc.DeleteNode("system", *parentID, true) if err != nil { - t.Fatalf("Expected recursive delete to succeed, got error: %v", err) + t.Fatalf("DeleteNode recursive failed: %v", err) } - parentPath := filepath.Join(tmpDir, "root", "parent") - if _, err := os.Stat(parentPath); !os.IsNotExist(err) { - t.Errorf("Expected parent folder to be deleted") + // Folder should be gone + mustNotExist(t, parentDir) + + // Tree should have no children at root + if len(svc.GetTree().Children) != 0 { + t.Fatalf("expected root to have no children") } } -func TestTreeService_MovePage_FileToFolder(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_DeletePage_Leaf_Success_RemovesFileAndTreeAndReindexes(t *testing.T) { + svc, tmpDir := newLoadedService(t) - // Create root → a, root → b - _, err := service.CreatePage("system", nil, "A", "a") + // Create 3 leaf pages + idA, err := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A: %v", err) } - _, err = service.CreatePage("system", nil, "B", "b") + idB, err := svc.CreateNode("system", nil, "B", "b", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage B failed: %v", err) + t.Fatalf("CreateNode B: %v", err) } - - a := service.GetTree().Children[0] - b := service.GetTree().Children[1] - - err = service.MovePage("system", a.ID, b.ID) + idC, err := svc.CreateNode("system", nil, "C", "c", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("MovePage failed: %v", err) + t.Fatalf("CreateNode C: %v", err) } - // Erwartung: a ist jetzt unter b - if len(b.Children) != 1 || b.Children[0].ID != a.ID { - t.Errorf("Expected page A to be moved under B") + // Verify files exist + pathA := filepath.Join(tmpDir, "root", "a.md") + pathB := filepath.Join(tmpDir, "root", "b.md") + pathC := filepath.Join(tmpDir, "root", "c.md") + if _, err := os.Stat(pathB); err != nil { + t.Fatalf("expected %s exists: %v", pathB, err) } - // Datei existiert im neuen Pfad - expected := filepath.Join(tmpDir, "root", "b", "a.md") - if _, err := os.Stat(expected); os.IsNotExist(err) { - t.Errorf("Expected moved file: %v", expected) + // Delete middle page (B) + if err := svc.DeleteNode("system", *idB, false); err != nil { + t.Fatalf("DeleteNode failed: %v", err) } -} -func TestTreeService_MovePage_NonexistentPage(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() + // Disk: B gone; A/C still there + if _, err := os.Stat(pathB); !errors.Is(err, os.ErrNotExist) { + t.Fatalf("expected %s to be deleted, got err=%v", pathB, err) + } + if _, err := os.Stat(pathA); err != nil { + t.Fatalf("expected %s exists: %v", pathA, err) + } + if _, err := os.Stat(pathC); err != nil { + t.Fatalf("expected %s exists: %v", pathC, err) + } - // Create only one page - _, err := service.CreatePage("system", nil, "Target", "target") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) + // Tree: only 2 children remain + root := svc.GetTree() + if len(root.Children) != 2 { + t.Fatalf("expected 2 children after delete, got %d", len(root.Children)) } - target := service.GetTree().Children[0] - // Versuch mit ungültiger ID - err = service.MovePage("system", "does-not-exist", target.ID) - if err == nil { - t.Error("Expected error for non-existent source page") + // Ensure deleted ID not present + for _, ch := range root.Children { + if ch.ID == *idB { + t.Fatalf("deleted node still present in tree") + } + } + + // Reindex: positions must be 0..1 (order depends on previous positions; we just assert contiguous) + if root.Children[0].Position != 0 || root.Children[1].Position != 1 { + t.Fatalf("expected positions reindexed to 0..1, got %d,%d", + root.Children[0].Position, root.Children[1].Position) } + + // Optional: ensure remaining IDs are the ones we expect + _ = idA + _ = idC } -func TestTreeService_MovePage_NonexistentTarget(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_DeletePage_WithChildren_NonRecursive_ReturnsErrPageHasChildren(t *testing.T) { + svc, _ := newLoadedService(t) + + parentID, err := svc.CreateNode("system", nil, "Parent", "parent", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("CreateNode parent: %v", err) + } - _, err := service.CreatePage("system", nil, "Source", "source") + _, err = svc.CreateNode("system", parentID, "Child", "child", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("CreateNode child: %v", err) } - source := service.GetTree().Children[0] - err = service.MovePage("system", source.ID, "invalid-target-id") + err = svc.DeleteNode("system", *parentID, false) if err == nil { - t.Error("Expected error for non-existent target") + t.Fatalf("expected error deleting page with children without recursive") + } + if !errors.Is(err, ErrPageHasChildren) { + t.Fatalf("expected ErrPageHasChildren, got: %v", err) } } -func TestTreeService_MovePage_SelfAsParent(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_DeletePage_WithChildren_Recursive_DeletesFolder(t *testing.T) { + svc, tmpDir := newLoadedService(t) - _, err := service.CreatePage("system", nil, "Loop", "loop") + parentID, err := svc.CreateNode("system", nil, "Parent", "parent", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("CreateNode parent: %v", err) + } + _, err = svc.CreateNode("system", parentID, "Child", "child", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("CreateNode child: %v", err) } - node := service.GetTree().Children[0] - err = service.MovePage("system", node.ID, node.ID) - if err == nil { - t.Error("Expected error when moving page into itself (if you later implement such protection)") + // Parent was auto-converted to section -> folder should exist + parentDir := filepath.Join(tmpDir, "root", "parent") + if _, err := os.Stat(parentDir); err != nil { + t.Fatalf("expected parent dir exists (after auto-convert): %v", err) } -} -func TestTreeService_FindPageByRoutePath_Success(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() + // Recursive delete should remove the folder + if err := svc.DeleteNode("system", *parentID, true); err != nil { + t.Fatalf("DeleteNode recursive failed: %v", err) + } - // Tree: root → architecture → project-a → specs - _, err := service.CreatePage("system", nil, "Architecture", "architecture") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) + if _, err := os.Stat(parentDir); !errors.Is(err, os.ErrNotExist) { + t.Fatalf("expected parent folder deleted, got err=%v", err) } - arch := service.GetTree().Children[0] - _, err = service.CreatePage("system", &arch.ID, "Project A", "project-a") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) + // Tree should no longer contain parent + if len(svc.GetTree().Children) != 0 { + t.Fatalf("expected root to have no children after delete, got %d", len(svc.GetTree().Children)) } - projectA := arch.Children[0] +} - _, err = service.CreatePage("system", &projectA.ID, "Specs", "specs") - if err != nil { - t.Fatalf("CreatePage failed: %v", err) +func TestTreeService_DeletePage_InvalidID_ReturnsErrPageNotFound(t *testing.T) { + svc, _ := newLoadedService(t) + + err := svc.DeleteNode("system", "does-not-exist", false) + if err == nil { + t.Fatalf("expected error") + } + if !errors.Is(err, ErrPageNotFound) { + t.Fatalf("expected ErrPageNotFound, got: %v", err) } +} + +func TestTreeService_DeletePage_Drift_FileMissing_ReturnsError(t *testing.T) { + svc, tmpDir := newLoadedService(t) - // Datei anlegen - specPath := filepath.Join(tmpDir, "root", "architecture", "project-a", "specs.md") - err = os.WriteFile(specPath, []byte("# Project A Specs"), 0644) + // Create a leaf page normally (creates file) + id, err := svc.CreateNode("system", nil, "Ghost", "ghost", ptrKind(NodeKindPage)) if err != nil { - t.Fatalf("Failed to write specs file: %v", err) + t.Fatalf("CreateNode: %v", err) } - // 🔍 Suche über RoutePath - page, err := service.FindPageByRoutePath(service.GetTree().Children, "architecture/project-a/specs") - if err != nil { - t.Fatalf("Expected page, got error: %v", err) + // Delete the file manually to simulate drift + p := filepath.Join(tmpDir, "root", "ghost.md") + if err := os.Remove(p); err != nil { + t.Fatalf("failed to remove file to simulate drift: %v", err) } - if page.Slug != "specs" || !strings.Contains(page.Content, "Specs") { - t.Errorf("Unexpected page content or slug") + // Now delete node - should error (drift) + err = svc.DeleteNode("system", *id, false) + if err == nil { + t.Fatalf("expected drift error") + } + // If you have a concrete DriftError type, you can assert with errors.As. + var dErr *DriftError + if !errors.As(err, &dErr) { + t.Fatalf("expected DriftError, got: %T (%v)", err, err) } } -func TestTreeService_FindPageByRoutePath_NotFound(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +// --- C) Move semantics --- - if _, err := service.CreatePage("system", nil, "Top", "top"); err != nil { - t.Fatalf("CreatePage failed: %v", err) - } +func TestTreeService_MoveNode_TargetPageAutoConvertsToSection(t *testing.T) { + svc, tmpDir := newLoadedService(t) + + aID, _ := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) + bID, _ := svc.CreateNode("system", nil, "B", "b", ptrKind(NodeKindPage)) - if _, err := service.FindPageByRoutePath(service.GetTree().Children, "top/missing"); err == nil { - t.Error("Expected error for non-existent nested path, got nil") + // Move A under B (B is a page => should auto-convert to section) + if err := svc.MoveNode("system", *aID, *bID); err != nil { + t.Fatalf("MoveNode failed: %v", err) } + + // B should now be folder with index.md + bDir := filepath.Join(tmpDir, "root", "b") + mustStat(t, bDir) + mustStat(t, filepath.Join(bDir, "index.md")) + + // A should now be inside B folder + aPath := filepath.Join(bDir, "a.md") + mustStat(t, aPath) } -func TestTreeService_FindPageByRoutePath_PartialMatch(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_MoveNode_PreventsCircularReference(t *testing.T) { + svc, _ := newLoadedService(t) - if _, err := service.CreatePage("system", nil, "Docs", "docs"); err != nil { - t.Fatalf("CreatePage failed: %v", err) - } + aID, _ := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) + // create child under A so A becomes section and has child + bID, _ := svc.CreateNode("system", aID, "B", "b", ptrKind(NodeKindPage)) - if _, err := service.CreatePage("system", nil, "API", "api"); err != nil { - t.Fatalf("CreatePage failed: %v", err) + // Try move A under B (A -> ... -> B). Should error with circular reference. + err := svc.MoveNode("system", *aID, *bID) + if err == nil { + t.Fatalf("expected error moving node under its descendant") } - - if _, err := service.FindPageByRoutePath(service.GetTree().Children, "docs/should-not-exist"); err == nil { - t.Error("Expected error for unmatched subpath") + if !errors.Is(err, ErrMovePageCircularReference) { + t.Fatalf("expected ErrMovePageCircularReference, got: %v", err) } } -func setupTestTree() *TreeService { - ts := NewTreeService(os.TempDir()) - ts.tree = &PageNode{ - ID: "root", - Title: "Root", - Children: []*PageNode{ - {ID: "a", Title: "A"}, - {ID: "b", Title: "B"}, - {ID: "c", Title: "C"}, - }, +func TestTreeService_MoveNode_PreventsSelfParent(t *testing.T) { + svc, _ := newLoadedService(t) + + aID, _ := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) + + err := svc.MoveNode("system", *aID, *aID) + if err == nil { + t.Fatalf("expected error moving node into itself") + } + if !errors.Is(err, ErrPageCannotBeMovedToItself) { + t.Fatalf("expected ErrPageCannotBeMovedToItself, got: %v", err) } - return ts } +// --- D) SortPages --- + func TestTreeService_SortPages_ValidOrder(t *testing.T) { - ts := setupTestTree() + svc, _ := newLoadedService(t) - err := ts.SortPages("root", []string{"c", "a", "b"}) + idA, _ := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) + idB, _ := svc.CreateNode("system", nil, "B", "b", ptrKind(NodeKindPage)) + idC, _ := svc.CreateNode("system", nil, "C", "c", ptrKind(NodeKindPage)) + + err := svc.SortPages("root", []string{*idC, *idA, *idB}) if err != nil { - t.Fatalf("unexpected error: %v", err) + t.Fatalf("SortPages failed: %v", err) } - if ts.tree.Children[0].ID != "c" || ts.tree.Children[1].ID != "a" || ts.tree.Children[2].ID != "b" { - t.Errorf("unexpected order after sorting") + root := svc.GetTree() + if root.Children[0].ID != *idC || root.Children[1].ID != *idA || root.Children[2].ID != *idB { + t.Fatalf("unexpected order after sort") + } + if root.Children[0].Position != 0 || root.Children[1].Position != 1 || root.Children[2].Position != 2 { + t.Fatalf("expected positions to be reindexed") } } func TestTreeService_SortPages_InvalidLength(t *testing.T) { - ts := setupTestTree() + svc, _ := newLoadedService(t) - err := ts.SortPages("root", []string{"a", "b"}) + _, _ = svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) + _, _ = svc.CreateNode("system", nil, "B", "b", ptrKind(NodeKindPage)) + + err := svc.SortPages("root", []string{"only-one"}) if err == nil { - t.Errorf("expected error for invalid length, got nil") + t.Fatalf("expected error for invalid length") + } + if !errors.Is(err, ErrInvalidSortOrder) { + t.Fatalf("expected ErrInvalidSortOrder, got: %v", err) } } -func TestTreeService_SortPages_InvalidID(t *testing.T) { - ts := setupTestTree() +func TestTreeService_SortPages_DuplicateID(t *testing.T) { + svc, _ := newLoadedService(t) + + idA, _ := svc.CreateNode("system", nil, "A", "a", ptrKind(NodeKindPage)) + idB, _ := svc.CreateNode("system", nil, "B", "b", ptrKind(NodeKindPage)) - err := ts.SortPages("root", []string{"a", "b", "x"}) + err := svc.SortPages("root", []string{*idA, *idA, *idB}) if err == nil { - t.Errorf("expected error for invalid ID, got nil") + t.Fatalf("expected error for duplicate IDs") } } -func TestTreeService_SortPages_DuplicateID(t *testing.T) { - ts := setupTestTree() +// --- E) Routing, Lookup, Ensure --- - err := ts.SortPages("root", []string{"a", "a", "b"}) - if err == nil { - t.Errorf("expected error for duplicate ID, got nil") +func TestTreeService_FindPageByRoutePath_ReturnsContent(t *testing.T) { + svc, _ := newLoadedService(t) + + archID, _ := svc.CreateNode("system", nil, "Architecture", "architecture", ptrKind(NodeKindPage)) + // create child -> converts arch to section + projectID, _ := svc.CreateNode("system", archID, "Project A", "project-a", ptrKind(NodeKindPage)) + _, _ = svc.CreateNode("system", projectID, "Specs", "specs", ptrKind(NodeKindPage)) + + // Update specs content + specsNode := svc.GetTree().Children[0].Children[0].Children[0] + body := "# Specs\nHello" + if err := svc.UpdateNode("system", specsNode.ID, "Specs", "specs", &body); err != nil { + t.Fatalf("UpdateNode content failed: %v", err) } -} -func TestTreeService_SortPages_EmptyOK(t *testing.T) { - ts := NewTreeService(t.TempDir()) - ts.tree = &PageNode{ - ID: "root", - Title: "Root", - Children: []*PageNode{}, + page, err := svc.FindPageByRoutePath(svc.GetTree().Children, "architecture/project-a/specs") + if err != nil { + t.Fatalf("FindPageByRoutePath failed: %v", err) + } + if page.Slug != "specs" { + t.Fatalf("expected slug specs, got %q", page.Slug) } + if !strings.Contains(page.Content, "Hello") { + t.Fatalf("expected content to include Hello, got: %q", page.Content) + } +} + +func TestTreeService_LookupPagePath_Segments(t *testing.T) { + svc, _ := newLoadedService(t) + + homeID, _ := svc.CreateNode("system", nil, "Home", "home", ptrKind(NodeKindPage)) + _, _ = svc.CreateNode("system", homeID, "About", "about", ptrKind(NodeKindPage)) - err := ts.SortPages("root", []string{}) + lookup, err := svc.LookupPagePath(svc.GetTree().Children, "home/about/team") if err != nil { - t.Fatalf("unexpected error for empty list: %v", err) + t.Fatalf("LookupPagePath failed: %v", err) + } + if lookup.Exists { + t.Fatalf("expected full path to not exist") + } + if len(lookup.Segments) != 3 { + t.Fatalf("expected 3 segments, got %d", len(lookup.Segments)) + } + if !lookup.Segments[0].Exists || lookup.Segments[0].ID == nil { + t.Fatalf("expected home segment to exist with ID") + } + if !lookup.Segments[1].Exists || lookup.Segments[1].ID == nil { + t.Fatalf("expected about segment to exist with ID") + } + if lookup.Segments[2].Exists || lookup.Segments[2].ID != nil { + t.Fatalf("expected team to not exist") } } -func TestTreeService_SortPages_TreeNotLoaded(t *testing.T) { - ts := &TreeService{ - tree: nil, +func TestTreeService_EnsurePagePath_CreatesIntermediateSectionsAndFinalPage(t *testing.T) { + svc, _ := newLoadedService(t) + + // Ensure a deep path; intermediate nodes should become sections + res, err := svc.EnsurePagePath("system", "home/about/team/members", "Members", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("EnsurePagePath failed: %v", err) + } + if res.Page == nil || res.Page.Slug != "members" { + t.Fatalf("expected final page 'members'") } - err := ts.SortPages("root", []string{"a"}) - if err == nil || !errors.Is(err, ErrTreeNotLoaded) { - t.Errorf("expected ErrTreeNotLoaded, got: %v", err) + // home/about/team should exist as path now + lookup, err := svc.LookupPagePath(svc.GetTree().Children, "home/about/team/members") + if err != nil { + t.Fatalf("LookupPagePath failed: %v", err) + } + if !lookup.Exists { + t.Fatalf("expected path to exist after EnsurePagePath") } } -func TestTreeService_LookupPath_Exists(t *testing.T) { +// --- F) Migration V2 (frontmatter backfill) --- +func TestTreeService_LoadTree_MigratesToV2_AddsFrontmatterAndPreservesBody(t *testing.T) { + if CurrentSchemaVersion < 2 { + t.Skip("requires schema v2+") + } + tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - // Create tree structure - _, _ = service.CreatePage("system", nil, "Home", "home") - home := service.GetTree().Children[0] - _, _ = service.CreatePage("system", &home.ID, "About", "about") - about := home.Children[0] - _, _ = service.CreatePage("system", &about.ID, "Team", "team") + // start on v1 (or generally: current-1) + if err := saveSchema(tmpDir, CurrentSchemaVersion-1); err != nil { + t.Fatalf("saveSchema failed: %v", err) + } - lookup, err := service.LookupPagePath(service.GetTree().Children, "home/about/team") - if err != nil { - t.Fatalf("unexpected error: %v", err) + svc := NewTreeService(tmpDir) + if err := svc.LoadTree(); err != nil { + t.Fatalf("LoadTree failed: %v", err) } - if !lookup.Exists { - t.Errorf("expected path to exist") + id, err := svc.CreateNode("system", nil, "Page1", "page1", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("CreateNode failed: %v", err) } - if len(lookup.Segments) != 3 { - t.Errorf("expected 3 segments, got %d", len(lookup.Segments)) + + // IMPORTANT: persist tree so the next service instance sees the node + if err := svc.SaveTree(); err != nil { + t.Fatalf("SaveTree failed: %v", err) } - if !lookup.Segments[2].Exists || lookup.Segments[2].ID == nil || lookup.Segments[2].Slug != "team" { - t.Errorf("expected last segment to exist with correct Slug") + + // overwrite file without FM + pagePath := filepath.Join(tmpDir, "root", "page1.md") + body := "# Page 1 Content\nHello World\n" + if err := os.WriteFile(pagePath, []byte(body), 0o644); err != nil { + t.Fatalf("write old content failed: %v", err) } -} -func TestTreeService_LookupPath_NotExists(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() + // force schema old again + if err := saveSchema(tmpDir, CurrentSchemaVersion-1); err != nil { + t.Fatalf("saveSchema failed: %v", err) + } - // Create tree structure - _, _ = service.CreatePage("system", nil, "Home", "home") - home := service.GetTree().Children[0] - _, _ = service.CreatePage("system", &home.ID, "About", "about") + loaded := NewTreeService(tmpDir) + if err := loaded.LoadTree(); err != nil { + t.Fatalf("LoadTree (migrating) failed: %v", err) + } - lookup, err := service.LookupPagePath(service.GetTree().Children, "home/about/contact") + raw, err := os.ReadFile(pagePath) if err != nil { - t.Fatalf("unexpected error: %v", err) + t.Fatalf("read migrated file: %v", err) } - if lookup.Exists { - t.Errorf("expected path to not exist") + fm, migratedBody, has, err := markdown.ParseFrontmatter(string(raw)) + if err != nil { + t.Fatalf("ParseFrontmatter: %v", err) } - if len(lookup.Segments) != 3 { - t.Errorf("expected 3 segments, got %d", len(lookup.Segments)) + if !has { + t.Fatalf("expected frontmatter after migration, got:\n%s", string(raw)) } - if !lookup.Segments[1].Exists || lookup.Segments[1].ID == nil || lookup.Segments[1].Slug != "about" { - t.Errorf("expected second segment to exist with correct Slug") + if fm.LeafWikiID != *id { + t.Fatalf("expected leafwiki_id=%q, got %q", *id, fm.LeafWikiID) } - if lookup.Segments[2].Exists || lookup.Segments[2].ID != nil || lookup.Segments[2].Slug != "contact" { - t.Errorf("expected last segment to not exist with correct Slug") + if strings.TrimSpace(fm.LeafWikiTitle) == "" { + t.Fatalf("expected leafwiki_title to be set") + } + if migratedBody != body { + t.Fatalf("expected body preserved exactly.\nGot:\n%q\nWant:\n%q", migratedBody, body) } } -func TestTreeService_LookupPath_EmptyPath(t *testing.T) { +// TestTreeService_ReconstructTreeFromFS_UpdatesSchemaVersion verifies that +// ReconstructTreeFromFS writes the current schema version to prevent unnecessary migrations +func TestTreeService_ReconstructTreeFromFS_UpdatesSchemaVersion(t *testing.T) { tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - lookup, err := service.LookupPagePath(service.GetTree().Children, "") - if err != nil { - t.Fatalf("unexpected error: %v", err) + // Create a minimal file structure for reconstruction + mustMkdir(t, filepath.Join(tmpDir, "root")) + mustWriteFile(t, filepath.Join(tmpDir, "root", "test.md"), "# Test Page", 0o644) + + // Create service WITHOUT schema.json (simulating an old/missing schema) + svc := NewTreeService(tmpDir) + + // Reconstruct the tree (no prior tree loaded) + if err := svc.ReconstructTreeFromFS(); err != nil { + t.Fatalf("ReconstructTreeFromFS failed: %v", err) } - if lookup.Exists { - t.Errorf("expected empty path to not exist") + // Verify schema.json was created with current version + schema, err := loadSchema(tmpDir) + if err != nil { + t.Fatalf("loadSchema failed: %v", err) } - if len(lookup.Segments) != 0 { - t.Errorf("expected 0 segments, got %d", len(lookup.Segments)) + + if schema.Version != CurrentSchemaVersion { + t.Errorf("expected schema version %d after reconstruction, got %d", CurrentSchemaVersion, schema.Version) } + + // Verify tree.json was also created + mustStat(t, filepath.Join(tmpDir, "tree.json")) } -func TestTreeService_LookupPath_DeeperMissingPath(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +// --- G) ReconstructTreeFromFS --- + +func TestTreeService_ReconstructTreeFromFS_BackfillsMetadata(t *testing.T) { + svc, tmpDir := newLoadedService(t) + + // Create some files on disk manually (simulating external changes) + mustWriteFile(t, filepath.Join(tmpDir, "root", "page1.md"), `--- +leafwiki_id: page-1 +leafwiki_title: Page One +--- +# Page One`, 0o644) + + mustMkdir(t, filepath.Join(tmpDir, "root", "section1")) + mustWriteFile(t, filepath.Join(tmpDir, "root", "section1", "index.md"), `--- +leafwiki_id: sec-1 +leafwiki_title: Section One +--- +# Section One`, 0o644) + + mustWriteFile(t, filepath.Join(tmpDir, "root", "section1", "page2.md"), `--- +leafwiki_id: page-2 +leafwiki_title: Page Two +--- +# Page Two`, 0o644) + + // Reconstruct the tree from filesystem + err := svc.ReconstructTreeFromFS() + if err != nil { + t.Fatalf("ReconstructTreeFromFS failed: %v", err) + } - _, _ = service.CreatePage("system", nil, "Home", "home") - home := service.GetTree().Children[0] - _, _ = service.CreatePage("system", &home.ID, "About", "about") + // Verify metadata was backfilled for all nodes + tree := svc.GetTree() - lookup, err := service.LookupPagePath(service.GetTree().Children, "home/about/team/members") - if err != nil { - t.Fatalf("unexpected error: %v", err) + // Check root metadata + if tree.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected root metadata CreatedAt to be backfilled, got zero") + } + if tree.Metadata.UpdatedAt.IsZero() { + t.Fatalf("expected root metadata UpdatedAt to be backfilled, got zero") } - if lookup.Exists { - t.Errorf("expected path to not exist") + // Find and verify page1 + page1 := findChildBySlug(t, tree, "page1") + if page1.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected page1 metadata CreatedAt to be backfilled, got zero") + } + if page1.Metadata.UpdatedAt.IsZero() { + t.Fatalf("expected page1 metadata UpdatedAt to be backfilled, got zero") } - if len(lookup.Segments) != 4 { - t.Errorf("expected 4 segments, got %d", len(lookup.Segments)) + + // Find and verify section1 + section1 := findChildBySlug(t, tree, "section1") + if section1.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected section1 metadata CreatedAt to be backfilled, got zero") } - if !lookup.Segments[1].Exists || lookup.Segments[1].ID == nil || lookup.Segments[1].Slug != "about" { - t.Errorf("expected second segment to exist with correct Slug") + if section1.Metadata.UpdatedAt.IsZero() { + t.Fatalf("expected section1 metadata UpdatedAt to be backfilled, got zero") } - if lookup.Segments[2].Exists || lookup.Segments[2].ID != nil || lookup.Segments[2].Slug != "team" { - t.Errorf("expected third segment to not exist with correct Slug") + + // Find and verify page2 (child of section1) + page2 := findChildBySlug(t, section1, "page2") + if page2.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected page2 metadata CreatedAt to be backfilled, got zero") } - if lookup.Segments[3].Exists || lookup.Segments[3].ID != nil || lookup.Segments[3].Slug != "members" { - t.Errorf("expected last segment to not exist with correct Slug") + if page2.Metadata.UpdatedAt.IsZero() { + t.Fatalf("expected page2 metadata UpdatedAt to be backfilled, got zero") } } -func TestTreeService_LookupPath_OnlyOneSegment(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_ReconstructTreeFromFS_PersistsTreeJSON(t *testing.T) { + svc, tmpDir := newLoadedService(t) + + // Create some files on disk manually + mustWriteFile(t, filepath.Join(tmpDir, "root", "readme.md"), `--- +leafwiki_id: readme-page +leafwiki_title: README +--- +# README`, 0o644) - _, _ = service.CreatePage("system", nil, "Home", "home") + // Verify tree.json doesn't exist or is empty before reconstruction + treeJSONPath := filepath.Join(tmpDir, "tree.json") - lookup, err := service.LookupPagePath(service.GetTree().Children, "home") + // Reconstruct the tree from filesystem + err := svc.ReconstructTreeFromFS() if err != nil { - t.Fatalf("unexpected error: %v", err) + t.Fatalf("ReconstructTreeFromFS failed: %v", err) } - if !lookup.Exists { - t.Errorf("expected path to exist") + // Verify tree.json was persisted + info := mustStat(t, treeJSONPath) + if info.Size() == 0 { + t.Fatalf("expected tree.json to have content after reconstruction, got size 0") } - if len(lookup.Segments) != 1 { - t.Errorf("expected 1 segment, got %d", len(lookup.Segments)) + + // Verify we can reload the tree from the saved tree.json + newSvc := NewTreeService(tmpDir) + if err := newSvc.LoadTree(); err != nil { + t.Fatalf("LoadTree after reconstruction failed: %v", err) } - if !lookup.Segments[0].Exists || lookup.Segments[0].ID == nil || lookup.Segments[0].Slug != "home" { - t.Errorf("expected segment to exist with correct Slug") + + // Verify the tree structure matches + tree := newSvc.GetTree() + if tree == nil || tree.ID != "root" { + t.Fatalf("expected root node after reload, got: %+v", tree) } -} -func TestTreeService_EnsurePagePath_Successful(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() + // Verify the readme page exists + readme := findChildBySlug(t, tree, "readme") + if readme.ID != "readme-page" { + t.Fatalf("expected readme ID to be 'readme-page', got %q", readme.ID) + } + if readme.Title != "README" { + t.Fatalf("expected readme title to be 'README', got %q", readme.Title) + } - _, _ = service.CreatePage("system", nil, "Home", "home") - home := service.GetTree().Children[0] - _, _ = service.CreatePage("system", &home.ID, "About", "about") + // Verify metadata was persisted + if readme.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected persisted metadata CreatedAt to not be zero") + } + if readme.Metadata.UpdatedAt.IsZero() { + t.Fatalf("expected persisted metadata UpdatedAt to not be zero") + } +} - result, err := service.EnsurePagePath("system", "home/about/team", "Team") +func TestTreeService_ReconstructTreeFromFS_ComplexTree_PreservesStructure(t *testing.T) { + svc, tmpDir := newLoadedService(t) + + // Create a complex tree structure on disk + mustWriteFile(t, filepath.Join(tmpDir, "root", "intro.md"), `--- +leafwiki_id: intro +leafwiki_title: Introduction +--- +# Introduction`, 0o644) + + mustMkdir(t, filepath.Join(tmpDir, "root", "docs")) + mustWriteFile(t, filepath.Join(tmpDir, "root", "docs", "index.md"), `--- +leafwiki_id: docs-section +leafwiki_title: Documentation +--- +# Documentation`, 0o644) + + mustWriteFile(t, filepath.Join(tmpDir, "root", "docs", "getting-started.md"), `--- +leafwiki_id: getting-started +leafwiki_title: Getting Started +--- +# Getting Started`, 0o644) + + mustMkdir(t, filepath.Join(tmpDir, "root", "docs", "guides")) + mustWriteFile(t, filepath.Join(tmpDir, "root", "docs", "guides", "index.md"), `--- +leafwiki_id: guides-section +leafwiki_title: Guides +--- +# Guides`, 0o644) + + mustWriteFile(t, filepath.Join(tmpDir, "root", "docs", "guides", "basic.md"), `--- +leafwiki_id: basic-guide +leafwiki_title: Basic Guide +--- +# Basic Guide`, 0o644) + + // Reconstruct + err := svc.ReconstructTreeFromFS() if err != nil { - t.Fatalf("unexpected error: %v", err) + t.Fatalf("ReconstructTreeFromFS failed: %v", err) } - if !result.Exists { - t.Errorf("expected path to exist after creation") + tree := svc.GetTree() + + // Verify structure + intro := findChildBySlug(t, tree, "intro") + if intro.Kind != NodeKindPage { + t.Fatalf("expected intro to be a page, got %q", intro.Kind) + } + + docs := findChildBySlug(t, tree, "docs") + if docs.Kind != NodeKindSection { + t.Fatalf("expected docs to be a section, got %q", docs.Kind) } - if result.Page == nil || result.Page.Slug != "team" || result.Page.Title != "Team" { - t.Errorf("expected created page with correct Slug and Title") + if docs.ID != "docs-section" { + t.Fatalf("expected docs ID to be 'docs-section', got %q", docs.ID) } - // Verify the page was actually created in the tree - about := home.Children[0] - if len(about.Children) != 1 || about.Children[0].Slug != "team" { - t.Errorf("expected 'team' page to be a child of 'about'") + gettingStarted := findChildBySlug(t, docs, "getting-started") + if gettingStarted.Kind != NodeKindPage { + t.Fatalf("expected getting-started to be a page, got %q", gettingStarted.Kind) } -} -func TestTreeService_EnsurePagePath_AlreadyExists(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() + guides := findChildBySlug(t, docs, "guides") + if guides.Kind != NodeKindSection { + t.Fatalf("expected guides to be a section, got %q", guides.Kind) + } - _, _ = service.CreatePage("system", nil, "Home", "home") - home := service.GetTree().Children[0] - _, _ = service.CreatePage("system", &home.ID, "About", "about") - about := home.Children[0] - _, _ = service.CreatePage("system", &about.ID, "Team", "team") + basic := findChildBySlug(t, guides, "basic") + if basic.Kind != NodeKindPage { + t.Fatalf("expected basic to be a page, got %q", basic.Kind) + } - result, err := service.EnsurePagePath("system", "home/about/team", "Team") - if err != nil { - t.Fatalf("unexpected error: %v", err) + // Verify all nodes have metadata + if intro.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected intro to have metadata") } + if docs.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected docs to have metadata") + } + if guides.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected guides to have metadata") + } + if basic.Metadata.CreatedAt.IsZero() { + t.Fatalf("expected basic to have metadata") + } + + // Verify tree.json was saved and can be reloaded + treeJSONPath := filepath.Join(tmpDir, "tree.json") + mustStat(t, treeJSONPath) - if !result.Exists { - t.Errorf("expected path to exist") + reloadedSvc := NewTreeService(tmpDir) + if err := reloadedSvc.LoadTree(); err != nil { + t.Fatalf("LoadTree after reconstruction failed: %v", err) } - if result.Page == nil || result.Page.Slug != "team" { - t.Errorf("expected existing page with correct Slug") + + reloadedTree := reloadedSvc.GetTree() + if len(reloadedTree.Children) != len(tree.Children) { + t.Fatalf("expected reloaded tree to have same number of children") } } -func TestTreeService_EnsurePagePath_PartialExistence(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() - - _, _ = service.CreatePage("system", nil, "Home", "home") - home := service.GetTree().Children[0] - _, _ = service.CreatePage("system", &home.ID, "About", "about") +func TestTreeService_ReconstructTreeFromFS_EmptyDirectory_CreatesRootAndPersists(t *testing.T) { + svc, tmpDir := newLoadedService(t) - result, err := service.EnsurePagePath("system", "home/about/team/members", "Members") + // Reconstruct from empty directory (should create just root) + err := svc.ReconstructTreeFromFS() if err != nil { - t.Fatalf("unexpected error: %v", err) + t.Fatalf("ReconstructTreeFromFS failed: %v", err) } - if !result.Exists { - t.Errorf("expected full path to exist after creation") - } - if result.Page == nil || result.Page.Slug != "members" || result.Page.Title != "Members" { - t.Errorf("expected created 'members' page with correct Slug and Title") + tree := svc.GetTree() + if tree == nil || tree.ID != "root" { + t.Fatalf("expected root node, got: %+v", tree) } - // Verify the intermediate 'team' page was also created - about := home.Children[0] - if len(about.Children) != 1 || about.Children[0].Slug != "team" { - t.Errorf("expected 'team' page to be a child of 'about'") + // Note: Root metadata may not be backfilled from filesystem when directory is empty + // because there's no corresponding file/directory to stat. This is expected behavior. + // The important thing is that the tree is reconstructed and persisted. + + // Verify tree.json was saved + treeJSONPath := filepath.Join(tmpDir, "tree.json") + mustStat(t, treeJSONPath) + + // Verify we can reload + reloadedSvc := NewTreeService(tmpDir) + if err := reloadedSvc.LoadTree(); err != nil { + t.Fatalf("LoadTree after reconstruction failed: %v", err) } - team := about.Children[0] - if len(team.Children) != 1 || team.Children[0].Slug != "members" { - t.Errorf("expected 'members' page to be a child of 'team'") + + reloadedTree := reloadedSvc.GetTree() + if reloadedTree == nil || reloadedTree.ID != "root" { + t.Fatalf("expected root node after reload") } } -func TestTreeService_EnsurePagePath_EmptyPath(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() +func TestTreeService_ReconstructTreeFromFS_RevertsOnMetadataBackfillError(t *testing.T) { + // This test is harder to trigger without mocking, but we can at least verify + // that if the tree state is preserved if we can cause a failure scenario. + // For now, we'll test that a successful reconstruction doesn't lose the old tree. + svc, tmpDir := newLoadedService(t) - result, err := service.EnsurePagePath("system", "", "Root") - if err == nil { - t.Fatalf("expected error for empty path, got nil") + // Create initial tree state + initialID, err := svc.CreateNode("system", nil, "Initial", "initial", ptrKind(NodeKindPage)) + if err != nil { + t.Fatalf("CreateNode failed: %v", err) } - if result != nil { - t.Errorf("expected nil result for empty path") + // Get initial tree + initialTree := svc.GetTree() + if len(initialTree.Children) != 1 { + t.Fatalf("expected 1 child in initial tree") } -} -func TestTreeService_EnsurePagePath_PathStartingWithSlash(t *testing.T) { - tmpDir := t.TempDir() - service := NewTreeService(tmpDir) - _ = service.LoadTree() + // Create a new file on disk + mustWriteFile(t, filepath.Join(tmpDir, "root", "new-page.md"), `--- +leafwiki_id: new-page +leafwiki_title: New Page +--- +# New Page`, 0o644) - result, err := service.EnsurePagePath("system", "/leading/slash", "Invalid") + // Reconstruct should succeed + err = svc.ReconstructTreeFromFS() if err != nil { - t.Fatalf("expected error for invalid path, got nil") + t.Fatalf("ReconstructTreeFromFS failed: %v", err) + } + + // Verify new tree has both nodes + newTree := svc.GetTree() + if len(newTree.Children) != 2 { + t.Fatalf("expected 2 children after reconstruction, got %d", len(newTree.Children)) } - if result == nil { - t.Errorf("expected nil result for invalid path") + // Verify initial node still exists + var foundInitial bool + for _, child := range newTree.Children { + if child.ID == *initialID { + foundInitial = true + break + } + } + if !foundInitial { + t.Fatalf("expected initial node to still exist after reconstruction") } } + +// --- small util --- + +func ptrKind(k NodeKind) *NodeKind { return &k } diff --git a/internal/http/api/convert_page.go b/internal/http/api/convert_page.go new file mode 100644 index 00000000..359e469f --- /dev/null +++ b/internal/http/api/convert_page.go @@ -0,0 +1,51 @@ +package api + +import ( + "net/http" + + "github.com/gin-gonic/gin" + "github.com/perber/wiki/internal/core/tree" + auth_middleware "github.com/perber/wiki/internal/http/middleware/auth" + "github.com/perber/wiki/internal/wiki" +) + +type convertPageRequest struct { + TargetKind string `json:"targetKind" binding:"required"` +} + +func ConvertPageHandler(w *wiki.Wiki) gin.HandlerFunc { + return func(c *gin.Context) { + var req convertPageRequest + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request"}) + return + } + + user := auth_middleware.MustGetUser(c) + if user == nil { + return + } + + id := c.Param("id") + if id == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing page ID"}) + return + } + + // Validate TargetKind before converting to tree.NodeKind + if req.TargetKind != "page" && req.TargetKind != "section" { + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid targetKind"}) + return + } + + kind := tree.NodeKind(req.TargetKind) + + err := w.ConvertPage(user.ID, id, kind) + if err != nil { + respondWithError(c, err) + return + } + + c.JSON(http.StatusOK, gin.H{"status": "page converted"}) + } +} diff --git a/internal/http/api/create_page.go b/internal/http/api/create_page.go index 702c95e4..bad95e49 100644 --- a/internal/http/api/create_page.go +++ b/internal/http/api/create_page.go @@ -4,6 +4,7 @@ import ( "net/http" "github.com/gin-gonic/gin" + "github.com/perber/wiki/internal/core/tree" auth_middleware "github.com/perber/wiki/internal/http/middleware/auth" "github.com/perber/wiki/internal/wiki" ) @@ -12,6 +13,7 @@ type createPageRequest struct { ParentID *string `json:"parentId"` // optional Title string `json:"title" binding:"required"` Slug string `json:"slug" binding:"required"` + Kind *string `json:"kind"` // optional } func CreatePageHandler(w *wiki.Wiki) gin.HandlerFunc { @@ -27,7 +29,11 @@ func CreatePageHandler(w *wiki.Wiki) gin.HandlerFunc { return } - page, err := w.CreatePage(user.ID, req.ParentID, req.Title, req.Slug) + kind := tree.NodeKindPage + if req.Kind != nil { + kind = tree.NodeKind(*req.Kind) + } + page, err := w.CreatePage(user.ID, req.ParentID, req.Title, req.Slug, &kind) if err != nil { respondWithError(c, err) return diff --git a/internal/http/api/ensure_page.go b/internal/http/api/ensure_page.go index db56f85f..a13ee13e 100644 --- a/internal/http/api/ensure_page.go +++ b/internal/http/api/ensure_page.go @@ -4,6 +4,7 @@ import ( "net/http" "github.com/gin-gonic/gin" + "github.com/perber/wiki/internal/core/tree" auth_middleware "github.com/perber/wiki/internal/http/middleware/auth" "github.com/perber/wiki/internal/wiki" ) @@ -26,7 +27,8 @@ func EnsurePageHandler(w *wiki.Wiki) gin.HandlerFunc { return } - result, err := w.EnsurePath(user.ID, req.Path, req.TargetTitle) + kind := tree.NodeKindPage + result, err := w.EnsurePath(user.ID, req.Path, req.TargetTitle, &kind) if err != nil { respondWithError(c, err) return diff --git a/internal/http/api/helpers.go b/internal/http/api/helpers.go index df7283b6..47a21ac7 100644 --- a/internal/http/api/helpers.go +++ b/internal/http/api/helpers.go @@ -83,6 +83,7 @@ func ToAPINode(node *tree.PageNode, parentPath string, userResolver *auth.UserRe Slug: node.Slug, Path: path, Position: node.Position, + Kind: node.Kind, Metadata: NodeMetadata{ CreatedAt: node.Metadata.CreatedAt.Format(time.RFC3339), UpdatedAt: node.Metadata.UpdatedAt.Format(time.RFC3339), diff --git a/internal/http/api/import.go b/internal/http/api/import.go new file mode 100644 index 00000000..31911d51 --- /dev/null +++ b/internal/http/api/import.go @@ -0,0 +1,88 @@ +package api + +import ( + "net/http" + + "github.com/gin-gonic/gin" + auth_middleware "github.com/perber/wiki/internal/http/middleware/auth" + "github.com/perber/wiki/internal/importer" + "github.com/perber/wiki/internal/wiki" +) + +func CreateImportPlanHandler(svc *importer.ImporterService) gin.HandlerFunc { + return func(c *gin.Context) { + user := auth_middleware.MustGetUser(c) + if user == nil { + return + } + + const maxUploadSize = 500 << 20 // 500 MiB (~524 MB) + c.Request.Body = http.MaxBytesReader(c.Writer, c.Request.Body, maxUploadSize) + + // Parse form + if err := c.Request.ParseMultipartForm(maxUploadSize); err != nil { + c.JSON(http.StatusRequestEntityTooLarge, gin.H{"error": "upload exceeds maximum size limit of 500 MiB"}) + return + } + + // multipart: file + fh, err := c.FormFile("file") + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing file"}) + return + } + + file, err := fh.Open() + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "failed to open uploaded file"}) + return + } + defer file.Close() + + // optional: targetBasePath from form (defaults to empty string = root) + targetBasePath := c.PostForm("targetBasePath") + + plan, err := svc.CreateImportPlanFromZipUpload(file, targetBasePath) + if err != nil { + respondWithError(c, err) + return + } + + c.JSON(http.StatusOK, plan) + } +} + +func GetImportPlanHandler(svc *importer.ImporterService) gin.HandlerFunc { + return func(c *gin.Context) { + plan, err := svc.GetCurrentPlan() + if err != nil { + respondWithError(c, err) + return + } + c.JSON(http.StatusOK, plan) + } +} + +func ExecuteImportHandler(svc *importer.ImporterService, w *wiki.Wiki) gin.HandlerFunc { + return func(c *gin.Context) { + user := auth_middleware.MustGetUser(c) + if user == nil { + return + } + + res, err := svc.ExecuteCurrentPlan(user.ID) + if err != nil { + respondWithError(c, err) + return + } + + c.JSON(http.StatusOK, res) + } +} + +func ClearImportPlanHandler(svc *importer.ImporterService) gin.HandlerFunc { + return func(c *gin.Context) { + svc.ClearCurrentPlan() + c.JSON(http.StatusOK, gin.H{"ok": true}) + } +} diff --git a/internal/http/api/node.go b/internal/http/api/node.go index 96c69c08..f463f80c 100644 --- a/internal/http/api/node.go +++ b/internal/http/api/node.go @@ -1,6 +1,9 @@ package api -import "github.com/perber/wiki/internal/core/auth" +import ( + "github.com/perber/wiki/internal/core/auth" + "github.com/perber/wiki/internal/core/tree" +) type NodeMetadata struct { CreatedAt string `json:"createdAt"` @@ -13,11 +16,12 @@ type NodeMetadata struct { } type Node struct { - ID string `json:"id"` - Title string `json:"title"` - Slug string `json:"slug"` - Path string `json:"path"` - Position int `json:"position"` - Children []*Node `json:"children"` - Metadata NodeMetadata `json:"metadata"` + ID string `json:"id"` + Title string `json:"title"` + Slug string `json:"slug"` + Path string `json:"path"` + Position int `json:"position"` + Kind tree.NodeKind `json:"kind"` + Children []*Node `json:"children"` + Metadata NodeMetadata `json:"metadata"` } diff --git a/internal/http/api/update_page.go b/internal/http/api/update_page.go index ded576bb..d49a0cc6 100644 --- a/internal/http/api/update_page.go +++ b/internal/http/api/update_page.go @@ -4,6 +4,7 @@ import ( "net/http" "github.com/gin-gonic/gin" + "github.com/perber/wiki/internal/core/tree" auth_middleware "github.com/perber/wiki/internal/http/middleware/auth" "github.com/perber/wiki/internal/wiki" ) @@ -13,9 +14,9 @@ func UpdatePageHandler(w *wiki.Wiki) gin.HandlerFunc { id := c.Param("id") var req struct { - Title string `json:"title" binding:"required"` - Slug string `json:"slug" binding:"required"` - Content string `json:"content"` + Title string `json:"title" binding:"required"` + Slug string `json:"slug" binding:"required"` + Content *string `json:"content"` } if err := c.ShouldBindJSON(&req); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid payload"}) @@ -27,7 +28,8 @@ func UpdatePageHandler(w *wiki.Wiki) gin.HandlerFunc { return } - page, err := w.UpdatePage(user.ID, id, req.Title, req.Slug, req.Content) + kind := tree.NodeKindPage + page, err := w.UpdatePage(user.ID, id, req.Title, req.Slug, req.Content, &kind) if err != nil { respondWithError(c, err) return diff --git a/internal/http/router.go b/internal/http/router.go index 6eb09bdb..95a12683 100644 --- a/internal/http/router.go +++ b/internal/http/router.go @@ -4,6 +4,7 @@ import ( "embed" "io/fs" "log" + "log/slog" "net/http" "os" "path/filepath" @@ -14,6 +15,7 @@ import ( "github.com/perber/wiki/internal/http/api" auth_middleware "github.com/perber/wiki/internal/http/middleware/auth" "github.com/perber/wiki/internal/http/middleware/security" + "github.com/perber/wiki/internal/importer" "github.com/perber/wiki/internal/wiki" ) @@ -28,6 +30,26 @@ var EmbedFrontend = "false" // Environment is a flag to set the environment var Environment = "development" +// Slog Wrapper for Gin (Info level) +type slogWriter struct { + logger *slog.Logger +} + +func (sw *slogWriter) Write(p []byte) (n int, err error) { + sw.logger.Info(strings.TrimSpace(string(p))) + return len(p), nil +} + +// Slog Wrapper for Gin Errors (Error level) +type slogErrorWriter struct { + logger *slog.Logger +} + +func (sew *slogErrorWriter) Write(p []byte) (n int, err error) { + sew.logger.Error(strings.TrimSpace(string(p))) + return len(p), nil +} + type RouterOptions struct { PublicAccess bool // Whether the wiki allows public read access InjectCodeInHeader string // Raw HTML/JS code to inject into the tag @@ -38,6 +60,16 @@ type RouterOptions struct { AuthDisabled bool // Whether authentication is disabled } +// wireImporterService sets up and returns an ImporterService instance +// Parameters: +// - w: the wiki instance to use for importing +func wireImporterService(w *wiki.Wiki) *importer.ImporterService { + slugger := w.GetSlugService() + planner := importer.NewPlanner(w, slugger) + store := importer.NewPlanStore() + return importer.NewImporterService(planner, store) +} + // NewRouter creates a new HTTP router for the wiki application. // Parameters: // - wikiInstance: the wiki instance to serve @@ -49,6 +81,12 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { gin.SetMode(gin.DebugMode) } + // Set Gin to use slog for logging + gin.DefaultWriter = &slogWriter{logger: slog.Default().With("component", "gin")} + gin.DefaultErrorWriter = &slogErrorWriter{logger: slog.Default().With("component", "gin")} + + importerService := wireImporterService(wikiInstance) + router := gin.Default() router.StaticFS("/assets", gin.Dir(wikiInstance.GetAssetService().GetAssetsDir(), true)) @@ -113,6 +151,7 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { // Pages requiresAuthGroup.POST("/pages", auth_middleware.RequireEditorOrAdmin(), api.CreatePageHandler(wikiInstance)) requiresAuthGroup.POST("/pages/ensure", auth_middleware.RequireEditorOrAdmin(), api.EnsurePageHandler(wikiInstance)) + requiresAuthGroup.POST("/pages/convert/:id", auth_middleware.RequireEditorOrAdmin(), api.ConvertPageHandler(wikiInstance)) requiresAuthGroup.POST("/pages/copy/:id", auth_middleware.RequireEditorOrAdmin(), api.CopyPageHandler(wikiInstance)) requiresAuthGroup.PUT("/pages/:id", auth_middleware.RequireEditorOrAdmin(), api.UpdatePageHandler(wikiInstance)) requiresAuthGroup.DELETE("/pages/:id", auth_middleware.RequireEditorOrAdmin(), api.DeletePageHandler(wikiInstance)) @@ -144,22 +183,28 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { requiresAuthGroup.GET("/pages/:id/assets", auth_middleware.RequireEditorOrAdmin(), api.ListAssetsHandler(wikiInstance)) requiresAuthGroup.PUT("/pages/:id/assets/rename", auth_middleware.RequireEditorOrAdmin(), api.RenameAssetHandler(wikiInstance)) requiresAuthGroup.DELETE("/pages/:id/assets/:name", auth_middleware.RequireEditorOrAdmin(), api.DeleteAssetHandler(wikiInstance)) + + // Importer + requiresAuthGroup.POST("/import/plan", auth_middleware.RequireEditorOrAdmin(), api.CreateImportPlanHandler(importerService)) + requiresAuthGroup.GET("/import/plan", auth_middleware.RequireEditorOrAdmin(), api.GetImportPlanHandler(importerService)) + requiresAuthGroup.POST("/import/execute", auth_middleware.RequireEditorOrAdmin(), api.ExecuteImportHandler(importerService, wikiInstance)) + requiresAuthGroup.DELETE("/import/plan", auth_middleware.RequireEditorOrAdmin(), api.ClearImportPlanHandler(importerService)) } // Serve branding assets (logos, favicons) with extension validation router.GET("/branding/:filename", func(c *gin.Context) { filename := c.Param("filename") - + // Sanitize filename to prevent directory traversal and malicious input // Only allow simple filenames (no path separators, no null bytes, no ..) - if strings.Contains(filename, "..") || - strings.Contains(filename, "/") || - strings.Contains(filename, "\\") || + if strings.Contains(filename, "..") || + strings.Contains(filename, "/") || + strings.Contains(filename, "\\") || strings.Contains(filename, "\x00") { c.Status(http.StatusForbidden) return } - + // Get allowed extensions from branding constraints constraints, err := wikiInstance.GetBrandingConstraints() if err != nil { @@ -167,7 +212,7 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { c.Status(http.StatusInternalServerError) return } - + // Build a combined set of allowed extensions for O(1) lookup allowedExts := make(map[string]bool) for _, ext := range constraints.LogoExts { @@ -176,22 +221,22 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { for _, ext := range constraints.FaviconExts { allowedExts[ext] = true } - + // Validate file extension against whitelist ext := strings.ToLower(filepath.Ext(filename)) if !allowedExts[ext] { c.Status(http.StatusForbidden) return } - + // Construct file path brandingDir := wikiInstance.GetBrandingService().GetBrandingAssetsDir() filePath := filepath.Join(brandingDir, filename) - + // Clean the path and verify it's within the branding directory cleanPath := filepath.Clean(filePath) cleanBrandingDir := filepath.Clean(brandingDir) - + // Ensure the resolved path is still within the branding directory // Use filepath.Rel to check the relative path doesn't escape the directory rel, err := filepath.Rel(cleanBrandingDir, cleanPath) @@ -199,7 +244,7 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { c.Status(http.StatusForbidden) return } - + // Check if file exists if _, err := os.Stat(cleanPath); os.IsNotExist(err) { c.Status(http.StatusNotFound) @@ -209,7 +254,7 @@ func NewRouter(wikiInstance *wiki.Wiki, options RouterOptions) *gin.Engine { c.Status(http.StatusInternalServerError) return } - + // Serve the file c.File(cleanPath) }) diff --git a/internal/http/router_test.go b/internal/http/router_test.go index cefcc288..9ce24efa 100644 --- a/internal/http/router_test.go +++ b/internal/http/router_test.go @@ -12,9 +12,15 @@ import ( "time" "github.com/gin-gonic/gin" + "github.com/perber/wiki/internal/core/tree" "github.com/perber/wiki/internal/wiki" ) +func pageNodeKind() *tree.NodeKind { + kind := tree.NodeKindPage + return &kind +} + func createWikiTestInstance(t *testing.T) *wiki.Wiki { w, err := wiki.NewWiki(&wiki.WikiOptions{ StorageDir: t.TempDir(), @@ -306,7 +312,7 @@ func TestDeletePageEndpoint(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Delete Me", "delete-me") + _, err := w.CreatePage("system", nil, "Delete Me", "delete-me", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -339,11 +345,11 @@ func TestDeletePageEndpoint_HasChildren(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - parent, err := w.CreatePage("system", nil, "Parent", "parent") + parent, err := w.CreatePage("system", nil, "Parent", "parent", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } - _, err = w.CreatePage("system", &parent.ID, "Child", "child") + _, err = w.CreatePage("system", &parent.ID, "Child", "child", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -360,11 +366,11 @@ func TestDeletePageEndpoint_Recursive(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - parent, err := w.CreatePage("system", nil, "Parent", "parent") + parent, err := w.CreatePage("system", nil, "Parent", "parent", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } - _, err = w.CreatePage("system", &parent.ID, "Child", "child") + _, err = w.CreatePage("system", &parent.ID, "Child", "child", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -384,7 +390,7 @@ func TestUpdatePageEndpoint(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Original Title", "original-title") + _, err := w.CreatePage("system", nil, "Original Title", "original-title", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -437,7 +443,7 @@ func TestUpdatePage_SlugRemainsIfUnchanged(t *testing.T) { router := createRouterTestInstance(w, t) // Create a page - created, err := w.CreatePage("system", nil, "Immutable Slug", "immutable-slug") + created, err := w.CreatePage("system", nil, "Immutable Slug", "immutable-slug", pageNodeKind()) if err != nil { t.Fatalf("Failed to create page: %v", err) } @@ -471,13 +477,13 @@ func TestUpdatePage_PageAlreadyExists(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Original Title", "original-title") + _, err := w.CreatePage("system", nil, "Original Title", "original-title", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } page := w.GetTree().Children[0] - _, err = w.CreatePage("system", nil, "Conflict Title", "conflict-title") + _, err = w.CreatePage("system", nil, "Conflict Title", "conflict-title", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -540,7 +546,7 @@ func TestGetPageEndpoint(t *testing.T) { router := createRouterTestInstance(w, t) // Create a page - _, err := w.CreatePage("system", nil, "Welcome", "welcome") + _, err := w.CreatePage("system", nil, "Welcome", "welcome", pageNodeKind()) if err != nil { t.Fatalf("Failed to create page: %v", err) } @@ -602,11 +608,11 @@ func TestMovePageEndpoint(t *testing.T) { router := createRouterTestInstance(w, t) // Create two pages a and b - _, err := w.CreatePage("system", nil, "Section A", "section-a") + _, err := w.CreatePage("system", nil, "Section A", "section-a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } - _, err = w.CreatePage("system", nil, "Section B", "section-b") + _, err = w.CreatePage("system", nil, "Section B", "section-b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -668,7 +674,7 @@ func TestMovePageEndpoint_ParentNotFound(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Section A", "section-a") + _, err := w.CreatePage("system", nil, "Section A", "section-a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -689,13 +695,13 @@ func TestMovePageEndpoint_CircularReference(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Section A", "section-a") + _, err := w.CreatePage("system", nil, "Section A", "section-a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } a := w.GetTree().Children[0] - _, err = w.CreatePage("system", &a.ID, "Section B", "section-b") + _, err = w.CreatePage("system", &a.ID, "Section B", "section-b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -714,19 +720,19 @@ func TestMovePage_FailsIfTargetAlreadyHasPageWithSameSlug(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Section A", "section-a") + _, err := w.CreatePage("system", nil, "Section A", "section-a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } a := w.GetTree().Children[0] - _, err = w.CreatePage("system", nil, "Section B", "section-b") + _, err = w.CreatePage("system", nil, "Section B", "section-b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } // Create Conflict Page in b - conflictPage, err := w.CreatePage("system", &a.ID, "Section B", "section-b") + conflictPage, err := w.CreatePage("system", &a.ID, "Section B", "section-b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -744,7 +750,7 @@ func TestMovePage_InTheSamePlace(t *testing.T) { defer w.Close() router := createRouterTestInstance(w, t) - _, err := w.CreatePage("system", nil, "Section A", "section-a") + _, err := w.CreatePage("system", nil, "Section A", "section-a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -763,15 +769,15 @@ func TestSortPagesEndpoint(t *testing.T) { router := createRouterTestInstance(w, t) // Create pages - page1, err := w.CreatePage("system", nil, "Page 1", "page-1") + page1, err := w.CreatePage("system", nil, "Page 1", "page-1", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } - page2, err := w.CreatePage("system", nil, "Page 2", "page-2") + page2, err := w.CreatePage("system", nil, "Page 2", "page-2", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } - page3, err := w.CreatePage("system", nil, "Page 3", "page-3") + page3, err := w.CreatePage("system", nil, "Page 3", "page-3", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -1342,7 +1348,7 @@ func TestAssetEndpoints(t *testing.T) { } // Step 1: Create page direkt über Wiki-API - page, err := w.CreatePage("system", nil, "Assets Page", "assets-page") + page, err := w.CreatePage("system", nil, "Assets Page", "assets-page", pageNodeKind()) if err != nil { t.Fatalf("Failed to create page: %v", err) } diff --git a/internal/importer/executor.go b/internal/importer/executor.go new file mode 100644 index 00000000..6fe3e017 --- /dev/null +++ b/internal/importer/executor.go @@ -0,0 +1,137 @@ +package importer + +import ( + "fmt" + "log/slog" + "path/filepath" + + "github.com/perber/wiki/internal/core/markdown" +) + +type ExecutionResult struct { + ImportedCount int `json:"imported_count"` + UpdatedCount int `json:"updated_count"` + SkippedCount int `json:"skipped_count"` + Items []ExecutionItemResult `json:"items"` + TreeHash string `json:"tree_hash"` // hash of the state of the wiki tree after import + TreeHashBefore string `json:"tree_hash_before"` // hash of the state of the wiki tree before import +} + +type ExecutionAction string + +const ( + ExecutionActionCreated ExecutionAction = "created" + ExecutionActionUpdated ExecutionAction = "updated" + ExecutionActionSkipped ExecutionAction = "skipped" +) + +type ExecutionItemResult struct { + SourcePath string `json:"source_path"` + TargetPath string `json:"target_path"` + Action ExecutionAction `json:"action"` + Error *string `json:"error,omitempty"` + Notes []string `json:"notes,omitempty"` +} + +type Executor struct { + plan *PlanResult + planOptions *PlanOptions + wiki ImporterWiki + logger *slog.Logger +} + +func NewExecutor(plan *PlanResult, planOptions *PlanOptions, wiki ImporterWiki, logger *slog.Logger) *Executor { + return &Executor{ + plan: plan, + planOptions: planOptions, + wiki: wiki, + logger: logger.With("component", "ImporterExecutor"), + } +} + +// Execute runs the import based on the provided plan +func (e *Executor) Execute(userID string) (*ExecutionResult, error) { + beforeExecution := e.wiki.TreeHash() + if e.plan.TreeHash != beforeExecution { + return nil, fmt.Errorf("plan is stale: expected tree_hash %s but got %s", e.plan.TreeHash, beforeExecution) + } + + result := &ExecutionResult{ + TreeHashBefore: beforeExecution, + } + + for _, item := range e.plan.Items { + execItem := ExecutionItemResult{ + SourcePath: item.SourcePath, + TargetPath: item.TargetPath, + Notes: append([]string{}, item.Notes...), + Error: nil, + } + + switch item.Action { + case PlanActionCreate: + // Creates the page or section and also all necessary parent sections + page, err := e.wiki.EnsurePath(userID, item.TargetPath, item.Title, &item.Kind) + if err != nil { + errMsg := err.Error() + execItem.Action = ExecutionActionSkipped + execItem.Error = &errMsg + result.SkippedCount++ + result.Items = append(result.Items, execItem) + e.logger.Error("Failed to ensure path", "target_path", item.TargetPath, "error", err) + continue + } + // Read the content from the source path + // And update the page content + if page == nil { + errMsg := "could not create page" + execItem.Action = ExecutionActionSkipped + execItem.Error = &errMsg + result.SkippedCount++ + result.Items = append(result.Items, execItem) + e.logger.Error("Could not create page", "target_path", item.TargetPath, "error", errMsg) + continue + } + sourceAbs := filepath.Join(e.planOptions.SourceBasePath, filepath.FromSlash(item.SourcePath)) + mdFile, err := markdown.LoadMarkdownFile(sourceAbs) + if err != nil { + errMsg := err.Error() + execItem.Action = ExecutionActionSkipped + execItem.Error = &errMsg + result.SkippedCount++ + result.Items = append(result.Items, execItem) + e.logger.Error("Failed to load source file", "source_path", sourceAbs, "error", err) + continue + } + body := mdFile.GetContent() + if _, err := e.wiki.UpdatePage(userID, page.ID, page.Title, page.Slug, &body, &page.Kind); err != nil { + errMsg := err.Error() + execItem.Action = ExecutionActionSkipped + execItem.Error = &errMsg + result.SkippedCount++ + result.Items = append(result.Items, execItem) + e.logger.Error("Failed to update page content", "page_id", page.ID, "error", err) + continue + } + execItem.Action = ExecutionActionCreated + result.ImportedCount++ + e.logger.Info("Imported page", "source_path", item.SourcePath, "target_path", item.TargetPath, "page_id", page.ID) + case PlanActionSkip: + execItem.Action = ExecutionActionSkipped + e.logger.Info("Skipped page", "source_path", item.SourcePath, "target_path", item.TargetPath) + result.SkippedCount++ + default: + errMsg := "unknown action" + execItem.Action = ExecutionActionSkipped + execItem.Error = &errMsg + e.logger.Info("Skipped page with unknown action", "source_path", item.SourcePath, "target_path", item.TargetPath) + result.SkippedCount++ + } + + result.Items = append(result.Items, execItem) + } + + result.TreeHash = e.wiki.TreeHash() + + return result, nil +} diff --git a/internal/importer/executor_test.go b/internal/importer/executor_test.go new file mode 100644 index 00000000..eb6af495 --- /dev/null +++ b/internal/importer/executor_test.go @@ -0,0 +1,206 @@ +package importer + +import ( + "errors" + "log/slog" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/perber/wiki/internal/core/tree" +) + +type fakeExecWiki struct { + hash string + + ensureCalls int + updateCalls int + + ensureFn func(userID, targetPath, title string, kind *tree.NodeKind) (*tree.Page, error) + updateFn func(userID, id, title, slug string, content *string, kind *tree.NodeKind) (*tree.Page, error) + + lastUpdatedContent *string +} + +func (f *fakeExecWiki) TreeHash() string { return f.hash } + +func (f *fakeExecWiki) LookupPagePath(path string) (*tree.PathLookup, error) { + panic("not used by Executor") +} + +func (f *fakeExecWiki) EnsurePath(userID string, targetPath string, title string, kind *tree.NodeKind) (*tree.Page, error) { + f.ensureCalls++ + if f.ensureFn != nil { + return f.ensureFn(userID, targetPath, title, kind) + } + return &tree.Page{PageNode: &tree.PageNode{ID: "p1", Title: title, Slug: "slug", Kind: *kind}}, nil +} + +func (f *fakeExecWiki) UpdatePage(userID string, id, title, slug string, content *string, kind *tree.NodeKind) (*tree.Page, error) { + f.updateCalls++ + f.lastUpdatedContent = content + if f.updateFn != nil { + return f.updateFn(userID, id, title, slug, content, kind) + } + // simulate tree change + f.hash = f.hash + "-changed" + return &tree.Page{PageNode: &tree.PageNode{ID: id, Title: title, Slug: slug, Kind: *kind}}, nil +} + +func writeTmp(t *testing.T, dir, rel, content string) { + t.Helper() + abs := filepath.Join(dir, filepath.FromSlash(rel)) + if err := os.MkdirAll(filepath.Dir(abs), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + if err := os.WriteFile(abs, []byte(content), 0o644); err != nil { + t.Fatalf("write: %v", err) + } +} + +func TestExecutor_StalePlan(t *testing.T) { + w := &fakeExecWiki{hash: "new"} + plan := &PlanResult{TreeHash: "old"} + opts := &PlanOptions{SourceBasePath: t.TempDir()} + ex := NewExecutor(plan, opts, w, slog.Default()) + + got, err := ex.Execute("user1") + if err == nil { + t.Fatalf("expected stale plan error") + } + if got != nil { + t.Fatalf("expected nil result on stale plan, got %#v", got) + } +} + +func TestExecutor_Create_HappyPath_StripsFrontmatter(t *testing.T) { + tmp := t.TempDir() + writeTmp(t, tmp, "a.md", "---\ntitle: X\n---\n\n# Heading\nBody") + + w := &fakeExecWiki{hash: "h1"} + plan := &PlanResult{ + TreeHash: "h1", + Items: []PlanItem{ + {SourcePath: "a.md", TargetPath: "docs/a", Title: "A", Kind: tree.NodeKindPage, Action: PlanActionCreate}, + }, + } + opts := &PlanOptions{SourceBasePath: tmp} + + ex := NewExecutor(plan, opts, w, slog.Default()) + + res, err := ex.Execute("user1") + if err != nil { + t.Fatalf("Execute err: %v", err) + } + + if res.ImportedCount != 1 || res.SkippedCount != 0 { + t.Fatalf("counts imported=%d skipped=%d", res.ImportedCount, res.SkippedCount) + } + if len(res.Items) != 1 || res.Items[0].Action != ExecutionActionCreated { + t.Fatalf("item result: %#v", res.Items) + } + if w.ensureCalls != 1 || w.updateCalls != 1 { + t.Fatalf("calls ensure=%d update=%d", w.ensureCalls, w.updateCalls) + } + + if w.lastUpdatedContent == nil { + t.Fatalf("expected content to be passed to UpdatePage") + } + if strings.Contains(*w.lastUpdatedContent, "title: X") || strings.Contains(*w.lastUpdatedContent, "---") { + t.Fatalf("frontmatter was not stripped, got: %q", *w.lastUpdatedContent) + } + if !strings.Contains(*w.lastUpdatedContent, "# Heading") { + t.Fatalf("expected body content, got: %q", *w.lastUpdatedContent) + } + + if res.TreeHashBefore != "h1" { + t.Fatalf("TreeHashBefore = %q", res.TreeHashBefore) + } + if res.TreeHash == "h1" { + t.Fatalf("expected TreeHash to change (fake changes it), got %q", res.TreeHash) + } +} + +func TestExecutor_Skip_DoesNotCallWiki(t *testing.T) { + tmp := t.TempDir() + w := &fakeExecWiki{hash: "h1"} + plan := &PlanResult{ + TreeHash: "h1", + Items: []PlanItem{ + {SourcePath: "a.md", TargetPath: "docs/a", Action: PlanActionSkip}, + }, + } + opts := &PlanOptions{SourceBasePath: tmp} + + ex := NewExecutor(plan, opts, w, slog.Default()) + res, err := ex.Execute("user1") + if err != nil { + t.Fatalf("Execute err: %v", err) + } + + if res.SkippedCount != 1 || res.ImportedCount != 0 { + t.Fatalf("counts imported=%d skipped=%d", res.ImportedCount, res.SkippedCount) + } + if w.ensureCalls != 0 || w.updateCalls != 0 { + t.Fatalf("expected no wiki calls, got ensure=%d update=%d", w.ensureCalls, w.updateCalls) + } +} + +func TestExecutor_Create_EnsurePathError_SkipsItem(t *testing.T) { + tmp := t.TempDir() + writeTmp(t, tmp, "a.md", "Body") + + w := &fakeExecWiki{ + hash: "h1", + ensureFn: func(userID, targetPath, title string, kind *tree.NodeKind) (*tree.Page, error) { + return nil, errors.New("boom") + }, + } + plan := &PlanResult{ + TreeHash: "h1", + Items: []PlanItem{ + {SourcePath: "a.md", TargetPath: "docs/a", Title: "A", Kind: tree.NodeKindPage, Action: PlanActionCreate}, + }, + } + opts := &PlanOptions{SourceBasePath: tmp} + + ex := NewExecutor(plan, opts, w, slog.Default()) + res, err := ex.Execute("user1") + if err != nil { + t.Fatalf("Execute err: %v", err) + } + if res.SkippedCount != 1 || res.ImportedCount != 0 { + t.Fatalf("counts imported=%d skipped=%d", res.ImportedCount, res.SkippedCount) + } + if res.Items[0].Error == nil || *res.Items[0].Error == "" { + t.Fatalf("expected error message") + } + if w.updateCalls != 0 { + t.Fatalf("UpdatePage should not be called") + } +} + +func TestExecutor_UnknownAction_SkipsItem(t *testing.T) { + tmp := t.TempDir() + w := &fakeExecWiki{hash: "h1"} + plan := &PlanResult{ + TreeHash: "h1", + Items: []PlanItem{ + {SourcePath: "a.md", TargetPath: "docs/a", Action: PlanActionUpdate}, // not handled in switch + }, + } + opts := &PlanOptions{SourceBasePath: tmp} + + ex := NewExecutor(plan, opts, w, slog.Default()) + res, err := ex.Execute("user1") + if err != nil { + t.Fatalf("Execute err: %v", err) + } + if res.SkippedCount != 1 { + t.Fatalf("SkippedCount=%d", res.SkippedCount) + } + if res.Items[0].Error == nil || *res.Items[0].Error != "unknown action" { + t.Fatalf("Error=%#v", res.Items[0].Error) + } +} diff --git a/internal/importer/fixtures/fixture-1.zip b/internal/importer/fixtures/fixture-1.zip new file mode 100644 index 00000000..d95b4b32 Binary files /dev/null and b/internal/importer/fixtures/fixture-1.zip differ diff --git a/internal/importer/importer_service.go b/internal/importer/importer_service.go new file mode 100644 index 00000000..99f471c8 --- /dev/null +++ b/internal/importer/importer_service.go @@ -0,0 +1,220 @@ +package importer + +import ( + "fmt" + "io" + "log/slog" + "os" + "path/filepath" + "sort" + "strings" + "time" +) + +type ImporterService struct { + planner *Planner + planStore *PlanStore + extractor *ZipExtractor + logger *slog.Logger +} + +func NewImporterService(planner *Planner, planStore *PlanStore) *ImporterService { + return &ImporterService{ + planner: planner, + planStore: planStore, + extractor: NewZipExtractor(), + logger: slog.Default().With("component", "ImporterService"), + } +} + +// CreateImportPlanFromFolder creates an import plan from a folder path +func (is *ImporterService) createImportPlanFromFolder(folderPath string, targetBasePath string) (*PlanResult, error) { + // single-plan semantics: cleanup old plan workspace if present + if old, err := is.planStore.Get(); err == nil && old != nil { + err = os.RemoveAll(old.WorkspaceRoot) + if err != nil { + return nil, fmt.Errorf("cleanup old import workspace: %w", err) + } + is.planStore.Clear() + is.logger.Info("Old import workspace cleaned up") + } + + entries, err := FindMarkdownEntries(folderPath) + if err != nil { + return nil, err + } + + opts := PlanOptions{ + SourceBasePath: folderPath, + TargetBasePath: targetBasePath, + } + + plan, err := is.planner.CreatePlan(entries, opts) + if err != nil { + return nil, err + } + + is.planStore.Set(&StoredPlan{ + Plan: plan, + PlanOptions: opts, + WorkspaceRoot: folderPath, + CreatedAt: time.Now(), + }) + is.logger.Info("Import plan created", "entries", len(entries), "workspace", folderPath) + return plan, nil +} + +// GetCurrentPlan retrieves the currently stored import plan +func (is *ImporterService) GetCurrentPlan() (*PlanResult, error) { + sp, err := is.planStore.Get() + if err != nil { + return nil, err + } + return sp.Plan, nil +} + +// ClearCurrentPlan clears the currently stored import plan +func (is *ImporterService) ClearCurrentPlan() { + if sp, err := is.planStore.Get(); err == nil && sp != nil { + if err := os.RemoveAll(sp.WorkspaceRoot); err != nil { + is.logger.Error("remove workspace failed", "error", err) + } + } + is.planStore.Clear() +} + +// ExecuteCurrentPlan executes the currently stored import plan +func (is *ImporterService) ExecuteCurrentPlan(userID string) (*ExecutionResult, error) { + sp, err := is.planStore.Get() + if err != nil { + return nil, err + } + + exec := NewExecutor(sp.Plan, &sp.PlanOptions, is.planner.wiki, is.planner.log) + res, err := exec.Execute(userID) + if err != nil { + return nil, err + } + + // After successful execution, clear the plan + if sp, err := is.planStore.Get(); err == nil && sp != nil { + if err := os.RemoveAll(sp.WorkspaceRoot); err != nil { + is.logger.Error("remove workspace failed", "error", err) + } + } + is.planStore.Clear() + + return res, nil +} + +// FindMarkdownEntries finds markdown files in the given source base path +func FindMarkdownEntries(sourceBasePath string) ([]ImportMDFile, error) { + out := []ImportMDFile{} + + err := filepath.WalkDir(sourceBasePath, func(p string, d os.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() { + return nil + } + + if strings.ToLower(filepath.Ext(d.Name())) != ".md" { + return nil + } + + rel, err := filepath.Rel(sourceBasePath, p) + if err != nil { + return fmt.Errorf("rel: %w", err) + } + + out = append(out, ImportMDFile{ + SourcePath: filepath.ToSlash(rel), + }) + return nil + }) + + // Order entries by depth (shallow first) + if err == nil { + sort.SliceStable(out, func(i, j int) bool { + depthI := strings.Count(out[i].SourcePath, "/") + depthJ := strings.Count(out[j].SourcePath, "/") + if depthI == depthJ { + return out[i].SourcePath < out[j].SourcePath + } + return depthI < depthJ + }) + } + + // index.md should always come first if present + if err == nil { + sort.SliceStable(out, func(i, j int) bool { + nameI := strings.ToLower(filepath.Base(out[i].SourcePath)) + nameJ := strings.ToLower(filepath.Base(out[j].SourcePath)) + if nameI == "index.md" && nameJ != "index.md" { + return true + } + if nameJ == "index.md" && nameI != "index.md" { + return false + } + return false + }) + } + + if err != nil { + return nil, err + } + return out, nil +} + +// CreateImportPlanFromZipUpload creates an import plan from an uploaded zip file +func (is *ImporterService) CreateImportPlanFromZipUpload( + r io.Reader, + targetBasePath string, +) (*PlanResult, error) { + ws, err := is.extractZipReaderToTemp(r) + if err != nil { + return nil, fmt.Errorf("extract zip to temp: %w", err) + } + + plan, err := is.createImportPlanFromFolder(ws.Root, targetBasePath) + if err != nil { + if err := ws.Cleanup(); err != nil { + is.logger.Error("cleanup failed", "error", err) + } + return nil, fmt.Errorf("create import plan from folder: %w", err) + } + return plan, nil +} + +func (is *ImporterService) extractZipReaderToTemp(r io.Reader) (*ZipWorkspace, error) { + tempDir := filepath.Join(os.TempDir(), "wiki-imports") + if err := os.MkdirAll(tempDir, 0o755); err != nil { + return nil, fmt.Errorf("create import temp dir: %w", err) + } + + tmp, err := os.CreateTemp(tempDir, "import-*.zip") + if err != nil { + return nil, fmt.Errorf("create temp zip: %w", err) + } + tmpPath := tmp.Name() + defer func() { + if err := os.Remove(tmpPath); err != nil { + is.logger.Error("remove temp zip failed", "error", err) + } + }() + + if _, err := io.Copy(tmp, r); err != nil { + _ = tmp.Close() + return nil, fmt.Errorf("store uploaded zip: %w", err) + } + if err := tmp.Close(); err != nil { + return nil, fmt.Errorf("close temp zip: %w", err) + } + + ws, err := is.extractor.ExtractToTemp(tmpPath) + if err != nil { + return nil, fmt.Errorf("extract zip: %w", err) + } + return ws, nil +} diff --git a/internal/importer/importer_service_test.go b/internal/importer/importer_service_test.go new file mode 100644 index 00000000..c1cfb0b3 --- /dev/null +++ b/internal/importer/importer_service_test.go @@ -0,0 +1,269 @@ +package importer + +import ( + "errors" + "log/slog" + "os" + "path/filepath" + "strings" + "testing" + "time" + + "github.com/perber/wiki/internal/core/tree" +) + +// --- Helpers ---------------------------------------------------------------- + +func mustWrite(t *testing.T, base, rel, content string) string { + t.Helper() + abs := filepath.Join(base, filepath.FromSlash(rel)) + if err := os.MkdirAll(filepath.Dir(abs), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + if err := os.WriteFile(abs, []byte(content), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + return abs +} + +func newServiceWithFakeWiki(t *testing.T, w *fakeWiki) *ImporterService { + t.Helper() + planner := NewPlanner(w, tree.NewSlugService()) + store := NewPlanStore() + return &ImporterService{ + planner: planner, + planStore: store, + extractor: NewZipExtractor(), // unused in these tests + logger: slog.Default().With("component", "ImporterServiceTest"), + } +} + +// --- Tests ------------------------------------------------------------------ + +func TestImporterService_createImportPlanFromFolder_StoresPlan(t *testing.T) { + tmp := t.TempDir() + mustWrite(t, tmp, "a.md", "# A\nbody") + + w := &fakeWiki{treeHash: "h1", lookups: map[string]*tree.PathLookup{}} + is := newServiceWithFakeWiki(t, w) + + plan, err := is.createImportPlanFromFolder(tmp, "") + if err != nil { + t.Fatalf("createImportPlanFromFolder err: %v", err) + } + if plan == nil || len(plan.Items) != 1 { + t.Fatalf("unexpected plan: %#v", plan) + } + // plan should have correct options + + if _, err := is.GetCurrentPlan(); err != nil { + t.Fatalf("GetCurrentPlan err: %v", err) + } +} + +func TestImporterService_createImportPlanFromFolder_CleansUpOldWorkspace(t *testing.T) { + // old workspace with a marker file + oldWS := t.TempDir() + marker := mustWrite(t, oldWS, "marker.txt", "x") + + // new workspace with md + newWS := t.TempDir() + mustWrite(t, newWS, "b.md", "# B") + + w := &fakeWiki{treeHash: "h1", lookups: map[string]*tree.PathLookup{}} + is := newServiceWithFakeWiki(t, w) + + // seed old plan in store + is.planStore.Set(&StoredPlan{ + Plan: &PlanResult{ID: "old", TreeHash: "h1"}, + PlanOptions: PlanOptions{SourceBasePath: oldWS}, + WorkspaceRoot: oldWS, + CreatedAt: time.Now(), + }) + + _, err := is.createImportPlanFromFolder(newWS, "") + if err != nil { + t.Fatalf("createImportPlanFromFolder err: %v", err) + } + + // old workspace should be removed + if _, statErr := os.Stat(marker); !os.IsNotExist(statErr) { + t.Fatalf("expected old workspace removed; statErr=%v", statErr) + } + + // store should now point to new workspace + + if _, err := is.GetCurrentPlan(); err != nil { + t.Fatalf("GetCurrentPlan err: %v", err) + } +} + +func TestImporterService_GetCurrentPlan_NoPlan(t *testing.T) { + w := &fakeWiki{treeHash: "h1", lookups: map[string]*tree.PathLookup{}} + is := newServiceWithFakeWiki(t, w) + + _, err := is.GetCurrentPlan() + if !errors.Is(err, ErrNoPlan) { + t.Fatalf("expected ErrNoPlan, got %v", err) + } +} + +func TestImporterService_ClearCurrentPlan(t *testing.T) { + tmp := t.TempDir() + mustWrite(t, tmp, "a.md", "# A") + + w := &fakeWiki{treeHash: "h1", lookups: map[string]*tree.PathLookup{}} + is := newServiceWithFakeWiki(t, w) + + _, err := is.createImportPlanFromFolder(tmp, "") + if err != nil { + t.Fatalf("createImportPlanFromFolder err: %v", err) + } + + is.ClearCurrentPlan() + _, err = is.GetCurrentPlan() + if !errors.Is(err, ErrNoPlan) { + t.Fatalf("expected ErrNoPlan after clear, got %v", err) + } +} + +func TestImporterService_ExecuteCurrentPlan_NoPlan(t *testing.T) { + w := &fakeWiki{treeHash: "h1", lookups: map[string]*tree.PathLookup{}} + is := newServiceWithFakeWiki(t, w) + + _, err := is.ExecuteCurrentPlan("user1") + if !errors.Is(err, ErrNoPlan) { + t.Fatalf("expected ErrNoPlan, got %v", err) + } +} + +func TestImporterService_ExecuteCurrentPlan_HappyPath_UsesExecutorAndStripsFrontmatter(t *testing.T) { + ws := t.TempDir() + mustWrite(t, ws, "a.md", "---\ntitle: X\n---\n\n# Heading\nBody") + + w := &fakeWiki{treeHash: "h1", lookups: map[string]*tree.PathLookup{}} + is := newServiceWithFakeWiki(t, w) + + plan, err := is.createImportPlanFromFolder(ws, "") + if err != nil { + t.Fatalf("createImportPlanFromFolder err: %v", err) + } + if plan.TreeHash != "h1" { + t.Fatalf("plan.TreeHash=%q want h1", plan.TreeHash) + } + + res, err := is.ExecuteCurrentPlan("user1") + if err != nil { + t.Fatalf("ExecuteCurrentPlan err: %v", err) + } + + if res.ImportedCount != 1 { + t.Fatalf("ImportedCount=%d want 1", res.ImportedCount) + } + if res.SkippedCount != 0 { + t.Fatalf("SkippedCount=%d want 0", res.SkippedCount) + } + if w.ensureCalls != 1 || w.updateCalls != 1 { + t.Fatalf("wiki calls ensure=%d update=%d", w.ensureCalls, w.updateCalls) + } + + if w.lastUpdatedContent == nil { + t.Fatalf("expected UpdatePage content") + } + if strings.Contains(*w.lastUpdatedContent, "title: X") || strings.Contains(*w.lastUpdatedContent, "---") { + t.Fatalf("frontmatter not stripped; got: %q", *w.lastUpdatedContent) + } + if !strings.Contains(*w.lastUpdatedContent, "# Heading") { + t.Fatalf("expected body to include heading; got: %q", *w.lastUpdatedContent) + } +} + +func TestImporterService_ExecuteCurrentPlan_ExecutorStalePlanPropagatesError(t *testing.T) { + ws := t.TempDir() + mustWrite(t, ws, "a.md", "# A") + + w := &fakeWiki{treeHash: "h1", lookups: map[string]*tree.PathLookup{}} + is := newServiceWithFakeWiki(t, w) + + plan, err := is.createImportPlanFromFolder(ws, "") + if err != nil { + t.Fatalf("createImportPlanFromFolder err: %v", err) + } + // make plan stale + plan.TreeHash = "OLD" + + _, err = is.ExecuteCurrentPlan("user1") + if err == nil { + t.Fatalf("expected stale plan error") + } + if !strings.Contains(err.Error(), "plan is stale") { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestFindMarkdownEntries_FindsMdRecursively_AndNormalizesSlashes(t *testing.T) { + base := t.TempDir() + mustWrite(t, base, "a.md", "x") + mustWrite(t, base, "b.txt", "x") + mustWrite(t, base, "sub/c.MD", "x") + mustWrite(t, base, "sub/deeper/d.md", "x") + + got, err := FindMarkdownEntries(base) + if err != nil { + t.Fatalf("FindMarkdownEntries err: %v", err) + } + + // collect paths in a set for stable assertion (WalkDir order is OS-dependent) + set := map[string]bool{} + for _, e := range got { + set[e.SourcePath] = true + // should be slash-normalized + if strings.Contains(e.SourcePath, `\`) { + t.Fatalf("SourcePath should be slash-normalized: %q", e.SourcePath) + } + } + + if !set["a.md"] { + t.Fatalf("missing a.md, got %#v", set) + } + if !set["sub/c.MD"] { + t.Fatalf("missing sub/c.MD, got %#v", set) + } + if !set["sub/deeper/d.md"] { + t.Fatalf("missing sub/deeper/d.md, got %#v", set) + } + if set["b.txt"] { + t.Fatalf("should not include b.txt") + } +} + +func TestImporterService_createImportPlanFromFolder_UsesTargetBasePath(t *testing.T) { + tmp := t.TempDir() + mustWrite(t, tmp, "a.md", "# A\nbody") + + w := &fakeWiki{treeHash: "h1", lookups: map[string]*tree.PathLookup{}} + is := newServiceWithFakeWiki(t, w) + + plan, err := is.createImportPlanFromFolder(tmp, "docs/imports") + if err != nil { + t.Fatalf("createImportPlanFromFolder err: %v", err) + } + if plan == nil || len(plan.Items) != 1 { + t.Fatalf("unexpected plan: %#v", plan) + } + + // Verify the plan item has the correct target path with the base path + item := plan.Items[0] + if item.TargetPath != "docs/imports/a" { + t.Fatalf("expected TargetPath 'docs/imports/a', got %q", item.TargetPath) + } + + // Verify the stored plan options has the correct target base path + sp, err := is.planStore.Get() + if err != nil { + t.Fatalf("Get plan err: %v", err) + } + if sp.PlanOptions.TargetBasePath != "docs/imports" { + t.Fatalf("expected TargetBasePath 'docs/imports', got %q", sp.PlanOptions.TargetBasePath) + } +} diff --git a/internal/importer/importer_wiki.go b/internal/importer/importer_wiki.go new file mode 100644 index 00000000..f0db788f --- /dev/null +++ b/internal/importer/importer_wiki.go @@ -0,0 +1,10 @@ +package importer + +import "github.com/perber/wiki/internal/core/tree" + +type ImporterWiki interface { + TreeHash() string + LookupPagePath(path string) (*tree.PathLookup, error) + EnsurePath(userID string, targetPath string, title string, kind *tree.NodeKind) (*tree.Page, error) + UpdatePage(userID string, id, title, slug string, content *string, kind *tree.NodeKind) (*tree.Page, error) +} diff --git a/internal/importer/plan_store.go b/internal/importer/plan_store.go new file mode 100644 index 00000000..a029297e --- /dev/null +++ b/internal/importer/plan_store.go @@ -0,0 +1,48 @@ +package importer + +import ( + "errors" + "sync" + "time" +) + +var ErrNoPlan = errors.New("no plan available") + +type StoredPlan struct { + Plan *PlanResult + PlanOptions PlanOptions + WorkspaceRoot string + CreatedAt time.Time +} + +type PlanStore struct { + mu sync.RWMutex + plan *StoredPlan +} + +func NewPlanStore() *PlanStore { + return &PlanStore{} +} + +func (ps *PlanStore) Set(sp *StoredPlan) { + ps.mu.Lock() + defer ps.mu.Unlock() + ps.plan = sp +} + +func (ps *PlanStore) Get() (*StoredPlan, error) { + ps.mu.RLock() + defer ps.mu.RUnlock() + if ps.plan == nil { + return nil, ErrNoPlan + } + return ps.plan, nil +} + +func (ps *PlanStore) Clear() *StoredPlan { + ps.mu.Lock() + defer ps.mu.Unlock() + old := ps.plan + ps.plan = nil + return old +} diff --git a/internal/importer/plan_store_test.go b/internal/importer/plan_store_test.go new file mode 100644 index 00000000..97699d68 --- /dev/null +++ b/internal/importer/plan_store_test.go @@ -0,0 +1,53 @@ +package importer + +import "testing" + +func TestPlanStoreSet(t *testing.T) { + s := NewPlanStore() + plan := &StoredPlan{} + s.Set(plan) + + retrieved, err := s.Get() + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if retrieved != plan { + t.Fatalf("expected retrieved plan to be the same as set plan") + } +} + +func TestPlanStoreGet(t *testing.T) { + s := NewPlanStore() + + _, err := s.Get() + if err == nil { + t.Fatalf("expected error when getting plan from empty store") + } +} + +func TestPlanStoreSetAndGet(t *testing.T) { + s := NewPlanStore() + plan := &StoredPlan{} + s.Set(plan) + + retrieved, err := s.Get() + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if retrieved != plan { + t.Fatalf("expected retrieved plan to be the same as set plan") + } +} + +func TestPlanStoreClear(t *testing.T) { + s := NewPlanStore() + plan := &StoredPlan{} + s.Set(plan) + + s.Clear() + + _, err := s.Get() + if err == nil { + t.Fatalf("expected error when getting plan from cleared store") + } +} diff --git a/internal/importer/planner.go b/internal/importer/planner.go new file mode 100644 index 00000000..e4d5bdf5 --- /dev/null +++ b/internal/importer/planner.go @@ -0,0 +1,214 @@ +package importer + +import ( + "errors" + "fmt" + "log/slog" + "os" + "path" + "path/filepath" + "strings" + + "github.com/perber/wiki/internal/core/markdown" + "github.com/perber/wiki/internal/core/shared" + "github.com/perber/wiki/internal/core/tree" +) + +type PlanAction string + +const ( + PlanActionCreate PlanAction = "create" // creates new node + PlanActionUpdate PlanAction = "update" // updates existing node + PlanActionSkip PlanAction = "skip" // skips existing node +) + +// ImportMDFile represents a markdown file to be imported +type ImportMDFile struct { + SourcePath string // relative path to the markdown file in the zip directory +} + +// PlanItem represents a single item in the import plan +type PlanItem struct { + SourcePath string `json:"source_path"` + TargetPath string `json:"target_path"` + Title string `json:"title"` + DesiredSlug string `json:"desired_slug"` + Kind tree.NodeKind `json:"kind"` + Exists bool `json:"exists"` + ExistingID *string `json:"existing_id"` + + Action PlanAction `json:"action"` + Conflicts []string `json:"conflicts"` + Notes []string `json:"notes"` +} + +// PlanOptions represents options for creating an import plan +type PlanOptions struct { + SourceBasePath string // base path in the import source + TargetBasePath string // base path in the wiki where to import +} + +// PlanResult represents the result of the import plan +type PlanResult struct { + ID string `json:"id"` + TreeHash string `json:"tree_hash"` // hash of the state of the wiki tree before import + Items []PlanItem `json:"items"` + Errors []string `json:"errors"` +} + +// Planner is responsible for creating an import plan +type Planner struct { + log *slog.Logger + wiki ImporterWiki + slugger *tree.SlugService +} + +// NewPlanner creates a new Planner +func NewPlanner(wiki ImporterWiki, slugger *tree.SlugService) *Planner { + return &Planner{ + log: slog.Default().With("component", "Planner"), + wiki: wiki, + slugger: slugger, + } +} + +// CreatePlan creates an import plan based on the provided entries and options +func (p *Planner) CreatePlan(entries []ImportMDFile, options PlanOptions) (*PlanResult, error) { + // Generate a unique ID for the new page + id, err := shared.GenerateUniqueID() + if err != nil { + return nil, fmt.Errorf("could not generate unique ID: %w", err) + } + result := &PlanResult{ + ID: id, + Items: []PlanItem{}, + Errors: []string{}, + TreeHash: p.wiki.TreeHash(), + } + for _, entry := range entries { + resEntry, err := p.analyzeEntry(entry, options) + if err != nil { + p.log.Warn("could not import resource", "source_path", entry.SourcePath, "error", err) + result.Errors = append(result.Errors, err.Error()) + continue + } + + result.Items = append(result.Items, *resEntry) + } + return result, nil +} + +// analyzeEntry analyzes a entry (directory or file) to be imported +func (p *Planner) analyzeEntry(mdFile ImportMDFile, options PlanOptions) (*PlanItem, error) { + // FS path for reading + sourcePath := filepath.Join(options.SourceBasePath, filepath.FromSlash(mdFile.SourcePath)) + + // Validate if sourcePath exists and is a file + info, err := os.Stat(sourcePath) + if err != nil { + return nil, err + } + if info.IsDir() { + return nil, errors.New("source path is a directory, expected a file: " + mdFile.SourcePath) + } + + // normalize source path (zip-ish) + rel := filepath.ToSlash(strings.TrimSpace(mdFile.SourcePath)) + rel = strings.TrimPrefix(rel, "/") + + filenameLower := strings.ToLower(path.Base(rel)) + sourceDir := path.Dir(rel) + if sourceDir == "." { + sourceDir = "" + } + + // normalize ONLY the source dir segments + normalizedSourceDir, err := p.slugger.NormalizePath(sourceDir, true) + if err != nil { + return nil, err + } + normalizedSourceDir = strings.Trim(normalizedSourceDir, "/") + + // compute wiki path (route) + targetBase := strings.Trim(strings.TrimSpace(options.TargetBasePath), "/") + + kind := tree.NodeKindPage + var wikiPath string + + if filenameLower == "index.md" { + kind = tree.NodeKindSection + wikiPath = strings.Trim(path.Join(targetBase, normalizedSourceDir), "/") + } else { + normalizedFilename := p.slugger.NormalizeFilename(filenameLower) // e.g. "my-page.md" + baseSlug := strings.TrimSuffix(normalizedFilename, path.Ext(normalizedFilename)) + wikiPath = strings.Trim(path.Join(targetBase, normalizedSourceDir, baseSlug), "/") + } + + // lookup existing + result, err := p.wiki.LookupPagePath(wikiPath) + if err != nil { + return nil, err + } + + var notes []string + md, err := markdown.LoadMarkdownFile(sourcePath) + if err != nil { + notes = append(notes, fmt.Sprintf("Failed to load markdown file for title extraction: %v", err)) + } + + // Determine fallback title + title := path.Base(wikiPath) // fallback to last segment of wiki path + if wikiPath == "" { + // For root-level index.md or empty paths, use filename without extension + title = strings.TrimSuffix(filenameLower, path.Ext(filenameLower)) + if title == "" { + title = "root" + } + } + + if md != nil { + var titleErr error + title, titleErr = md.GetTitle() + if titleErr != nil { + notes = append(notes, fmt.Sprintf("Failed to extract title from file: %v", titleErr)) + title = "unknown" // ensure title is set + } + } + + if !result.Exists { + // slug = last segment + slug := "" + if wikiPath != "" { + segs := strings.Split(wikiPath, "/") + slug = segs[len(segs)-1] + } + + return &PlanItem{ + SourcePath: mdFile.SourcePath, + TargetPath: wikiPath, + Title: title, + DesiredSlug: slug, + Kind: kind, + Exists: false, + Action: PlanActionCreate, + Notes: notes, + }, nil + } + + if len(result.Segments) == 0 { + return nil, errors.New("invalid lookup result with zero segments for existing path") + } + + last := result.Segments[len(result.Segments)-1] + return &PlanItem{ + SourcePath: mdFile.SourcePath, + TargetPath: wikiPath, + Title: title, + DesiredSlug: last.Slug, + Exists: true, + ExistingID: last.ID, + Kind: kind, + Action: PlanActionSkip, + Notes: notes, + }, nil +} diff --git a/internal/importer/planner_test.go b/internal/importer/planner_test.go new file mode 100644 index 00000000..74edcb2c --- /dev/null +++ b/internal/importer/planner_test.go @@ -0,0 +1,434 @@ +package importer + +import ( + "os" + "path/filepath" + "strings" + "testing" + + "github.com/perber/wiki/internal/core/tree" + "github.com/perber/wiki/internal/test_utils" +) + +type fakeWiki struct { + treeHash string + + // planner part + lookups map[string]*tree.PathLookup + lookupErr error + + // executor part + ensureCalls int + updateCalls int + lastUpdatedContent *string + + ensureErr error + ensureNilPage bool + updateErr error +} + +func (f *fakeWiki) TreeHash() string { return f.treeHash } + +func (f *fakeWiki) LookupPagePath(p string) (*tree.PathLookup, error) { + if f.lookupErr != nil { + return nil, f.lookupErr + } + if v, ok := f.lookups[p]; ok { + return v, nil + } + return &tree.PathLookup{Path: p, Exists: false, Segments: []tree.PathSegment{}}, nil +} + +func (f *fakeWiki) EnsurePath(userID string, targetPath string, title string, kind *tree.NodeKind) (*tree.Page, error) { + f.ensureCalls++ + if f.ensureErr != nil { + return nil, f.ensureErr + } + if f.ensureNilPage { + return nil, nil + } + k := tree.NodeKindPage + if kind != nil { + k = *kind + } + // create minimal page object + return &tree.Page{PageNode: &tree.PageNode{ + ID: "p1", + Title: title, + Slug: "slug", + Kind: k, + }}, nil +} + +func (f *fakeWiki) UpdatePage(userID string, id, title, slug string, content *string, kind *tree.NodeKind) (*tree.Page, error) { + f.updateCalls++ + f.lastUpdatedContent = content + if f.updateErr != nil { + return nil, f.updateErr + } + // simulate tree change after update + f.treeHash = f.treeHash + "-changed" + k := tree.NodeKindPage + if kind != nil { + k = *kind + } + return &tree.Page{PageNode: &tree.PageNode{ + ID: id, + Title: title, + Slug: slug, + Kind: k, + }}, nil +} + +func newPlannerWithFake(w *fakeWiki) *Planner { + return NewPlanner(w, tree.NewSlugService()) +} + +func TestPlanner_CreatePlan_CreateNewPage_NonIndex(t *testing.T) { + tmp := t.TempDir() + test_utils.WriteFile(t, tmp, "My Page.md", "# Hello\n\nbody") + + wiki := &fakeWiki{ + treeHash: "h1", + lookups: map[string]*tree.PathLookup{}, + } + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "My Page.md"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "/docs", + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + if res.TreeHash != "h1" { + t.Fatalf("TreeHash = %q", res.TreeHash) + } + if len(res.Errors) != 0 { + t.Fatalf("Errors = %#v", res.Errors) + } + if len(res.Items) != 1 { + t.Fatalf("Items len = %d", len(res.Items)) + } + + it := res.Items[0] + if it.Action != PlanActionCreate { + t.Fatalf("Action = %q", it.Action) + } + if it.Kind != tree.NodeKindPage { + t.Fatalf("Kind = %v", it.Kind) + } + if it.Title != "Hello" { + t.Fatalf("Title = %q", it.Title) + } + if it.TargetPath != "docs/my-page" { + t.Fatalf("TargetPath = %q (want docs/my-page)", it.TargetPath) + } + if it.DesiredSlug != "my-page" { + t.Fatalf("DesiredSlug = %q (want my-page)", it.DesiredSlug) + } +} + +func TestPlanner_CreatePlan_CreateNewSection_IndexMd(t *testing.T) { + tmp := t.TempDir() + test_utils.WriteFile(t, tmp, "Guides/index.md", "---\ntitle: Guides\n---\n\n# Ignored") + + wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "Guides/index.md"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "docs", + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + it := res.Items[0] + + if it.Kind != tree.NodeKindSection { + t.Fatalf("Kind = %v", it.Kind) + } + if it.Action != PlanActionCreate { + t.Fatalf("Action = %q", it.Action) + } + if it.TargetPath != "docs/guides" { + t.Fatalf("TargetPath = %q (want docs/guides)", it.TargetPath) + } + if it.DesiredSlug != "guides" { + t.Fatalf("DesiredSlug = %q (want guides)", it.DesiredSlug) + } + if it.Title != "Guides" { + t.Fatalf("Title = %q", it.Title) + } +} + +func TestPlanner_CreatePlan_SkipExisting_UsesLookupLastSegment(t *testing.T) { + tmp := t.TempDir() + test_utils.WriteFile(t, tmp, "a.md", "# A") + + existingID := "id123" + existingKind := tree.NodeKindPage + existingTitle := "Existing A" + + wiki := &fakeWiki{ + treeHash: "h", + lookups: map[string]*tree.PathLookup{ + "docs/a": { + Path: "docs/a", + Exists: true, + Segments: []tree.PathSegment{ + {Slug: "docs", Exists: true}, + {Slug: "a", Exists: true, ID: &existingID, Kind: &existingKind, Title: &existingTitle}, + }, + }, + }, + } + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "a.md"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "docs", + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + if len(res.Errors) != 0 { + t.Fatalf("Errors = %#v", res.Errors) + } + + it := res.Items[0] + if it.Action != PlanActionSkip { + t.Fatalf("Action = %q", it.Action) + } + if !it.Exists { + t.Fatalf("Exists = false") + } + if it.ExistingID == nil || *it.ExistingID != existingID { + t.Fatalf("ExistingID = %#v (want %q)", it.ExistingID, existingID) + } + if it.DesiredSlug != "a" { + t.Fatalf("DesiredSlug = %q (want a)", it.DesiredSlug) + } +} + +func TestPlanner_CreatePlan_Error_SourceMissing_IsCollected(t *testing.T) { + tmp := t.TempDir() + wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "missing.md"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "docs", + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + if len(res.Items) != 0 { + t.Fatalf("Items len = %d (want 0)", len(res.Items)) + } + if len(res.Errors) != 1 { + t.Fatalf("Errors len = %d (want 1)", len(res.Errors)) + } +} + +func TestPlanner_CreatePlan_Error_SourceIsDirectory_IsCollected(t *testing.T) { + tmp := t.TempDir() + if err := os.MkdirAll(filepath.Join(tmp, "dir"), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + + wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "dir"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "docs", + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + if len(res.Items) != 0 { + t.Fatalf("Items len = %d (want 0)", len(res.Items)) + } + if len(res.Errors) != 1 { + t.Fatalf("Errors len = %d (want 1)", len(res.Errors)) + } +} + +func TestPlanner_CreatePlan_Error_ExistingZeroSegments_IsCollected(t *testing.T) { + tmp := t.TempDir() + test_utils.WriteFile(t, tmp, "x.md", "# X") + + wiki := &fakeWiki{ + treeHash: "h", + lookups: map[string]*tree.PathLookup{ + "docs/x": {Path: "docs/x", Exists: true, Segments: []tree.PathSegment{}}, + }, + } + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "x.md"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "docs", + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + if len(res.Items) != 0 { + t.Fatalf("Items len = %d (want 0)", len(res.Items)) + } + if len(res.Errors) != 1 { + t.Fatalf("Errors len = %d (want 1)", len(res.Errors)) + } +} + +// ---- Title extraction ------------------------------------------------------- + +func TestPlanner_CreatePlan_TitleExtractionError_AddsNote(t *testing.T) { + tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "unreadable.md", "# Title") + + // Make file unreadable to trigger extraction error + if err := os.Chmod(abs, 0o000); err != nil { + t.Fatalf("chmod: %v", err) + } + defer func() { + if err := os.Chmod(abs, 0o644); err != nil { // restore for cleanup + t.Fatalf("chmod restore: %v", err) + } + }() + + wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "unreadable.md"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "docs", + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + if len(res.Errors) != 0 { + t.Fatalf("Errors = %#v", res.Errors) + } + if len(res.Items) != 1 { + t.Fatalf("Items len = %d (want 1)", len(res.Items)) + } + + it := res.Items[0] + if len(it.Notes) != 1 { + t.Fatalf("Notes len = %d (want 1)", len(it.Notes)) + } + if !strings.Contains(it.Notes[0], "Failed to load markdown file for title extraction") { + t.Fatalf("Note = %q (should contain 'Failed to load markdown file for title extraction')", it.Notes[0]) + } + // Title should still be set (fallback to filename) + if it.Title != "unreadable" { + t.Fatalf("Title = %q (want unreadable)", it.Title) + } +} + +func TestPlanner_analyzeEntry_NormalizesSourceDirSegments(t *testing.T) { + // "My Guides/Intro.md" -> "my-guides/intro" (SlugService.NormalizePath + NormalizeFilename) + tmp := t.TempDir() + test_utils.WriteFile(t, tmp, "My Guides/Intro.md", "# Intro") + + wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "My Guides/Intro.md"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "docs", + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + if len(res.Errors) != 0 { + t.Fatalf("Errors = %#v", res.Errors) + } + if res.Items[0].TargetPath != "docs/my-guides/intro" { + t.Fatalf("TargetPath = %q (want docs/my-guides/intro)", res.Items[0].TargetPath) + } +} + +func TestPlanner_analyzeEntry_InvalidSourceDirSegment_ReturnsError(t *testing.T) { + // NormalizePath(validate=true) nutzt IsValidSlug() nach slug.Make(). + // Ein Segment wie "!!!" sluggt zu "" => invalid. + tmp := t.TempDir() + test_utils.WriteFile(t, tmp, "!!!/a.md", "# A") + + wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "!!!/a.md"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "docs", + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + if len(res.Items) != 0 { + t.Fatalf("Items len = %d (want 0)", len(res.Items)) + } + if len(res.Errors) != 1 { + t.Fatalf("Errors len = %d (want 1)", len(res.Errors)) + } + // optional: grobe Assertion, dass es ein Validate-Fehler ist + if res.Errors[0] == "" { + t.Fatalf("unexpected error: %v", res.Errors[0]) + } +} + +func TestPlanner_CreatePlan_RootIndexMd_EmptyWikiPath_UsesFallbackTitle(t *testing.T) { + // Test case for root-level index.md with empty TargetBasePath and markdown loading failure + // When wikiPath is empty, path.Base("") returns ".", which is not meaningful. + // The fix should use filename without extension as fallback. + tmp := t.TempDir() + abs := test_utils.WriteFile(t, tmp, "index.md", "# Title") + + // Make file unreadable to trigger markdown loading failure + if err := os.Chmod(abs, 0o000); err != nil { + t.Fatalf("chmod: %v", err) + } + defer func() { + if err := os.Chmod(abs, 0o644); err != nil { // restore for cleanup + t.Fatalf("chmod restore: %v", err) + } + }() + + wiki := &fakeWiki{treeHash: "h", lookups: map[string]*tree.PathLookup{}} + p := newPlannerWithFake(wiki) + + res, err := p.CreatePlan([]ImportMDFile{{SourcePath: "index.md"}}, PlanOptions{ + SourceBasePath: tmp, + TargetBasePath: "", // empty target base path + }) + if err != nil { + t.Fatalf("CreatePlan err: %v", err) + } + if len(res.Errors) != 0 { + t.Fatalf("Errors = %#v", res.Errors) + } + if len(res.Items) != 1 { + t.Fatalf("Items len = %d (want 1)", len(res.Items)) + } + + it := res.Items[0] + if it.TargetPath != "" { + t.Fatalf("TargetPath = %q (want empty)", it.TargetPath) + } + if it.Kind != tree.NodeKindSection { + t.Fatalf("Kind = %v (want Section)", it.Kind) + } + // The title should fallback to "index" (filename without .md), not "." from path.Base("") + if it.Title != "index" { + t.Fatalf("Title = %q (want index as fallback when wikiPath is empty and markdown fails)", it.Title) + } + // Should have a note about failed markdown loading + if len(it.Notes) == 0 { + t.Fatalf("Expected notes about failed markdown loading") + } + if !strings.Contains(it.Notes[0], "Failed to load markdown file for title extraction") { + t.Fatalf("Note = %q (should contain 'Failed to load markdown file for title extraction')", it.Notes[0]) + } +} diff --git a/internal/importer/zip_extractor.go b/internal/importer/zip_extractor.go new file mode 100644 index 00000000..970b2b30 --- /dev/null +++ b/internal/importer/zip_extractor.go @@ -0,0 +1,107 @@ +package importer + +import ( + "archive/zip" + "fmt" + "io" + "log/slog" + "os" + "path/filepath" + "strings" +) + +type ZipExtractor struct { + log *slog.Logger +} + +func NewZipExtractor() *ZipExtractor { + return &ZipExtractor{ + log: slog.Default().With("component", "ZipExtractor"), + } +} + +func (x *ZipExtractor) ExtractToTemp(zipPath string) (*ZipWorkspace, error) { + r, err := zip.OpenReader(zipPath) + if err != nil { + return nil, fmt.Errorf("open zip: %w", err) + } + defer r.Close() + + root, err := os.MkdirTemp("", "import-*") + if err != nil { + return nil, fmt.Errorf("mkdtemp: %w", err) + } + + ws := &ZipWorkspace{Root: root} + // Helper to clean up and return error + fail := func(e error) (*ZipWorkspace, error) { + if err = ws.Cleanup(); err != nil { + x.log.Error("cleanup failed", "error", err) + } + return nil, e + } + + for _, f := range r.File { + name := strings.TrimSpace(f.Name) + if name == "" { + continue + } + if f.FileInfo().IsDir() { + continue + } + + destPath, err := safeJoin(ws.Root, name) + if err != nil { + return fail(fmt.Errorf("invalid zip entry %q: %w", f.Name, err)) + } + + if err := os.MkdirAll(filepath.Dir(destPath), 0o755); err != nil { + return fail(fmt.Errorf("mkdir: %w", err)) + } + + // Extract single file in inner scope to ensure deterministic cleanup per iteration + if err := func() error { + rc, err := f.Open() + if err != nil { + return fmt.Errorf("open zip entry: %w", err) + } + defer rc.Close() + + out, err := os.OpenFile(destPath, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0o644) + if err != nil { + return fmt.Errorf("create file: %w", err) + } + defer func() { + if err := out.Close(); err != nil { + x.log.Error("close failed", "error", err) + } + }() + + if _, err := io.Copy(out, rc); err != nil { + return fmt.Errorf("write file: %w", err) + } + + return nil + }(); err != nil { + return fail(err) + } + } + + return ws, nil +} + +func safeJoin(baseDir, zipEntryName string) (string, error) { + clean := filepath.Clean(filepath.FromSlash(zipEntryName)) + if filepath.IsAbs(clean) { + return "", fmt.Errorf("absolute path not allowed") + } + dest := filepath.Join(baseDir, clean) + + baseClean := filepath.Clean(baseDir) + string(filepath.Separator) + destClean := filepath.Clean(dest) + + if !strings.HasPrefix(destClean+string(filepath.Separator), baseClean) { + return "", fmt.Errorf("path traversal detected: %q", zipEntryName) + } + return destClean, nil +} diff --git a/internal/importer/zip_extractor_test.go b/internal/importer/zip_extractor_test.go new file mode 100644 index 00000000..e9e89e52 --- /dev/null +++ b/internal/importer/zip_extractor_test.go @@ -0,0 +1,66 @@ +package importer + +import ( + "os" + "path/filepath" + "testing" +) + +func TestZipExtractor_ValidateExtractedFiles(t *testing.T) { + currentDir, err := os.Getwd() + if err != nil { + t.Fatalf("Failed to get current directory: %v", err) + } + zipPath := "fixtures/fixture-1.zip" + + extractor := NewZipExtractor() + ws, err := extractor.ExtractToTemp(filepath.Join(currentDir, zipPath)) + if err != nil { + t.Fatalf("ExtractToTemp failed: %v", err) + } + defer func() { + if err := ws.Cleanup(); err != nil { + t.Fatalf("Cleanup failed: %v", err) + } + }() + + // Check if expected files exist + expectedFiles := []string{ + "home.md", + "features/index.md", + "features/mermaind.md", + } + + for _, relPath := range expectedFiles { + fullPath := filepath.Join(ws.Root, relPath) + if _, err := os.Stat(fullPath); os.IsNotExist(err) { + t.Errorf("Expected file %s does not exist", relPath) + } + } +} + +func TestZipExtractor_Cleanup(t *testing.T) { + currentDir, err := os.Getwd() + if err != nil { + t.Fatalf("Failed to get current directory: %v", err) + } + zipPath := "fixtures/fixture-1.zip" + + extractor := NewZipExtractor() + ws, err := extractor.ExtractToTemp(filepath.Join(currentDir, zipPath)) + if err != nil { + t.Fatalf("ExtractToTemp failed: %v", err) + } + + workspaceRoot := ws.Root + + // Cleanup + if err := ws.Cleanup(); err != nil { + t.Fatalf("Cleanup failed: %v", err) + } + + // Verify cleanup + if _, err := os.Stat(workspaceRoot); !os.IsNotExist(err) { + t.Errorf("Workspace root %s still exists after cleanup", workspaceRoot) + } +} diff --git a/internal/importer/zip_workspace.go b/internal/importer/zip_workspace.go new file mode 100644 index 00000000..df880a0a --- /dev/null +++ b/internal/importer/zip_workspace.go @@ -0,0 +1,14 @@ +package importer + +import "os" + +type ZipWorkspace struct { + Root string +} + +func (ws *ZipWorkspace) Cleanup() error { + if ws == nil || ws.Root == "" { + return nil + } + return os.RemoveAll(ws.Root) +} diff --git a/internal/links/link_service_test.go b/internal/links/link_service_test.go index c9517574..37118b16 100644 --- a/internal/links/link_service_test.go +++ b/internal/links/link_service_test.go @@ -6,6 +6,11 @@ import ( "github.com/perber/wiki/internal/core/tree" ) +func pageNodeKind() *tree.NodeKind { + kind := tree.NodeKindPage + return &kind +} + func TestExtractLinksFromMarkdown_FiltersExternalAndNormalizes(t *testing.T) { md := ` # Example @@ -58,18 +63,18 @@ func setupTreeForLinksTest(t *testing.T) (*tree.TreeService, string, string) { } // create "docs" under root - docsIDPtr, err := ts.CreatePage("system", nil, "Docs", "docs") + docsIDPtr, err := ts.CreateNode("system", nil, "Docs", "docs", pageNodeKind()) if err != nil { t.Fatalf("CreatePage docs failed: %v", err) } docsID := *docsIDPtr // create "page1" and "page2" under docs - page1IDPtr, err := ts.CreatePage("system", &docsID, "Page 1", "page1") + page1IDPtr, err := ts.CreateNode("system", &docsID, "Page 1", "page1", pageNodeKind()) if err != nil { t.Fatalf("CreatePage page1 failed: %v", err) } - page2IDPtr, err := ts.CreatePage("system", &docsID, "Page 2", "page2") + page2IDPtr, err := ts.CreateNode("system", &docsID, "Page 2", "page2", pageNodeKind()) if err != nil { t.Fatalf("CreatePage page2 failed: %v", err) } @@ -168,13 +173,13 @@ func setupLinkService(t *testing.T) (*LinkService, *tree.TreeService, *LinksStor func createSimpleLinkedPages(t *testing.T, ts *tree.TreeService) (pageAID, pageBID string) { t.Helper() - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage a failed: %v", err) } pageAID = *aIDPtr - bIDPtr, err := ts.CreatePage("system", nil, "Page B", "b") + bIDPtr, err := ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage b failed: %v", err) } @@ -185,7 +190,7 @@ func createSimpleLinkedPages(t *testing.T, ts *tree.TreeService) (pageAID, pageB t.Fatalf("GetPage a failed: %v", err) } contentA := "Link to B: [Go to B](/b)" - if err := ts.UpdatePage("system", aPage.ID, aPage.Title, aPage.Slug, contentA); err != nil { + if err := ts.UpdateNode("system", aPage.ID, aPage.Title, aPage.Slug, &contentA); err != nil { t.Fatalf("UpdatePage a failed: %v", err) } @@ -194,7 +199,7 @@ func createSimpleLinkedPages(t *testing.T, ts *tree.TreeService) (pageAID, pageB t.Fatalf("GetPage b failed: %v", err) } contentB := "# Page B\nNo outgoing links." - if err := ts.UpdatePage("system", bPage.ID, bPage.Title, bPage.Slug, contentB); err != nil { + if err := ts.UpdateNode("system", bPage.ID, bPage.Title, bPage.Slug, &contentB); err != nil { t.Fatalf("UpdatePage b failed: %v", err) } @@ -242,7 +247,8 @@ func TestLinkService_IndexAllPages_ReplacesExistingLinks(t *testing.T) { if err != nil { t.Fatalf("GetPage a failed: %v", err) } - if err := ts.UpdatePage("system", aPage.ID, aPage.Title, aPage.Slug, "No more links."); err != nil { + var noLinks string = "No more links." + if err := ts.UpdateNode("system", aPage.ID, aPage.Title, aPage.Slug, &noLinks); err != nil { t.Fatalf("UpdatePage a failed: %v", err) } @@ -355,9 +361,9 @@ func TestLinkService_GetOutgoingLinksForPage_ReturnsOutgoingLinks(t *testing.T) func TestLinkService_GetOutgoingLinksForPage_NoOutgoings(t *testing.T) { svc, ts, _ := setupLinkService(t) - aIDPtr, err := ts.CreatePage("system", nil, "Lonely Page", "lonely") + aIDPtr, err := ts.CreateNode("system", nil, "Lonely Page", "lonely", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage lonely failed: %v", err) + t.Fatalf("CreateNode lonely failed: %v", err) } lonelyID := *aIDPtr @@ -366,8 +372,9 @@ func TestLinkService_GetOutgoingLinksForPage_NoOutgoings(t *testing.T) { t.Fatalf("GetPage lonely failed: %v", err) } - if err := ts.UpdatePage("system", page.ID, page.Title, page.Slug, "Just some text, no links."); err != nil { - t.Fatalf("UpdatePage lonely failed: %v", err) + var noLinks string = "Just some text, no links." + if err := ts.UpdateNode("system", page.ID, page.Title, page.Slug, &noLinks); err != nil { + t.Fatalf("UpdateNode lonely failed: %v", err) } if err := svc.IndexAllPages(); err != nil { @@ -434,9 +441,9 @@ func TestToOutgoingResult_MapsOutgoingToResultItems(t *testing.T) { func TestLinkService_LateCreatedTarget_BecomesResolvedAfterReindex(t *testing.T) { svc, ts, _ := setupLinkService(t) - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage a failed: %v", err) + t.Fatalf("CreateNode a failed: %v", err) } pageAID := *aIDPtr @@ -444,8 +451,9 @@ func TestLinkService_LateCreatedTarget_BecomesResolvedAfterReindex(t *testing.T) if err != nil { t.Fatalf("GetPage a failed: %v", err) } - if err := ts.UpdatePage("system", aPage.ID, aPage.Title, aPage.Slug, "Link to B: [Go](/b)"); err != nil { - t.Fatalf("UpdatePage a failed: %v", err) + var linkToB string = "Link to B: [Go](/b)" + if err := ts.UpdateNode("system", aPage.ID, aPage.Title, aPage.Slug, &linkToB); err != nil { + t.Fatalf("UpdateNode a failed: %v", err) } if err := svc.IndexAllPages(); err != nil { @@ -469,9 +477,9 @@ func TestLinkService_LateCreatedTarget_BecomesResolvedAfterReindex(t *testing.T) t.Fatalf("expected empty ToPageID for broken link, got %q", out1.Outgoings[0].ToPageID) } - bIDPtr, err := ts.CreatePage("system", nil, "Page B", "b") + bIDPtr, err := ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage b failed: %v", err) + t.Fatalf("CreateNode b failed: %v", err) } pageBID := *bIDPtr @@ -479,8 +487,9 @@ func TestLinkService_LateCreatedTarget_BecomesResolvedAfterReindex(t *testing.T) if err != nil { t.Fatalf("GetPage b failed: %v", err) } - if err := ts.UpdatePage("system", bPage.ID, bPage.Title, bPage.Slug, "# Page B"); err != nil { - t.Fatalf("UpdatePage b failed: %v", err) + var pageBContent string = "# Page B" + if err := ts.UpdateNode("system", bPage.ID, bPage.Title, bPage.Slug, &pageBContent); err != nil { + t.Fatalf("UpdateNode b failed: %v", err) } if err := svc.IndexAllPages(); err != nil { @@ -519,9 +528,9 @@ func TestLinkService_LateCreatedTarget_BecomesResolvedAfterReindex(t *testing.T) func TestLinkService_HealOnPageCreate_ResolvesBrokenLinksWithoutReindex(t *testing.T) { svc, ts, _ := setupLinkService(t) - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } pageAID := *aIDPtr @@ -529,8 +538,9 @@ func TestLinkService_HealOnPageCreate_ResolvesBrokenLinksWithoutReindex(t *testi if err != nil { t.Fatalf("GetPage A failed: %v", err) } - if err := ts.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, "Link to B: [Go](/b)"); err != nil { - t.Fatalf("UpdatePage A failed: %v", err) + var linkToB string = "Link to B: [Go](/b)" + if err := ts.UpdateNode("system", pageA.ID, pageA.Title, pageA.Slug, &linkToB); err != nil { + t.Fatalf("UpdateNode A failed: %v", err) } if err := svc.IndexAllPages(); err != nil { @@ -555,9 +565,9 @@ func TestLinkService_HealOnPageCreate_ResolvesBrokenLinksWithoutReindex(t *testi t.Fatalf("expected empty ToPageID before heal, got %q", out1.Outgoings[0].ToPageID) } - bIDPtr, err := ts.CreatePage("system", nil, "Page B", "b") + bIDPtr, err := ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage B failed: %v", err) + t.Fatalf("CreateNode B failed: %v", err) } pageBID := *bIDPtr @@ -604,21 +614,21 @@ func TestLinksStore_GetBrokenIncomingForPath_ReturnsBrokenLinks(t *testing.T) { svc, ts, store := setupLinkService(t) // Create three pages: A, B, C - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } pageAID := *aIDPtr - bIDPtr, err := ts.CreatePage("system", nil, "Page B", "b") + bIDPtr, err := ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage B failed: %v", err) + t.Fatalf("CreateNode B failed: %v", err) } pageBID := *bIDPtr - cIDPtr, err := ts.CreatePage("system", nil, "Page C", "c") + cIDPtr, err := ts.CreateNode("system", nil, "Page C", "c", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage C failed: %v", err) + t.Fatalf("CreateNode C failed: %v", err) } pageCID := *cIDPtr @@ -627,16 +637,17 @@ func TestLinksStore_GetBrokenIncomingForPath_ReturnsBrokenLinks(t *testing.T) { if err != nil { t.Fatalf("GetPage A failed: %v", err) } - if err := ts.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, "Link: [Missing](/nonexistent)"); err != nil { - t.Fatalf("UpdatePage A failed: %v", err) + var linkToMissing string = "Link: [Missing](/nonexistent)" + if err := ts.UpdateNode("system", pageA.ID, pageA.Title, pageA.Slug, &linkToMissing); err != nil { + t.Fatalf("UpdateNode A failed: %v", err) } pageB, err := ts.GetPage(pageBID) if err != nil { t.Fatalf("GetPage B failed: %v", err) } - if err := ts.UpdatePage("system", pageB.ID, pageB.Title, pageB.Slug, "Link: [Missing](/nonexistent)"); err != nil { - t.Fatalf("UpdatePage B failed: %v", err) + if err := ts.UpdateNode("system", pageB.ID, pageB.Title, pageB.Slug, &linkToMissing); err != nil { + t.Fatalf("UpdateNode B failed: %v", err) } // Page C links to a different broken page @@ -644,8 +655,9 @@ func TestLinksStore_GetBrokenIncomingForPath_ReturnsBrokenLinks(t *testing.T) { if err != nil { t.Fatalf("GetPage C failed: %v", err) } - if err := ts.UpdatePage("system", pageC.ID, pageC.Title, pageC.Slug, "Link: [Other](/other-missing)"); err != nil { - t.Fatalf("UpdatePage C failed: %v", err) + var linkToOther string = "Link: [Other](/other-missing)" + if err := ts.UpdateNode("system", pageC.ID, pageC.Title, pageC.Slug, &linkToOther); err != nil { + t.Fatalf("UpdateNode C failed: %v", err) } // Index all pages to create broken links @@ -692,15 +704,15 @@ func TestLinksStore_GetBrokenIncomingForPath_ReturnsBrokenLinks(t *testing.T) { func TestLinksStore_GetBrokenIncomingForPath_FiltersByPath(t *testing.T) { svc, ts, store := setupLinkService(t) - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } pageAID := *aIDPtr - bIDPtr, err := ts.CreatePage("system", nil, "Page B", "b") + bIDPtr, err := ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage B failed: %v", err) + t.Fatalf("CreateNode B failed: %v", err) } pageBID := *bIDPtr @@ -709,8 +721,9 @@ func TestLinksStore_GetBrokenIncomingForPath_FiltersByPath(t *testing.T) { if err != nil { t.Fatalf("GetPage A failed: %v", err) } - if err := ts.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, "Link: [Missing1](/missing1)"); err != nil { - t.Fatalf("UpdatePage A failed: %v", err) + var linkToMissing1 string = "Link: [Missing1](/missing1)" + if err := ts.UpdateNode("system", pageA.ID, pageA.Title, pageA.Slug, &linkToMissing1); err != nil { + t.Fatalf("UpdateNode A failed: %v", err) } // Page B links to "/missing2" @@ -718,8 +731,9 @@ func TestLinksStore_GetBrokenIncomingForPath_FiltersByPath(t *testing.T) { if err != nil { t.Fatalf("GetPage B failed: %v", err) } - if err := ts.UpdatePage("system", pageB.ID, pageB.Title, pageB.Slug, "Link: [Missing2](/missing2)"); err != nil { - t.Fatalf("UpdatePage B failed: %v", err) + var linkToMissing2 string = "Link: [Missing2](/missing2)" + if err := ts.UpdateNode("system", pageB.ID, pageB.Title, pageB.Slug, &linkToMissing2); err != nil { + t.Fatalf("UpdateNode B failed: %v", err) } if err := svc.IndexAllPages(); err != nil { @@ -756,15 +770,15 @@ func TestLinksStore_GetBrokenIncomingForPath_FiltersByPath(t *testing.T) { func TestLinksStore_GetBrokenIncomingForPath_EmptyWhenNoBrokenLinks(t *testing.T) { svc, ts, store := setupLinkService(t) - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } pageAID := *aIDPtr - _, err = ts.CreatePage("system", nil, "Page B", "b") + _, err = ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage B failed: %v", err) + t.Fatalf("CreateNode B failed: %v", err) } // Page A links to existing Page B (not broken) @@ -772,8 +786,9 @@ func TestLinksStore_GetBrokenIncomingForPath_EmptyWhenNoBrokenLinks(t *testing.T if err != nil { t.Fatalf("GetPage A failed: %v", err) } - if err := ts.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, "Link: [To B](/b)"); err != nil { - t.Fatalf("UpdatePage A failed: %v", err) + var linkToB string = "Link: [To B](/b)" + if err := ts.UpdateNode("system", pageA.ID, pageA.Title, pageA.Slug, &linkToB); err != nil { + t.Fatalf("UpdateNode A failed: %v", err) } if err := svc.IndexAllPages(); err != nil { @@ -805,19 +820,19 @@ func TestLinksStore_GetBrokenIncomingForPath_OrdersByFromTitle(t *testing.T) { svc, ts, store := setupLinkService(t) // Create three pages with titles that should be ordered alphabetically - zIDPtr, err := ts.CreatePage("system", nil, "Zebra Page", "z") + zIDPtr, err := ts.CreateNode("system", nil, "Zebra Page", "z", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage Z failed: %v", err) + t.Fatalf("CreateNode Z failed: %v", err) } - aIDPtr, err := ts.CreatePage("system", nil, "Alpha Page", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Alpha Page", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } - mIDPtr, err := ts.CreatePage("system", nil, "Middle Page", "m") + mIDPtr, err := ts.CreateNode("system", nil, "Middle Page", "m", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage M failed: %v", err) + t.Fatalf("CreateNode M failed: %v", err) } // All three pages link to the same non-existent page @@ -827,8 +842,9 @@ func TestLinksStore_GetBrokenIncomingForPath_OrdersByFromTitle(t *testing.T) { if err != nil { t.Fatalf("GetPage(%s) failed: %v", id, err) } - if err := ts.UpdatePage("system", page.ID, page.Title, page.Slug, "Link: [Missing](/missing)"); err != nil { - t.Fatalf("UpdatePage(%s) failed: %v", id, err) + var linkToMissing string = "Link: [Missing](/missing)" + if err := ts.UpdateNode("system", page.ID, page.Title, page.Slug, &linkToMissing); err != nil { + t.Fatalf("UpdateNode(%s) failed: %v", id, err) } } @@ -859,9 +875,9 @@ func TestLinksStore_GetBrokenIncomingForPath_OnlyReturnsBrokenNotResolved(t *tes svc, ts, store := setupLinkService(t) // Create Page A that links to a non-existent page - aIDPtr, err := ts.CreatePage("system", nil, "Page A", "a") + aIDPtr, err := ts.CreateNode("system", nil, "Page A", "a", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage A failed: %v", err) + t.Fatalf("CreateNode A failed: %v", err) } pageAID := *aIDPtr @@ -869,8 +885,9 @@ func TestLinksStore_GetBrokenIncomingForPath_OnlyReturnsBrokenNotResolved(t *tes if err != nil { t.Fatalf("GetPage A failed: %v", err) } - if err := ts.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, "Link: [To B](/b)"); err != nil { - t.Fatalf("UpdatePage A failed: %v", err) + var linkToB string = "Link: [To B](/b)" + if err := ts.UpdateNode("system", pageA.ID, pageA.Title, pageA.Slug, &linkToB); err != nil { + t.Fatalf("UpdateNode A failed: %v", err) } // Index - this creates a broken link since B doesn't exist @@ -888,9 +905,9 @@ func TestLinksStore_GetBrokenIncomingForPath_OnlyReturnsBrokenNotResolved(t *tes } // Now create Page B - this should heal the link - bIDPtr, err := ts.CreatePage("system", nil, "Page B", "b") + bIDPtr, err := ts.CreateNode("system", nil, "Page B", "b", pageNodeKind()) if err != nil { - t.Fatalf("CreatePage B failed: %v", err) + t.Fatalf("CreateNode B failed: %v", err) } pageBID := *bIDPtr @@ -898,8 +915,9 @@ func TestLinksStore_GetBrokenIncomingForPath_OnlyReturnsBrokenNotResolved(t *tes if err != nil { t.Fatalf("GetPage B failed: %v", err) } - if err := ts.UpdatePage("system", pageB.ID, pageB.Title, pageB.Slug, "# Page B"); err != nil { - t.Fatalf("UpdatePage B failed: %v", err) + var contentB string = "# Page B" + if err := ts.UpdateNode("system", pageB.ID, pageB.Title, pageB.Slug, &contentB); err != nil { + t.Fatalf("UpdateNode B failed: %v", err) } // Use HealLinksForExactPath to heal the broken link diff --git a/internal/search/bootstrap_test.go b/internal/search/bootstrap_test.go index 0a483272..1aaea37f 100644 --- a/internal/search/bootstrap_test.go +++ b/internal/search/bootstrap_test.go @@ -19,9 +19,10 @@ func TestBuildAndRunIndexer_BasicIndexing(t *testing.T) { t.Fatalf("failed to load tree: %v", err) } - _, err := treeSvc.CreatePage("system", nil, "Docs", "docs") + var pageNodeKind tree.NodeKind = "page" + _, err := treeSvc.CreateNode("system", nil, "Docs", "docs", &pageNodeKind) if err != nil { - t.Fatalf("CreatePage failed: %v", err) + t.Fatalf("CreateNode failed: %v", err) } mdPath := filepath.Join(tmp, "root", "docs.md") diff --git a/internal/search/sqlite_index.go b/internal/search/sqlite_index.go index ea5890b2..87b69445 100644 --- a/internal/search/sqlite_index.go +++ b/internal/search/sqlite_index.go @@ -9,6 +9,7 @@ import ( "sync" "github.com/microcosm-cc/bluemonday" + "github.com/perber/wiki/internal/core/markdown" "github.com/russross/blackfriday/v2" _ "modernc.org/sqlite" // Import SQLite driver ) @@ -158,7 +159,12 @@ func (s *SQLiteIndex) Close() error { return nil } -func (s *SQLiteIndex) IndexPage(path string, filePath string, pageID string, title string, content string) error { +func (s *SQLiteIndex) IndexPage(path string, filePath string, pageID string, title string, raw string) error { + _, content, _, err := markdown.ParseFrontmatter(raw) + if err != nil { + return err + } + // Headings extracted from the Markdown headings := extractHeadings(content) diff --git a/internal/test_utils/common.go b/internal/test_utils/common.go index 9218a3f1..0340d7e9 100644 --- a/internal/test_utils/common.go +++ b/internal/test_utils/common.go @@ -4,6 +4,9 @@ import ( "bytes" "fmt" "mime/multipart" + "os" + "path/filepath" + "testing" ) // CreateMultipartFile simulates a real file upload using multipart encoding @@ -34,3 +37,15 @@ func CreateMultipartFile(filename string, content []byte) (multipart.File, strin f, err := files[0].Open() return f, files[0].Filename, err } + +func WriteFile(t *testing.T, base, rel, content string) string { + t.Helper() + abs := filepath.Join(base, filepath.FromSlash(rel)) + if err := os.MkdirAll(filepath.Dir(abs), 0o755); err != nil { + t.Fatalf("mkdir: %v", err) + } + if err := os.WriteFile(abs, []byte(content), 0o644); err != nil { + t.Fatalf("write: %v", err) + } + return abs +} diff --git a/internal/wiki/wiki.go b/internal/wiki/wiki.go index a2583174..025bfd29 100644 --- a/internal/wiki/wiki.go +++ b/internal/wiki/wiki.go @@ -3,6 +3,7 @@ package wiki import ( "fmt" "log" + "log/slog" "mime/multipart" "path" "regexp" @@ -31,6 +32,7 @@ type Wiki struct { storageDir string searchWatcher *search.Watcher links *links.LinkService + log *slog.Logger } var emailRegex = regexp.MustCompile(`^[a-zA-Z0-9._%+\-]+@[a-zA-Z0-9.\-]+$`) @@ -65,6 +67,9 @@ type WikiOptions struct { } func NewWiki(options *WikiOptions) (*Wiki, error) { + + logger := slog.Default().With("component", "Wiki") + // Initialize the user store store, err := auth.NewUserStore(options.StorageDir) if err != nil { @@ -112,7 +117,7 @@ func NewWiki(options *WikiOptions) (*Wiki, error) { } linkService := links.NewLinkService(options.StorageDir, treeService, linksStore) if err := linkService.IndexAllPages(); err != nil { - log.Printf("failed to index links of pages: %v", err) + logger.Warn("failed to index links on startup", "error", err) } sqliteIndex, err := search.NewSQLiteIndex(options.StorageDir) @@ -128,18 +133,18 @@ func NewWiki(options *WikiOptions) (*Wiki, error) { go func() { err := search.BuildAndRunIndexer(treeService, sqliteIndex, path.Join(options.StorageDir, "root"), 4, status) if err != nil { - log.Printf("indexing failed: %v", err) + logger.Warn("indexing failed", "error", err) } }() // Start the file watcher for indexing searchWatcher, err = search.NewWatcher(path.Join(options.StorageDir, "root"), treeService, sqliteIndex, status) if err != nil { - log.Printf("failed to create file watcher: %v", err) + logger.Warn("failed to create file watcher", "error", err) } else { go func() { if err := searchWatcher.Start(); err != nil { - log.Printf("failed to start file watcher: %v", err) + logger.Warn("failed to start file watcher", "error", err) } }() } @@ -164,6 +169,7 @@ func NewWiki(options *WikiOptions) (*Wiki, error) { status: status, searchWatcher: searchWatcher, links: linkService, + log: logger, } // Ensure the welcome page exists @@ -175,16 +181,12 @@ func NewWiki(options *WikiOptions) (*Wiki, error) { } func (w *Wiki) EnsureWelcomePage() error { - _, err := w.tree.GetPage("root") - if err == nil { - return nil - } - if len(w.tree.GetTree().Children) > 0 { + w.log.Info("Welcome page already exists, skipping creation") return nil } - - p, err := w.CreatePage(SYSTEM_USER_ID, nil, "Welcome to LeafWiki", "welcome-to-leafwiki") + k := tree.NodeKindPage + p, err := w.CreatePage(SYSTEM_USER_ID, nil, "Welcome to LeafWiki", "welcome-to-leafwiki", &k) if err != nil { return err } @@ -192,10 +194,11 @@ func (w *Wiki) EnsureWelcomePage() error { // Set the content of the welcome page content := `# Welcome to LeafWiki! -LeafWiki is a lightweight, self-hosted knowledge base server for documenting -runbooks, internal docs, and technical knowledge using plain Markdown files. +LeafWiki – A fast wiki for people who think in folders, not feeds. +Single Go binary. Markdown on disk. No external database service. + +LeafWiki is a lightweight, self-hosted wiki for runbooks, internal docs, and technical notes — built for fast writing and explicit structure. It keeps your content as plain Markdown on disk and gives you fast navigation, search, and editing — without running additional services. -Content is stored directly on disk, organized in a clear tree structure, and served by a single Go binary. --- @@ -221,7 +224,7 @@ LeafWiki is designed for clarity, structure, and long-term maintainability — n - **Bold** ` + "- `Inline code` \n```\n\n" + "Enjoy writing!" - if _, err := w.UpdatePage(SYSTEM_USER_ID, p.ID, p.Title, p.Slug, content); err != nil { + if _, err := w.UpdatePage(SYSTEM_USER_ID, p.ID, p.Title, p.Slug, &content, &k); err != nil { return err } @@ -232,13 +235,25 @@ func (w *Wiki) GetTree() *tree.PageNode { return w.tree.GetTree() } -func (w *Wiki) CreatePage(userID string, parentID *string, title string, slug string) (*tree.Page, error) { +func (w *Wiki) TreeHash() string { + return w.tree.TreeHash() +} + +func (w *Wiki) CreatePage(userID string, parentID *string, title string, slug string, kind *tree.NodeKind) (*tree.Page, error) { ve := errors.NewValidationErrors() if title == "" { ve.Add("title", "Title must not be empty") } + if kind == nil { + ve.Add("kind", "Kind must be specified") + } + + if kind != nil && *kind != tree.NodeKindPage && *kind != tree.NodeKindSection { + ve.Add("kind", "Kind must be either 'page' or 'section'") + } + if err := w.slug.IsValidSlug(slug); err != nil { ve.Add("slug", err.Error()) } @@ -255,10 +270,20 @@ func (w *Wiki) CreatePage(userID string, parentID *string, title string, slug st return nil, err } } - - id, err := w.tree.CreatePage(userID, parentID, title, slug) - if err != nil { - return nil, err + var id *string + if *kind == tree.NodeKindPage { + var err error + id, err = w.tree.CreateNode(userID, parentID, title, slug, kind) + if err != nil { + return nil, err + } + } + if *kind == tree.NodeKindSection { + var err error + id, err = w.tree.CreateNode(userID, parentID, title, slug, kind) + if err != nil { + return nil, err + } } page, err := w.tree.GetPage(*id) @@ -275,7 +300,7 @@ func (w *Wiki) CreatePage(userID string, parentID *string, title string, slug st return page, nil } -func (w *Wiki) EnsurePath(userID string, targetPath string, targetTitle string) (*tree.Page, error) { +func (w *Wiki) EnsurePath(userID string, targetPath string, targetTitle string, kind *tree.NodeKind) (*tree.Page, error) { ve := errors.NewValidationErrors() cleanTargetPath := strings.Trim(strings.TrimSpace(targetPath), "/") @@ -316,7 +341,7 @@ func (w *Wiki) EnsurePath(userID string, targetPath string, targetTitle string) } // Now we create the missing segments - result, err := w.tree.EnsurePagePath(userID, cleanTargetPath, cleanTargetTitle) + result, err := w.tree.EnsurePagePath(userID, cleanTargetPath, cleanTargetTitle, kind) if err != nil { return nil, err } @@ -342,7 +367,7 @@ func (w *Wiki) EnsurePath(userID string, targetPath string, targetTitle string) return page, nil } -func (w *Wiki) UpdatePage(userID string, id, title, slug, content string) (*tree.Page, error) { +func (w *Wiki) UpdatePage(userID string, id, title, slug string, content *string, kind *tree.NodeKind) (*tree.Page, error) { // Validate the request ve := errors.NewValidationErrors() @@ -372,7 +397,7 @@ func (w *Wiki) UpdatePage(userID string, id, title, slug, content string) (*tree } } - if err = w.tree.UpdatePage(userID, id, title, slug, content); err != nil { + if err = w.tree.UpdateNode(userID, id, title, slug, content); err != nil { return nil, err } @@ -403,8 +428,10 @@ func (w *Wiki) UpdatePage(userID string, id, title, slug, content string) (*tree } } } else { - if err := w.links.UpdateLinksForPage(after, content); err != nil { - log.Printf("warning: failed to update links for page %s: %v", after.ID, err) + if content != nil { + if err := w.links.UpdateLinksForPage(after, *content); err != nil { + log.Printf("warning: failed to update links for page %s: %v", after.ID, err) + } } if err := w.links.HealLinksForExactPath(after); err != nil { log.Printf("warning: failed to heal links for page %s: %v", after.ID, err) @@ -435,13 +462,15 @@ func (w *Wiki) CopyPage(userID string, currentPageID string, targetParentID *str return nil, err } + kind := tree.NodeKindPage + // Create a copy of the page - copyID, err := w.tree.CreatePage(userID, targetParentID, title, slug) + copyID, err := w.tree.CreateNode(userID, targetParentID, title, slug, &kind) if err != nil { log.Printf("error: could not create page copy: %v", err) return nil, err } - cleanup := func() { _ = w.tree.DeletePage(userID, *copyID, false) } + cleanup := func() { _ = w.tree.DeleteNode(userID, *copyID, false) } // Get the copied page copy, err := w.tree.GetPage(*copyID) @@ -462,7 +491,7 @@ func (w *Wiki) CopyPage(userID string, currentPageID string, targetParentID *str updatedContent := strings.ReplaceAll(page.Content, "/assets/"+page.ID+"/", "/assets/"+copy.ID+"/") // Write the content to the copied page - if err := w.tree.UpdatePage(userID, copy.ID, copy.Title, copy.Slug, updatedContent); err != nil { + if err := w.tree.UpdateNode(userID, copy.ID, copy.Title, copy.Slug, &updatedContent); err != nil { log.Printf("error: could not update copied page content: %v", err) cleanup() _ = w.asset.DeleteAllAssetsForPage(copy.PageNode) @@ -514,7 +543,7 @@ func (w *Wiki) DeletePage(userID string, id string, recursive bool) error { oldPrefix = page.CalculatePath() } - if err := w.tree.DeletePage(userID, id, recursive); err != nil { + if err := w.tree.DeleteNode(userID, id, recursive); err != nil { log.Printf("error: could not delete page: %v", err) return err } @@ -542,7 +571,7 @@ func (w *Wiki) DeletePage(userID string, id string, recursive bool) error { return nil } - if err := w.tree.DeletePage(userID, id, recursive); err != nil { + if err := w.tree.DeleteNode(userID, id, recursive); err != nil { log.Printf("error: could not delete page: %v", err) return err } @@ -595,7 +624,7 @@ func (w *Wiki) MovePage(userID, id, parentID string) error { oldPrefix = p.CalculatePath() } } - if err := w.tree.MovePage(userID, id, parentID); err != nil { + if err := w.tree.MoveNode(userID, id, parentID); err != nil { return err } @@ -631,6 +660,19 @@ func (w *Wiki) MovePage(userID, id, parentID string) error { return nil } +func (w *Wiki) ConvertPage(userID, id string, targetKind tree.NodeKind) error { + if id == "root" || id == "" { + return fmt.Errorf("cannot convert root page") + } + + err := w.tree.ConvertNode(userID, id, targetKind) + if err != nil { + return err + } + + return nil +} + func (w *Wiki) SortPages(parentID string, orderedIDs []string) error { return w.tree.SortPages(parentID, orderedIDs) } @@ -981,3 +1023,7 @@ func (w *Wiki) DeleteBrandingFavicon() error { func (w *Wiki) GetBrandingService() *branding.BrandingService { return w.branding } + +func (w *Wiki) GetSlugService() *tree.SlugService { + return w.slug +} diff --git a/internal/wiki/wiki_test.go b/internal/wiki/wiki_test.go index 02fee30d..fffe0a1c 100644 --- a/internal/wiki/wiki_test.go +++ b/internal/wiki/wiki_test.go @@ -23,11 +23,16 @@ func createWikiTestInstance(t *testing.T) *Wiki { return wikiInstance } +func pageNodeKind() *tree.NodeKind { + kind := tree.NodeKindPage + return &kind +} + func TestWiki_CreatePage_Root(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - page, err := w.CreatePage("system", nil, "Home", "home") + page, err := w.CreatePage("system", nil, "Home", "home", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) } @@ -40,9 +45,10 @@ func TestWiki_CreatePage_Root(t *testing.T) { func TestWiki_CreatePage_WithParent(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - rootPage, _ := w.CreatePage("system", nil, "Docs", "docs") + kind := tree.NodeKindPage + rootPage, _ := w.CreatePage("system", nil, "Docs", "docs", &kind) - page, err := w.CreatePage("system", &rootPage.ID, "API-Doc", "api-doc") + page, err := w.CreatePage("system", &rootPage.ID, "API-Doc", "api-doc", &kind) if err != nil { t.Fatalf("CreatePage with parent failed: %v", err) } @@ -55,7 +61,7 @@ func TestWiki_CreatePage_WithParent(t *testing.T) { func TestWiki_CreatePage_EmptyTitle(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - _, err := w.CreatePage("system", nil, "", "empty") + _, err := w.CreatePage("system", nil, "", "empty", pageNodeKind()) if err == nil { t.Error("Expected error for empty title, got none") } @@ -64,7 +70,7 @@ func TestWiki_CreatePage_EmptyTitle(t *testing.T) { func TestWiki_CreatePage_ReservedSlug(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - _, err := w.CreatePage("system", nil, "Reserved", "e") + _, err := w.CreatePage("system", nil, "Reserved", "e", pageNodeKind()) if err == nil { t.Error("Expected error for reserved slug, got none") } @@ -82,9 +88,9 @@ func TestWiki_CreatePage_ReservedSlug(t *testing.T) { func TestWiki_CreatePage_PageExists(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - _, _ = w.CreatePage("system", nil, "Duplicate", "duplicate") + _, _ = w.CreatePage("system", nil, "Duplicate", "duplicate", pageNodeKind()) - _, err := w.CreatePage("system", nil, "Duplicate", "duplicate") + _, err := w.CreatePage("system", nil, "Duplicate", "duplicate", pageNodeKind()) if err == nil { t.Error("Expected error for duplicate page, got none") } @@ -94,7 +100,7 @@ func TestWiki_CreatePage_InvalidParent(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() invalidID := "not-real" - _, err := w.CreatePage("system", &invalidID, "Broken", "broken") + _, err := w.CreatePage("system", &invalidID, "Broken", "broken", pageNodeKind()) if err == nil { t.Error("Expected error with invalid parent ID, got none") } @@ -103,7 +109,7 @@ func TestWiki_CreatePage_InvalidParent(t *testing.T) { func TestWiki_GetPage_ValidID(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - page, _ := w.CreatePage("system", nil, "ReadMe", "readme") + page, _ := w.CreatePage("system", nil, "ReadMe", "readme", pageNodeKind()) found, err := w.GetPage(page.ID) if err != nil { @@ -127,8 +133,8 @@ func TestWiki_GetPage_InvalidID(t *testing.T) { func TestWiki_MovePage_Valid(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - parent, _ := w.CreatePage("system", nil, "Projects", "projects") - child, _ := w.CreatePage("system", nil, "Old", "old") + parent, _ := w.CreatePage("system", nil, "Projects", "projects", pageNodeKind()) + child, _ := w.CreatePage("system", nil, "Old", "old", pageNodeKind()) err := w.MovePage("system", child.ID, parent.ID) if err != nil { @@ -139,8 +145,7 @@ func TestWiki_MovePage_Valid(t *testing.T) { func TestWiki_DeletePage_Simple(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - page, _ := w.CreatePage("system", nil, "Trash", "trash") - + page, _ := w.CreatePage("system", nil, "Trash", "trash", pageNodeKind()) err := w.DeletePage("system", page.ID, false) if err != nil { t.Fatalf("DeletePage failed: %v", err) @@ -150,8 +155,8 @@ func TestWiki_DeletePage_Simple(t *testing.T) { func TestWiki_DeletePage_WithChildren(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - parent, _ := w.CreatePage("system", nil, "Parent", "parent") - _, _ = w.CreatePage("system", &parent.ID, "Child", "child") + parent, _ := w.CreatePage("system", nil, "Parent", "parent", pageNodeKind()) + _, _ = w.CreatePage("system", &parent.ID, "Child", "child", pageNodeKind()) err := w.DeletePage("system", parent.ID, false) if err == nil { @@ -162,8 +167,8 @@ func TestWiki_DeletePage_WithChildren(t *testing.T) { func TestWiki_DeletePage_Recursive(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - parent, _ := w.CreatePage("system", nil, "Parent", "parent") - _, _ = w.CreatePage("system", &parent.ID, "Child", "child") + parent, _ := w.CreatePage("system", nil, "Parent", "parent", pageNodeKind()) + _, _ = w.CreatePage("system", &parent.ID, "Child", "child", pageNodeKind()) err := w.DeletePage("system", parent.ID, true) if err != nil { @@ -205,9 +210,9 @@ func TestWiki_UpdatePage(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - page, _ := w.CreatePage("system", nil, "Draft", "draft") - - page, err := w.UpdatePage("system", page.ID, "Final", "final", "# Updated") + page, _ := w.CreatePage("system", nil, "Draft", "draft", pageNodeKind()) + var updatedstr string = "# Updated" + page, err := w.UpdatePage("system", page.ID, "Final", "final", &updatedstr, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage failed: %v", err) } @@ -234,7 +239,7 @@ func TestWiki_SuggestSlug_Conflict(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() root := w.GetTree() - _, err := w.CreatePage("system", nil, "My Page", "my-page") + _, err := w.CreatePage("system", nil, "My Page", "my-page", pageNodeKind()) if err != nil { t.Fatalf("CreatePage failed: %v", err) @@ -254,14 +259,14 @@ func TestWiki_SuggestSlug_DeepHierarchy(t *testing.T) { defer w.Close() // create a deep hierarchy of pages (Architecture -> Backend) - _, err := w.CreatePage("system", nil, "Architecture", "architecture") + _, err := w.CreatePage("system", nil, "Architecture", "architecture", pageNodeKind()) if err != nil { t.Fatalf("Failed to create 'Architecture': %v", err) } root := w.GetTree() arch := root.Children[0] - _, err = w.CreatePage("system", &arch.ID, "Backend", "backend") + _, err = w.CreatePage("system", &arch.ID, "Backend", "backend", pageNodeKind()) if err != nil { t.Fatalf("Failed to create 'Backend': %v", err) } @@ -278,7 +283,7 @@ func TestWiki_SuggestSlug_DeepHierarchy(t *testing.T) { } // Create a second one with the same name → it must be numbered - _, err = w.CreatePage("system", &backend.ID, "Data Layer", "data-layer") + _, err = w.CreatePage("system", &backend.ID, "Data Layer", "data-layer", pageNodeKind()) if err != nil { t.Fatalf("Failed to create 'Data Layer': %v", err) } @@ -296,7 +301,7 @@ func TestWiki_SuggestSlug_DeepHierarchy(t *testing.T) { func TestWiki_FindByPath_Valid(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - _, _ = w.CreatePage("system", nil, "Company", "company") + _, _ = w.CreatePage("system", nil, "Company", "company", pageNodeKind()) found, err := w.FindByPath("company") if err != nil { @@ -319,9 +324,9 @@ func TestWiki_FindByPath_Invalid(t *testing.T) { func TestWiki_SortPages(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - parent, _ := w.CreatePage("system", nil, "Parent", "parent") - child1, _ := w.CreatePage("system", &parent.ID, "Child1", "child1") - child2, _ := w.CreatePage("system", &parent.ID, "Child2", "child2") + parent, _ := w.CreatePage("system", nil, "Parent", "parent", pageNodeKind()) + child1, _ := w.CreatePage("system", &parent.ID, "Child1", "child1", pageNodeKind()) + child2, _ := w.CreatePage("system", &parent.ID, "Child2", "child2", pageNodeKind()) err := w.SortPages(parent.ID, []string{child2.ID, child1.ID}) if err != nil { @@ -338,7 +343,7 @@ func TestWiki_SortPages(t *testing.T) { func TestWiki_CopyPages(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - original, _ := w.CreatePage("system", nil, "Original", "original") + original, _ := w.CreatePage("system", nil, "Original", "original", pageNodeKind()) copied, err := w.CopyPage("system", original.ID, nil, "Copy of Original", "copy-of-original") if err != nil { @@ -359,8 +364,8 @@ func TestWiki_CopyPages(t *testing.T) { func TestWiki_CopyPages_WithParent(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - parent, _ := w.CreatePage("system", nil, "Parent", "parent") - original, _ := w.CreatePage("system", nil, "Original", "original") + parent, _ := w.CreatePage("system", nil, "Parent", "parent", pageNodeKind()) + original, _ := w.CreatePage("system", nil, "Original", "original", pageNodeKind()) copied, err := w.CopyPage("system", original.ID, &parent.ID, "Copy of Original", "copy-of-original") if err != nil { @@ -384,7 +389,7 @@ func TestWiki_CopyPages_NonExistentSource(t *testing.T) { func TestWiki_CopyPages_WithAssets(t *testing.T) { w := createWikiTestInstance(t) defer w.Close() - original, _ := w.CreatePage("system", nil, "Original", "original") + original, _ := w.CreatePage("system", nil, "Original", "original", pageNodeKind()) originalNode := tree.PageNode{ ID: original.ID, @@ -454,12 +459,13 @@ func TestWiki_EnsurePath_HealsLinksForAllCreatedSegments(t *testing.T) { defer w.Close() // 1) Create page A with links to /x and /x/y (both non-existing) - pageA, err := w.CreatePage("system", nil, "Page A", "a") + pageA, err := w.CreatePage("system", nil, "Page A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage A failed: %v", err) } - _, err = w.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, "Links: [X](/x) and [XY](/x/y)") + var contentA string = "Links: [X](/x) and [XY](/x/y)" + _, err = w.UpdatePage("system", pageA.ID, pageA.Title, pageA.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage A failed: %v", err) } @@ -489,7 +495,7 @@ func TestWiki_EnsurePath_HealsLinksForAllCreatedSegments(t *testing.T) { } // 3) EnsurePath creates /x and /x/y and triggers Heal for all newly created segments - _, err = w.EnsurePath("system", "/x/y", "X Y") + _, err = w.EnsurePath("system", "/x/y", "X Y", pageNodeKind()) if err != nil { t.Fatalf("EnsurePath failed: %v", err) } @@ -548,21 +554,25 @@ func TestWiki_DeletePage_NonRecursive_MarksIncomingBroken(t *testing.T) { defer w.Close() // Create A with link to /b - a, err := w.CreatePage("system", nil, "Page A", "a") + a, err := w.CreatePage("system", nil, "Page A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage A failed: %v", err) } - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Link to B: [Go](/b)") + + var contentA string = "Link to B: [Go](/b)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage A failed: %v", err) } // Create B - b, err := w.CreatePage("system", nil, "Page B", "b") + b, err := w.CreatePage("system", nil, "Page B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage B failed: %v", err) } - _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, "# Page B") + + var contentB string = "# Page B" + _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, &contentB, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage B failed: %v", err) } @@ -612,37 +622,40 @@ func TestWiki_DeletePage_Recursive_RemovesOutgoingForSubtree_AndBreaksIncomingBy defer w.Close() // Create /docs - docs, err := w.CreatePage("system", nil, "Docs", "docs") + docs, err := w.CreatePage("system", nil, "Docs", "docs", pageNodeKind()) if err != nil { t.Fatalf("CreatePage docs failed: %v", err) } // Create /docs/a and /docs/b - a, err := w.CreatePage("system", &docs.ID, "A", "a") + a, err := w.CreatePage("system", &docs.ID, "A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage a failed: %v", err) } - b, err := w.CreatePage("system", &docs.ID, "B", "b") + b, err := w.CreatePage("system", &docs.ID, "B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage b failed: %v", err) } // A links to B inside subtree - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Link to B: [B](/docs/b)") + var contentA string = "Link to B: [B](/docs/b)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage a failed: %v", err) } - _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, "# B") + var contentB string = "# B" + _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, &contentB, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage b failed: %v", err) } // Create survivor /c with incoming link into subtree - c, err := w.CreatePage("system", nil, "C", "c") + c, err := w.CreatePage("system", nil, "C", "c", pageNodeKind()) if err != nil { t.Fatalf("CreatePage c failed: %v", err) } - _, err = w.UpdatePage("system", c.ID, c.Title, c.Slug, "Incoming link: [B](/docs/b)") + var contentC string = "Incoming link: [B](/docs/b)" + _, err = w.UpdatePage("system", c.ID, c.Title, c.Slug, &contentC, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage c failed: %v", err) } @@ -700,21 +713,23 @@ func TestWiki_RenamePage_MarksOldBroken_HealsNewExactPath(t *testing.T) { defer w.Close() // Create A with links to /b (exists) and /b2 (does not exist yet) - a, err := w.CreatePage("system", nil, "A", "a") + a, err := w.CreatePage("system", nil, "A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage A failed: %v", err) } - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Links: [B](/b) and [B2](/b2)") + var contentA string = "Links: [B](/b) and [B2](/b2)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage A failed: %v", err) } // Create B at /b - b, err := w.CreatePage("system", nil, "B", "b") + b, err := w.CreatePage("system", nil, "B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage B failed: %v", err) } - _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, "# B") + var contentB string = "# B" + _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, &contentB, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage B failed: %v", err) } @@ -751,7 +766,8 @@ func TestWiki_RenamePage_MarksOldBroken_HealsNewExactPath(t *testing.T) { } // Rename B: /b -> /b2 - _, err = w.UpdatePage("system", b.ID, b.Title, "b2", "# B (renamed)") + var contentB2 string = "# B (renamed)" + _, err = w.UpdatePage("system", b.ID, b.Title, "b2", &contentB2, pageNodeKind()) if err != nil { t.Fatalf("Rename B failed: %v", err) } @@ -798,25 +814,27 @@ func TestWiki_RenameSubtree_BreaksOldPrefix_HealsNewSubpaths(t *testing.T) { defer w.Close() // Create subtree: /docs/b - docs, err := w.CreatePage("system", nil, "Docs", "docs") + docs, err := w.CreatePage("system", nil, "Docs", "docs", pageNodeKind()) if err != nil { t.Fatalf("CreatePage docs failed: %v", err) } - b, err := w.CreatePage("system", &docs.ID, "B", "b") + b, err := w.CreatePage("system", &docs.ID, "B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage /docs/b failed: %v", err) } - _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, "# B") + var contentB string = "# B" + _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, &contentB, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage B failed: %v", err) } // Create A that links to old and future new subtree paths - a, err := w.CreatePage("system", nil, "A", "a") + a, err := w.CreatePage("system", nil, "A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage A failed: %v", err) } - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Links: [Old](/docs/b) and [New](/docs2/b)") + var contentA string = "Links: [Old](/docs/b) and [New](/docs2/b)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage A failed: %v", err) } @@ -835,7 +853,9 @@ func TestWiki_RenameSubtree_BreaksOldPrefix_HealsNewSubpaths(t *testing.T) { } // Rename /docs -> /docs2 - _, err = w.UpdatePage("system", docs.ID, docs.Title, "docs2", "# Docs") + var contentDocs2 string = "# Docs" + nodeSection := tree.NodeKindSection + _, err = w.UpdatePage("system", docs.ID, docs.Title, "docs2", &contentDocs2, &nodeSection) if err != nil { t.Fatalf("Rename docs failed: %v", err) } @@ -882,27 +902,29 @@ func TestWiki_MovePage_MarksOldBroken_HealsNewExactPath(t *testing.T) { defer w.Close() // Create A that links to /b (old path) and /projects/b (future path) - a, err := w.CreatePage("system", nil, "A", "a") + a, err := w.CreatePage("system", nil, "A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage A failed: %v", err) } - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Links: [B](/b) and [B2](/projects/b)") + var contentA string = "Links: [B](/b) and [B2](/projects/b)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage A failed: %v", err) } // Create B at /b - b, err := w.CreatePage("system", nil, "B", "b") + b, err := w.CreatePage("system", nil, "B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage B failed: %v", err) } - _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, "# B") + var contentB string = "# B" + _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, &contentB, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage B failed: %v", err) } // Create parent /projects (target) - projects, err := w.CreatePage("system", nil, "Projects", "projects") + projects, err := w.CreatePage("system", nil, "Projects", "projects", pageNodeKind()) if err != nil { t.Fatalf("CreatePage projects failed: %v", err) } @@ -980,31 +1002,33 @@ func TestWiki_MoveSubtree_BreaksOldPrefix_HealsNewSubpaths(t *testing.T) { defer w.Close() // Create subtree /docs/b - docs, err := w.CreatePage("system", nil, "Docs", "docs") + docs, err := w.CreatePage("system", nil, "Docs", "docs", pageNodeKind()) if err != nil { t.Fatalf("CreatePage docs failed: %v", err) } - b, err := w.CreatePage("system", &docs.ID, "B", "b") + b, err := w.CreatePage("system", &docs.ID, "B", "b", pageNodeKind()) if err != nil { t.Fatalf("CreatePage /docs/b failed: %v", err) } - _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, "# B") + var contentB string = "# B" + _, err = w.UpdatePage("system", b.ID, b.Title, b.Slug, &contentB, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage B failed: %v", err) } // Create target parent /archive - archive, err := w.CreatePage("system", nil, "Archive", "archive") + archive, err := w.CreatePage("system", nil, "Archive", "archive", pageNodeKind()) if err != nil { t.Fatalf("CreatePage archive failed: %v", err) } // Create A that links to old and future new subtree paths - a, err := w.CreatePage("system", nil, "A", "a") + a, err := w.CreatePage("system", nil, "A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage A failed: %v", err) } - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Links: [Old](/docs/b) and [New](/archive/docs/b)") + var contentA string = "Links: [Old](/docs/b) and [New](/archive/docs/b)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage A failed: %v", err) } @@ -1053,41 +1077,44 @@ func TestWiki_MovePage_ReindexesRelativeLinks(t *testing.T) { defer w.Close() // Create /docs with /docs/shared and /docs/a - docs, err := w.CreatePage("system", nil, "Docs", "docs") + docs, err := w.CreatePage("system", nil, "Docs", "docs", pageNodeKind()) if err != nil { t.Fatalf("CreatePage docs failed: %v", err) } - docsShared, err := w.CreatePage("system", &docs.ID, "Shared", "shared") + docsShared, err := w.CreatePage("system", &docs.ID, "Shared", "shared", pageNodeKind()) if err != nil { t.Fatalf("CreatePage /docs/shared failed: %v", err) } - _, err = w.UpdatePage("system", docsShared.ID, docsShared.Title, docsShared.Slug, "# Docs Shared") + var contentDocsShared string = "# Docs Shared" + _, err = w.UpdatePage("system", docsShared.ID, docsShared.Title, docsShared.Slug, &contentDocsShared, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage /docs/shared failed: %v", err) } - a, err := w.CreatePage("system", &docs.ID, "A", "a") + a, err := w.CreatePage("system", &docs.ID, "A", "a", pageNodeKind()) if err != nil { t.Fatalf("CreatePage /docs/a failed: %v", err) } // Important: relative link - _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, "Relative: [S](../shared)") + var contentA string = "Relative: [S](../shared)" + _, err = w.UpdatePage("system", a.ID, a.Title, a.Slug, &contentA, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage /docs/a failed: %v", err) } // Create /guide with /guide/shared (different page!) - guide, err := w.CreatePage("system", nil, "Guide", "guide") + guide, err := w.CreatePage("system", nil, "Guide", "guide", pageNodeKind()) if err != nil { t.Fatalf("CreatePage guide failed: %v", err) } - guideShared, err := w.CreatePage("system", &guide.ID, "Shared", "shared") + guideShared, err := w.CreatePage("system", &guide.ID, "Shared", "shared", pageNodeKind()) if err != nil { t.Fatalf("CreatePage /guide/shared failed: %v", err) } - _, err = w.UpdatePage("system", guideShared.ID, guideShared.Title, guideShared.Slug, "# Guide Shared") + var contentGuideShared string = "# Guide Shared" + _, err = w.UpdatePage("system", guideShared.ID, guideShared.Title, guideShared.Slug, &contentGuideShared, pageNodeKind()) if err != nil { t.Fatalf("UpdatePage /guide/shared failed: %v", err) } @@ -1226,7 +1253,7 @@ func TestWiki_AuthDisabled_CoreFunctionalityWorks(t *testing.T) { defer wikiInstance.Close() // Test creating a page - page, err := wikiInstance.CreatePage("system", nil, "Test Page", "test-page") + page, err := wikiInstance.CreatePage("system", nil, "Test Page", "test-page", pageNodeKind()) if err != nil { t.Fatalf("Failed to create page with AuthDisabled: %v", err) } @@ -1236,7 +1263,8 @@ func TestWiki_AuthDisabled_CoreFunctionalityWorks(t *testing.T) { } // Test updating a page - updatedPage, err := wikiInstance.UpdatePage("system", page.ID, "Updated Title", "updated-slug", "# Content") + var updatedContent string = "# Content" + updatedPage, err := wikiInstance.UpdatePage("system", page.ID, "Updated Title", "updated-slug", &updatedContent, pageNodeKind()) if err != nil { t.Fatalf("Failed to update page with AuthDisabled: %v", err) } diff --git a/readme.md b/readme.md index 8589f8a9..cb5f0c90 100644 --- a/readme.md +++ b/readme.md @@ -392,6 +392,73 @@ python3 tools/generate-tree.py --root data/root --output data/tree.json --- +## Reconstruct Tree from Filesystem + +LeafWiki includes a built-in `reconstruct-tree` command that rebuilds the navigation tree (`tree.json`) by scanning the actual Markdown files and folders on disk. + +**Usage:** + +```bash +leafwiki [--data-dir ] reconstruct-tree +``` + +Or if you installed LeafWiki as a binary: + +```bash +./leafwiki [--data-dir ] reconstruct-tree +``` + +**What it does:** + +The command: +- Scans the `data/root` directory recursively +- Extracts page titles from Markdown files (from H1 headings or frontmatter) +- Preserves `leafwiki_id` values from frontmatter when present +- Generates new IDs for pages without frontmatter IDs +- Rebuilds the complete navigation tree structure +- Saves the new `tree.json` and updates `schema.json` + +**Use cases:** + +1. **Recovery from corrupted tree.json** + If your `tree.json` becomes corrupted or deleted, this command reconstructs it from your existing Markdown files. + +2. **Manual filesystem changes** + If you've added, moved, or renamed Markdown files directly on disk (outside LeafWiki's UI), run this command to sync the navigation tree. + +3. **Migration and import** + When importing existing Markdown content into LeafWiki, use this command to automatically generate the navigation structure. + +4. **Tree structure reset** + If the tree structure becomes inconsistent with the filesystem, this provides a clean rebuild based on actual file layout. + +**Important notes:** + +- ⚠️ This command **replaces the entire tree structure**. Any custom ordering or metadata in `tree.json` will be lost. +- The command creates a **deterministic, alphabetically-sorted** tree based on file and folder names. +- Page content (Markdown files) is never modified—only the navigation structure is rebuilt. +- Frontmatter `leafwiki_id` values are preserved when present, maintaining page identity and internal links. +- For folders (sections), the command looks for `index.md` to extract the section title. +- Files and folders starting with `.` (hidden) are automatically skipped. + +**Example:** + +```bash +# Default data directory (./data) +leafwiki reconstruct-tree + +# Custom data directory +leafwiki --data-dir /path/to/data reconstruct-tree +``` + +**Before running this command:** + +- Ensure your data directory exists and contains a `root` folder with your Markdown content +- Consider backing up your current `tree.json` if you need to preserve custom ordering +- The server does not need to be running—this is a standalone command + +--- + ## Quick Start (Dev) ``` diff --git a/ui/leafwiki-ui/src/components/UserToolbar.tsx b/ui/leafwiki-ui/src/components/UserToolbar.tsx index 8d7197b9..06502e66 100644 --- a/ui/leafwiki-ui/src/components/UserToolbar.tsx +++ b/ui/leafwiki-ui/src/components/UserToolbar.tsx @@ -75,6 +75,12 @@ export default function UserToolbar() { > Branding Settings + navigate('/settings/importer')} + > + Import + -
-

Branding Settings

-
-

Site Name

-

+

+

Branding Settings

+
+

Site Name

+

The name displayed in the header, page titles, and login screen.

-
+
-
-

Logo

-

+

+

Logo

+

The logo displayed in the header next to the site name.

-
- - Current Logo: - +
+ Current Logo: {logoFile ? ( <> Logo
-
+
{' '} Upload Image -

+

Accepts {logoExts.map((ext) => ext.toUpperCase()).join(', ')}, max size {(maxLogoSize / (1024 * 1024)).toFixed(1)} MB

-
-

Favicon

-

+

+

Favicon

+

The icon displayed in the browser tab.

-
- - Current Favicon: - +
+ Current Favicon: {faviconFile ? ( <> {' '} Favicon
-
+
Upload Favicon -

+

Accepts {faviconExts.map((ext) => ext.toUpperCase()).join(', ')}, max size {(maxFaviconSize / (1024 * 1024)).toFixed(1)} MB

-
+
+
+ +
+ {importPlan && ( +
+

Import Plan

+ +
+
+ + + + + + + + + + + +
ID:{importPlan.id}
+ Tree Hash: + {importPlan.tree_hash}
+
+
+
+ )} + {importPlan && importPlan.items.length > 0 && ( + <> +
+

+ Items ({importPlan.items.length}) +

+
+
+
+ + + + + + + + + + + + + {importPlan.items.map((item) => ( + + + + + + + + + ))} + +
+ Source Path + + Target Path + TitleKind + Action + Notes
+ {item.source_path} + + {item.target_path} + + {item.title} + + + {item.kind} + + + + {item.action} + + + {item.notes ? item.notes.join(', ') : ''} +
+
+
+
+ +
+ + )} + {importResult && ( +
+

Import Result

+ +
+
+ + + + + + + + + + + + + + + +
+ Imported Count: + {importResult.imported_count}
+ Updated Count: + {importResult.updated_count}
+ Skipped Count: + {importResult.skipped_count}
+
+
+
+ )} + {importResult && importResult.items.length > 0 && ( +
+

+ Result Items ({importResult.items.length}) +

+
+
+ + + + + + + + + + + {importResult.items.map((item) => ( + + + + + + + ))} + +
+ Source Path + + Target Path + ActionError
+ {item.source_path} + + {item.target_path} + + + {item.action} + + + {item.error ?? ''} +
+
+
+
+ )} +
+ + ) +} diff --git a/ui/leafwiki-ui/src/features/importer/useToolbarActions.tsx b/ui/leafwiki-ui/src/features/importer/useToolbarActions.tsx new file mode 100644 index 00000000..6e129321 --- /dev/null +++ b/ui/leafwiki-ui/src/features/importer/useToolbarActions.tsx @@ -0,0 +1,13 @@ +// Hook to provide toolbar actions for the page viewer + +import { useEffect } from 'react' +import { useToolbarStore } from '../toolbar/toolbar' + +// Hook to set up toolbar actions based on app mode and read-only status +export function useToolbarActions() { + const setButtons = useToolbarStore((state) => state.setButtons) + + useEffect(() => { + setButtons([]) + }, [setButtons]) +} diff --git a/ui/leafwiki-ui/src/features/page/AddPageDialog.tsx b/ui/leafwiki-ui/src/features/page/AddPageDialog.tsx index dec3b4ba..765921c9 100644 --- a/ui/leafwiki-ui/src/features/page/AddPageDialog.tsx +++ b/ui/leafwiki-ui/src/features/page/AddPageDialog.tsx @@ -1,20 +1,24 @@ -import BaseDialog from '@/components/BaseDialog' +import BaseDialog, { BaseDialogConfirmButton } from '@/components/BaseDialog' import { FormInput } from '@/components/FormInput' -import { createPage } from '@/lib/api/pages' +import { createPage, NODE_KIND_PAGE } from '@/lib/api/pages' import { handleFieldErrors } from '@/lib/handleFieldErrors' import { DIALOG_ADD_PAGE } from '@/lib/registries' import { buildEditUrl } from '@/lib/urlUtil' import { useTreeStore } from '@/stores/tree' -import { useCallback, useState } from 'react' +import { useCallback, useMemo, useState } from 'react' import { useNavigate } from 'react-router-dom' import { toast } from 'sonner' import { SlugInputWithSuggestion } from './SlugInputWithSuggestion' type AddPageDialogProps = { parentId: string + nodeKind?: 'page' | 'section' } -export function AddPageDialog({ parentId }: AddPageDialogProps) { +export function AddPageDialog({ + parentId, + nodeKind = NODE_KIND_PAGE, +}: AddPageDialogProps) { const [title, setTitle] = useState('') const [slug, setSlug] = useState('') const [loading, setLoading] = useState(false) @@ -52,7 +56,11 @@ export function AddPageDialog({ parentId }: AddPageDialogProps) { }, []) const handleCreate = useCallback( - async (redirect: boolean = true): Promise => { + async ( + redirect: boolean = true, + nodeKind?: 'page' | 'section', + ): Promise => { + if (!nodeKind) nodeKind = NODE_KIND_PAGE // Default to 'page' if not provided if (!title) return false // Should not happen due to button disabling if (!slug) { @@ -68,7 +76,7 @@ export function AddPageDialog({ parentId }: AddPageDialogProps) { setLoading(true) setFieldErrors({}) try { - await createPage({ title, slug, parentId }) + await createPage({ title, slug, parentId, kind: nodeKind }) toast.success('Page created') await reloadTree() if (redirect) { @@ -104,14 +112,44 @@ export function AddPageDialog({ parentId }: AddPageDialogProps) { return true }, [resetForm]) + const buttons = useMemo(() => { + const b: BaseDialogConfirmButton[] = [ + { + label: 'Create', + actionType: 'no-redirect', + autoFocus: true, + loading, + disabled: isCreateButtonDisabled, + variant: nodeKind === NODE_KIND_PAGE ? 'secondary' : 'default', + }, + ] + if (nodeKind === NODE_KIND_PAGE) { + b.push({ + label: 'Create & Edit Page', + actionType: 'confirm', + autoFocus: false, + loading, + disabled: isCreateButtonDisabled, + variant: 'default', + }) + } + return b + }, [isCreateButtonDisabled, loading, nodeKind]) + return ( => { - return await handleCreate(actionType !== 'no-redirect') + return await handleCreate(actionType !== 'no-redirect', nodeKind) }} testidPrefix="add-page-dialog" cancelButton={{ @@ -120,24 +158,7 @@ export function AddPageDialog({ parentId }: AddPageDialogProps) { disabled: loading, autoFocus: false, }} - buttons={[ - { - label: 'Create', - actionType: 'no-redirect', - autoFocus: true, - loading, - disabled: isCreateButtonDisabled, - variant: 'secondary', - }, - { - label: 'Create & Edit Page', - actionType: 'confirm', - autoFocus: false, - loading, - disabled: isCreateButtonDisabled, - variant: 'default', - }, - ]} + buttons={buttons} >
), }, + { + path: '/settings/importer', + element: isReadOnlyViewer ? ( + + ) : ( + + + + ), + }, { path: '/e/*', element: isReadOnlyViewer ? ( diff --git a/ui/leafwiki-ui/src/features/tree/TreeNode.tsx b/ui/leafwiki-ui/src/features/tree/TreeNode.tsx index 473ffffa..2f4595b9 100644 --- a/ui/leafwiki-ui/src/features/tree/TreeNode.tsx +++ b/ui/leafwiki-ui/src/features/tree/TreeNode.tsx @@ -1,11 +1,7 @@ import { TooltipWrapper } from '@/components/TooltipWrapper' import { TreeViewActionButton } from '@/features/tree/TreeViewActionButton' -import { PageNode } from '@/lib/api/pages' -import { - DIALOG_ADD_PAGE, - DIALOG_MOVE_PAGE, - DIALOG_SORT_PAGES, -} from '@/lib/registries' +import { NODE_KIND_SECTION, PageNode } from '@/lib/api/pages' +import { DIALOG_ADD_PAGE } from '@/lib/registries' import { buildEditUrl, buildViewUrl } from '@/lib/urlUtil' import { useAppMode } from '@/lib/useAppMode' import { useIsMobile } from '@/lib/useIsMobile' @@ -13,9 +9,10 @@ import { useIsReadOnly } from '@/lib/useIsReadOnly' import { useDialogsStore } from '@/stores/dialogs' import { useTreeStore } from '@/stores/tree' import clsx from 'clsx' -import { ChevronUp, List, Move, Plus } from 'lucide-react' -import React, { useState } from 'react' +import { ChevronUp, FilePlus } from 'lucide-react' +import React from 'react' import { Link, useLocation } from 'react-router-dom' +import TreeNodeActionsMenu from './TreeNodeActionsMenu' type Props = { node: PageNode @@ -29,7 +26,6 @@ export const TreeNode = React.memo(function TreeNode({ const { isNodeOpen, toggleNode } = useTreeStore() const appMode = useAppMode() const hasChildren = node.children && node.children.length > 0 - const [hovered, setHovered] = useState(false) const { pathname } = useLocation() const currentPath = @@ -58,6 +54,22 @@ export const TreeNode = React.memo(function TreeNode({ to={`/${node.path}`} className="tree-node__link" data-testid={`tree-node-link-${node.id}`} + onClick={(e) => { + // Only toggle sections on click + if (node.kind !== NODE_KIND_SECTION) return + + // Prevent toggling when using modifier keys or middle mouse button + if ( + e.metaKey || + e.ctrlKey || + e.shiftKey || + e.altKey || + e.button === 1 + ) { + return + } + toggleNode(node.id) + }} > setHovered(true)} - onMouseLeave={() => setHovered(false)} >
- {hasChildren && ( + {node.kind === NODE_KIND_SECTION && ( hasChildren && toggleNode(node.id)} + onClick={() => + node.kind === NODE_KIND_SECTION && toggleNode(node.id) + } /> )} { // add empty space to align with nodes that have children - !hasChildren &&
+ node.kind !== NODE_KIND_SECTION && ( +
+ ) } {linkText} -
- - {(hovered || isMobile) && !readOnlyMode && ( -
- } - tooltip="Create new page" - onClick={() => openDialog(DIALOG_ADD_PAGE, { parentId: node.id })} - /> - } - tooltip="Move page to new parent" - onClick={() => openDialog(DIALOG_MOVE_PAGE, { pageId: node.id })} - /> - {hasChildren && ( + {!readOnlyMode && ( +
} - tooltip="Sort pages" - onClick={() => openDialog(DIALOG_SORT_PAGES, { parent: node })} + actionName="add" + icon={} + tooltip="Create new page" + onClick={() => + openDialog(DIALOG_ADD_PAGE, { parentId: node.id }) + } /> - )} -
- )} + +
+ )} +
state.openDialog) + const reloadTree = useTreeStore((state) => state.reloadTree) + const hasChildren = children && children.length > 0 + const navigate = useNavigate() + const location = useLocation() + const setOpenMenuNodeId = useTreeNodeActionsMenusStore( + (s) => s.setOpenMenuNodeId, + ) + const open = useTreeNodeActionsMenusStore((s) => s.openMenuNodeId === node.id) + + const handleConvertPage = useCallback(() => { + convertPage( + nodeId, + nodeKind === NODE_KIND_PAGE ? NODE_KIND_SECTION : NODE_KIND_PAGE, + ) + .then(() => { + toast.success('Page converted successfully') + reloadTree() + }) + .catch(() => { + toast.error('Failed to convert page') + }) + }, [nodeId, nodeKind, reloadTree]) + + const redirectUrlAfterDelete = useCallback(() => { + if (location.pathname.startsWith('/' + node.path)) { + if (node.parentId) { + return node.path.substring(0, node.path.lastIndexOf('/')) + } else { + return '/' + } + } + + // remove leading slash + return location.pathname.startsWith('/') + ? location.pathname.substring(1) + : location.pathname + }, [location.pathname, node.path, node.parentId]) + + return ( + setOpenMenuNodeId(nextOpen ? node.id : null)} + > + + } + tooltip="Open more actions" + /> + + + { + openDialog(DIALOG_ADD_PAGE, { + parentId: nodeId, + nodeKind: NODE_KIND_PAGE, + }) + }} + > + Add Page + + { + openDialog(DIALOG_ADD_PAGE, { + parentId: nodeId, + nodeKind: NODE_KIND_SECTION, + }) + }} + > + Add + Section + + + { + navigate(`/e/${node.path}`) + }} + > + Edit{' '} + {nodeKind === NODE_KIND_PAGE ? 'Page' : 'Section'} + + {nodeKind === NODE_KIND_SECTION && hasChildren && ( + openDialog(DIALOG_SORT_PAGES, { parent: node })} + > + Sort Section + + )} + openDialog(DIALOG_MOVE_PAGE, { pageId: node.id })} + > + Move{' '} + {nodeKind === NODE_KIND_PAGE ? 'Page' : 'Section'} + + {nodeKind === NODE_KIND_SECTION && !hasChildren && ( + + Convert to + Page + + )} + + { + openDialog(DIALOG_DELETE_PAGE_CONFIRMATION, { + pageId: node?.id, + redirectUrl: redirectUrlAfterDelete(), + }) + }} + > + {' '} + Delete {nodeKind === NODE_KIND_PAGE ? 'Page' : 'Section'} + + + + ) +} diff --git a/ui/leafwiki-ui/src/features/tree/TreeView.tsx b/ui/leafwiki-ui/src/features/tree/TreeView.tsx index fd6a3e1a..23363ae0 100644 --- a/ui/leafwiki-ui/src/features/tree/TreeView.tsx +++ b/ui/leafwiki-ui/src/features/tree/TreeView.tsx @@ -1,10 +1,11 @@ import { TreeViewActionButton } from '@/features/tree/TreeViewActionButton' +import { NODE_KIND_PAGE, NODE_KIND_SECTION } from '@/lib/api/pages' import { DIALOG_ADD_PAGE, DIALOG_SORT_PAGES } from '@/lib/registries' import { getAncestorIds } from '@/lib/treeUtils' import { useIsReadOnly } from '@/lib/useIsReadOnly' import { useDialogsStore } from '@/stores/dialogs' import { useTreeStore } from '@/stores/tree' -import { List, Plus } from 'lucide-react' +import { FilePlus, FolderPlus, List } from 'lucide-react' import { useEffect } from 'react' import { useLocation } from 'react-router-dom' import { TreeNode } from './TreeNode' @@ -57,14 +58,30 @@ export default function TreeView() {
} + icon={} tooltip="Create new page" - onClick={() => openDialog(DIALOG_ADD_PAGE, { parentId: '' })} + onClick={() => + openDialog(DIALOG_ADD_PAGE, { + parentId: '', + nodeKind: NODE_KIND_PAGE, + }) + } + /> + } + tooltip="Create new section" + onClick={() => + openDialog(DIALOG_ADD_PAGE, { + parentId: '', + nodeKind: NODE_KIND_SECTION, + }) + } /> {tree && ( } + icon={} tooltip="Sort pages" onClick={() => openDialog(DIALOG_SORT_PAGES, { parent: tree })} /> diff --git a/ui/leafwiki-ui/src/features/tree/TreeViewActionButton.tsx b/ui/leafwiki-ui/src/features/tree/TreeViewActionButton.tsx index ed30cc06..5c24be0d 100644 --- a/ui/leafwiki-ui/src/features/tree/TreeViewActionButton.tsx +++ b/ui/leafwiki-ui/src/features/tree/TreeViewActionButton.tsx @@ -1,7 +1,7 @@ import { TooltipWrapper } from '@/components/TooltipWrapper' type TreeViewActionButtonProps = { - onClick: () => void + onClick?: () => void actionName: string icon: React.ReactNode tooltip: string @@ -18,7 +18,10 @@ export function TreeViewActionButton({