diff --git a/README.md b/README.md index 80abeadf..8642d8e6 100644 --- a/README.md +++ b/README.md @@ -843,6 +843,16 @@ gog drive move --parent gog drive delete # Move to trash gog drive delete --permanent # Permanently delete +# Reporting +gog drive tree --parent --depth 2 +gog drive du --parent --depth 1 +gog drive inventory --parent --depth 0 --max 500 + +# Sync +gog drive sync pull --folder --out ./drive-sync +gog drive sync push --folder --from ./drive-sync +gog drive sync pull --folder --out ./drive-sync --dry-run + # Permissions gog drive permissions gog drive share --to user --email user@example.com --role reader @@ -923,6 +933,10 @@ gog contacts get people/ --json | \ gog contacts delete people/ +# Dedupe +gog contacts dedupe +gog contacts dedupe --match email,phone,name --apply + # Workspace directory (requires Google Workspace) gog contacts directory list --max 50 gog contacts directory search "Jane" --max 50 diff --git a/internal/cmd/contacts.go b/internal/cmd/contacts.go index 991e8e0a..97e24c37 100644 --- a/internal/cmd/contacts.go +++ b/internal/cmd/contacts.go @@ -19,6 +19,7 @@ type ContactsCmd struct { Create ContactsCreateCmd `cmd:"" name:"create" aliases:"add,new" help:"Create a contact"` Update ContactsUpdateCmd `cmd:"" name:"update" aliases:"edit,set" help:"Update a contact"` Delete ContactsDeleteCmd `cmd:"" name:"delete" aliases:"rm,del,remove" help:"Delete a contact"` + Dedupe ContactsDedupeCmd `cmd:"" name:"dedupe" help:"Find and merge duplicate contacts"` Directory ContactsDirectoryCmd `cmd:"" name:"directory" help:"Directory contacts"` Other ContactsOtherCmd `cmd:"" name:"other" help:"Other contacts"` } diff --git a/internal/cmd/contacts_dedupe.go b/internal/cmd/contacts_dedupe.go new file mode 100644 index 00000000..b9d12107 --- /dev/null +++ b/internal/cmd/contacts_dedupe.go @@ -0,0 +1,513 @@ +package cmd + +import ( + "context" + "fmt" + "os" + "sort" + "strings" + + "google.golang.org/api/people/v1" + + 
"github.com/steipete/gogcli/internal/outfmt" + "github.com/steipete/gogcli/internal/ui" +) + +type ContactsDedupeCmd struct { + Match string `name:"match" help:"Match fields: email,phone,name" default:"email,phone,name"` + Max int64 `name:"max" aliases:"limit" help:"Max contacts to scan (0 = all)" default:"0"` + Apply bool `name:"apply" help:"Apply merge/delete operations"` +} + +func (c *ContactsDedupeCmd) Run(ctx context.Context, flags *RootFlags) error { + u := ui.FromContext(ctx) + account, err := requireAccount(flags) + if err != nil { + return err + } + + match, err := parseDedupeMatch(c.Match) + if err != nil { + return err + } + + svc, err := newPeopleContactsService(ctx, account) + if err != nil { + return err + } + + contacts, err := listContacts(svc, c.Max) + if err != nil { + return err + } + + groups := buildDedupeGroups(contacts, match) + if err := outputDedupeGroups(ctx, u, groups); err != nil { + return err + } + if !c.Apply { + return nil + } + if len(groups) == 0 { + return nil + } + + if err := confirmDestructive(ctx, flags, fmt.Sprintf("merge %d contact groups", len(groups))); err != nil { + return err + } + + for _, g := range groups { + merged := mergeContactGroup(g) + _, err := svc.People.UpdateContact(g.Primary.ResourceName, merged). + UpdatePersonFields("names,emailAddresses,phoneNumbers"). 
+ Do() + if err != nil { + return err + } + for _, m := range g.Members { + if m.ResourceName == g.Primary.ResourceName { + continue + } + if _, err := svc.People.DeleteContact(m.ResourceName).Do(); err != nil { + return err + } + } + } + return nil +} + +type dedupeMatch struct { + Email bool + Phone bool + Name bool +} + +func parseDedupeMatch(value string) (dedupeMatch, error) { + value = strings.TrimSpace(value) + if value == "" { + return dedupeMatch{}, usage("empty --match") + } + out := dedupeMatch{} + for _, part := range strings.Split(value, ",") { + part = strings.TrimSpace(strings.ToLower(part)) + switch part { + case "email": + out.Email = true + case "phone": + out.Phone = true + case "name": + out.Name = true + case "": + continue + default: + return dedupeMatch{}, usagef("invalid --match %q (use email,phone,name)", part) + } + } + if !out.Email && !out.Phone && !out.Name { + return dedupeMatch{}, usage("invalid --match (no fields enabled)") + } + return out, nil +} + +func listContacts(svc *people.Service, maxResults int64) ([]*people.Person, error) { + out := make([]*people.Person, 0, 128) + var pageToken string + for { + pageSize := int64(500) + if maxResults > 0 && maxResults < pageSize { + pageSize = maxResults + } + call := svc.People.Connections.List(peopleMeResource). + PersonFields(contactsReadMask). + PageSize(pageSize). + PageToken(pageToken). 
+ RequestSyncToken(false) + resp, err := call.Do() + if err != nil { + return nil, err + } + for _, p := range resp.Connections { + if p == nil { + continue + } + out = append(out, p) + if maxResults > 0 && int64(len(out)) >= maxResults { + return out, nil + } + } + if resp.NextPageToken == "" { + break + } + pageToken = resp.NextPageToken + } + return out, nil +} + +type dedupeGroup struct { + Primary *people.Person + Members []*people.Person + Merged contactSummary +} + +type contactSummary struct { + Resource string `json:"resource"` + Name string `json:"name,omitempty"` + Emails []string `json:"emails,omitempty"` + Phones []string `json:"phones,omitempty"` +} + +func buildDedupeGroups(contacts []*people.Person, match dedupeMatch) []dedupeGroup { + if len(contacts) == 0 { + return nil + } + uf := newUnionFind(len(contacts)) + seen := map[string]int{} + + for i, p := range contacts { + keys := contactKeys(p, match) + for _, key := range keys { + if j, ok := seen[key]; ok { + uf.union(i, j) + } else { + seen[key] = i + } + } + } + + groups := map[int][]*people.Person{} + for i, p := range contacts { + root := uf.find(i) + groups[root] = append(groups[root], p) + } + + out := make([]dedupeGroup, 0) + for _, members := range groups { + if len(members) < 2 { + continue + } + primary := choosePrimaryContact(members) + out = append(out, dedupeGroup{ + Primary: primary, + Members: members, + Merged: summarizeMergedContact(primary, members), + }) + } + + sort.Slice(out, func(i, j int) bool { + return out[i].Primary.ResourceName < out[j].Primary.ResourceName + }) + return out +} + +func contactKeys(p *people.Person, match dedupeMatch) []string { + if p == nil { + return nil + } + keys := make([]string, 0, 4) + if match.Email { + for _, e := range p.EmailAddresses { + if e == nil { + continue + } + if v := normalizeContactEmail(e.Value); v != "" { + keys = append(keys, "email:"+v) + } + } + } + if match.Phone { + for _, ph := range p.PhoneNumbers { + if ph == nil { + 
continue + } + if v := normalizePhone(ph.Value); v != "" { + keys = append(keys, "phone:"+v) + } + } + } + if match.Name { + if v := normalizeName(primaryName(p)); v != "" { + keys = append(keys, "name:"+v) + } + } + return keys +} + +func choosePrimaryContact(members []*people.Person) *people.Person { + if len(members) == 0 { + return nil + } + best := members[0] + bestScore := contactScore(best) + for _, m := range members[1:] { + if m == nil { + continue + } + score := contactScore(m) + if score > bestScore { + best = m + bestScore = score + } else if score == bestScore && m.ResourceName < best.ResourceName { + best = m + } + } + return best +} + +func contactScore(p *people.Person) int { + if p == nil { + return 0 + } + score := 0 + if primaryName(p) != "" { + score += 2 + } + score += len(p.EmailAddresses) * 2 + score += len(p.PhoneNumbers) * 2 + return score +} + +func summarizeMergedContact(primary *people.Person, members []*people.Person) contactSummary { + merged := mergeContactGroup(dedupeGroup{Primary: primary, Members: members}) + return contactSummary{ + Resource: merged.ResourceName, + Name: primaryName(merged), + Emails: uniqueEmails(merged.EmailAddresses), + Phones: uniquePhones(merged.PhoneNumbers), + } +} + +func mergeContactGroup(group dedupeGroup) *people.Person { + primary := group.Primary + if primary == nil { + return &people.Person{} + } + name := primaryName(primary) + nameSource := primary + emails := make([]*people.EmailAddress, 0) + phones := make([]*people.PhoneNumber, 0) + + seenEmails := map[string]bool{} + seenPhones := map[string]bool{} + + addEmail := func(value string) { + normalized := normalizeContactEmail(value) + if normalized == "" || seenEmails[normalized] { + return + } + seenEmails[normalized] = true + emails = append(emails, &people.EmailAddress{Value: strings.TrimSpace(value)}) + } + addPhone := func(value string) { + normalized := normalizePhone(value) + if normalized == "" || seenPhones[normalized] { + return + } + 
seenPhones[normalized] = true + phones = append(phones, &people.PhoneNumber{Value: strings.TrimSpace(value)}) + } + + for _, p := range orderedMembers(primary, group.Members) { + if p == nil { + continue + } + if name == "" { + if n := primaryName(p); n != "" { + name = n + nameSource = p + } + } + for _, e := range p.EmailAddresses { + if e == nil { + continue + } + addEmail(e.Value) + } + for _, ph := range p.PhoneNumbers { + if ph == nil { + continue + } + addPhone(ph.Value) + } + } + + merged := *primary + if name != "" { + if nameSource == primary && len(primary.Names) > 0 { + merged.Names = primary.Names + } else { + merged.Names = []*people.Name{{DisplayName: name}} + } + } + if len(emails) > 0 { + merged.EmailAddresses = emails + } + if len(phones) > 0 { + merged.PhoneNumbers = phones + } + return &merged +} + +func orderedMembers(primary *people.Person, members []*people.Person) []*people.Person { + if primary == nil || len(members) <= 1 { + return members + } + out := make([]*people.Person, 0, len(members)) + out = append(out, primary) + for _, m := range members { + if m == nil || m.ResourceName == primary.ResourceName { + continue + } + out = append(out, m) + } + return out +} + +func outputDedupeGroups(ctx context.Context, u *ui.UI, groups []dedupeGroup) error { + if outfmt.IsJSON(ctx) { + out := make([]map[string]any, 0, len(groups)) + for _, g := range groups { + members := make([]contactSummary, 0, len(g.Members)) + for _, m := range g.Members { + members = append(members, summarizeContact(m)) + } + out = append(out, map[string]any{ + "primary": summarizeContact(g.Primary), + "merged": g.Merged, + "members": members, + }) + } + return outfmt.WriteJSON(ctx, os.Stdout, map[string]any{"groups": out}) + } + + if len(groups) == 0 { + if u != nil { + u.Err().Println("No duplicates") + } + return nil + } + + w, flush := tableWriter(ctx) + defer flush() + fmt.Fprintln(w, "GROUP\tACTION\tRESOURCE\tNAME\tEMAIL\tPHONE") + for i, g := range groups { + for _, m 
:= range g.Members { + action := "merge" + if g.Primary != nil && m.ResourceName == g.Primary.ResourceName { + action = "keep" + } + fmt.Fprintf(w, "%d\t%s\t%s\t%s\t%s\t%s\n", + i+1, + action, + m.ResourceName, + sanitizeTab(primaryName(m)), + sanitizeTab(primaryEmail(m)), + sanitizeTab(primaryPhone(m)), + ) + } + } + return nil +} + +func summarizeContact(p *people.Person) contactSummary { + if p == nil { + return contactSummary{} + } + return contactSummary{ + Resource: p.ResourceName, + Name: primaryName(p), + Emails: uniqueEmails(p.EmailAddresses), + Phones: uniquePhones(p.PhoneNumbers), + } +} + +func uniqueEmails(list []*people.EmailAddress) []string { + seen := map[string]bool{} + out := make([]string, 0, len(list)) + for _, e := range list { + if e == nil { + continue + } + normalized := normalizeContactEmail(e.Value) + if normalized == "" || seen[normalized] { + continue + } + seen[normalized] = true + out = append(out, strings.TrimSpace(e.Value)) + } + return out +} + +func uniquePhones(list []*people.PhoneNumber) []string { + seen := map[string]bool{} + out := make([]string, 0, len(list)) + for _, p := range list { + if p == nil { + continue + } + normalized := normalizePhone(p.Value) + if normalized == "" || seen[normalized] { + continue + } + seen[normalized] = true + out = append(out, strings.TrimSpace(p.Value)) + } + return out +} + +func normalizeContactEmail(value string) string { + return strings.ToLower(strings.TrimSpace(value)) +} + +func normalizePhone(value string) string { + out := make([]rune, 0, len(value)) + for _, r := range value { + if r >= '0' && r <= '9' { + out = append(out, r) + } + } + return string(out) +} + +func normalizeName(value string) string { + parts := strings.Fields(strings.ToLower(strings.TrimSpace(value))) + return strings.Join(parts, " ") +} + +type unionFind struct { + parent []int + rank []int +} + +func newUnionFind(n int) *unionFind { + parent := make([]int, n) + rank := make([]int, n) + for i := range parent { + 
parent[i] = i + } + return &unionFind{parent: parent, rank: rank} +} + +func (u *unionFind) find(x int) int { + if u.parent[x] != x { + u.parent[x] = u.find(u.parent[x]) + } + return u.parent[x] +} + +func (u *unionFind) union(a int, b int) { + ra := u.find(a) + rb := u.find(b) + if ra == rb { + return + } + if u.rank[ra] < u.rank[rb] { + u.parent[ra] = rb + return + } + if u.rank[ra] > u.rank[rb] { + u.parent[rb] = ra + return + } + u.parent[rb] = ra + u.rank[ra]++ +} diff --git a/internal/cmd/contacts_dedupe_test.go b/internal/cmd/contacts_dedupe_test.go new file mode 100644 index 00000000..a0f24c45 --- /dev/null +++ b/internal/cmd/contacts_dedupe_test.go @@ -0,0 +1,66 @@ +package cmd + +import ( + "testing" + + "google.golang.org/api/people/v1" +) + +func TestParseDedupeMatch(t *testing.T) { + if _, err := parseDedupeMatch("email,phone,name"); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, err := parseDedupeMatch("bad"); err == nil { + t.Fatalf("expected error for invalid match") + } +} + +func TestNormalizePhone(t *testing.T) { + got := normalizePhone("(415) 555-1212") + if got != "4155551212" { + t.Fatalf("normalizePhone = %q", got) + } +} + +func TestBuildDedupeGroups(t *testing.T) { + p1 := person("people/1", "Alice A", "alice@example.com", "") + p2 := person("people/2", "Alice A", "ALICE@example.com", "") + p3 := person("people/3", "Bob B", "bob@example.com", "") + + match, _ := parseDedupeMatch("email") + groups := buildDedupeGroups([]*people.Person{p1, p2, p3}, match) + if len(groups) != 1 { + t.Fatalf("groups = %d, want 1", len(groups)) + } + if len(groups[0].Members) != 2 { + t.Fatalf("group members = %d, want 2", len(groups[0].Members)) + } +} + +func TestMergeContactGroup(t *testing.T) { + primary := person("people/1", "Alice A", "alice@example.com", "") + other := person("people/2", "", "alice+alt@example.com", "123") + group := dedupeGroup{Primary: primary, Members: []*people.Person{primary, other}} + + merged := 
mergeContactGroup(group) + if len(merged.EmailAddresses) != 2 { + t.Fatalf("emails = %d, want 2", len(merged.EmailAddresses)) + } + if len(merged.PhoneNumbers) != 1 { + t.Fatalf("phones = %d, want 1", len(merged.PhoneNumbers)) + } +} + +func person(resource string, name string, email string, phone string) *people.Person { + p := &people.Person{ResourceName: resource} + if name != "" { + p.Names = []*people.Name{{DisplayName: name}} + } + if email != "" { + p.EmailAddresses = []*people.EmailAddress{{Value: email}} + } + if phone != "" { + p.PhoneNumbers = []*people.PhoneNumber{{Value: phone}} + } + return p +} diff --git a/internal/cmd/drive.go b/internal/cmd/drive.go index 1d1dacbc..f887281e 100644 --- a/internal/cmd/drive.go +++ b/internal/cmd/drive.go @@ -33,6 +33,8 @@ var ( ) const ( + driveRootID = "root" + driveMimeFolder = "application/vnd.google-apps.folder" driveMimeGoogleDoc = "application/vnd.google-apps.document" driveMimeGoogleSheet = "application/vnd.google-apps.spreadsheet" driveMimeGoogleSlides = "application/vnd.google-apps.presentation" @@ -63,6 +65,10 @@ const ( type DriveCmd struct { Ls DriveLsCmd `cmd:"" name:"ls" help:"List files in a folder (default: root)"` Search DriveSearchCmd `cmd:"" name:"search" help:"Full-text search across Drive"` + Tree DriveTreeCmd `cmd:"" name:"tree" help:"Tree view of a folder"` + Du DriveDuCmd `cmd:"" name:"du" help:"Folder sizes (disk usage)"` + Inventory DriveInventoryCmd `cmd:"" name:"inventory" help:"Inventory report for a folder"` + Sync DriveSyncCmd `cmd:"" name:"sync" help:"Sync Drive folders with local"` Get DriveGetCmd `cmd:"" name:"get" help:"Get file metadata"` Download DriveDownloadCmd `cmd:"" name:"download" help:"Download a file (exports Google Docs formats)"` Copy DriveCopyCmd `cmd:"" name:"copy" help:"Copy a file"` @@ -96,7 +102,7 @@ func (c *DriveLsCmd) Run(ctx context.Context, flags *RootFlags) error { folderID := strings.TrimSpace(c.Parent) if folderID == "" { - folderID = "root" + folderID = 
driveRootID } svc, err := newDriveService(ctx, account) @@ -1040,7 +1046,7 @@ func escapeDriveQueryString(s string) string { } func driveType(mimeType string) string { - if mimeType == "application/vnd.google-apps.folder" { + if mimeType == driveMimeFolder { return "folder" } return strFile diff --git a/internal/cmd/drive_reporting.go b/internal/cmd/drive_reporting.go new file mode 100644 index 00000000..bb7fdbeb --- /dev/null +++ b/internal/cmd/drive_reporting.go @@ -0,0 +1,530 @@ +package cmd + +import ( + "context" + "fmt" + "os" + "path" + "sort" + "strings" + + "google.golang.org/api/drive/v3" + gapi "google.golang.org/api/googleapi" + + "github.com/steipete/gogcli/internal/outfmt" + "github.com/steipete/gogcli/internal/ui" +) + +const driveDefaultPageSize = 1000 + +type DriveTreeCmd struct { + Parent string `name:"parent" help:"Folder ID to start from (default: root)"` + Depth int `name:"depth" help:"Max depth (0 = unlimited)" default:"2"` + Max int `name:"max" help:"Max items to return (0 = unlimited)" default:"0"` +} + +func (c *DriveTreeCmd) Run(ctx context.Context, flags *RootFlags) error { + u := ui.FromContext(ctx) + account, err := requireAccount(flags) + if err != nil { + return err + } + + rootID := strings.TrimSpace(c.Parent) + if rootID == "" { + rootID = driveRootID + } + depth := c.Depth + if depth < 0 { + depth = 0 + } + maxItems := c.Max + if maxItems < 0 { + maxItems = 0 + } + + svc, err := newDriveService(ctx, account) + if err != nil { + return err + } + + items, truncated, err := listDriveTree(ctx, svc, driveTreeOptions{ + RootID: rootID, + MaxDepth: depth, + MaxItems: maxItems, + Fields: driveTreeFields, + IncludeFiles: true, + IncludeFolder: true, + }) + if err != nil { + return err + } + + if outfmt.IsJSON(ctx) { + return outfmt.WriteJSON(ctx, os.Stdout, map[string]any{ + "items": items, + "truncated": truncated, + }) + } + + if len(items) == 0 { + u.Err().Println("No files") + return nil + } + + w, flush := tableWriter(ctx) + defer 
flush() + fmt.Fprintln(w, "PATH\tTYPE\tSIZE\tMODIFIED\tID") + for _, it := range items { + fmt.Fprintf( + w, + "%s\t%s\t%s\t%s\t%s\n", + sanitizeTab(it.Path), + driveType(it.MimeType), + formatDriveSize(it.Size), + formatDateTime(it.ModifiedTime), + it.ID, + ) + } + if truncated { + u.Err().Println("Results truncated; increase --max to see more.") + } + return nil +} + +type DriveInventoryCmd struct { + Parent string `name:"parent" help:"Folder ID to start from (default: root)"` + Depth int `name:"depth" help:"Max depth (0 = unlimited)" default:"0"` + Max int `name:"max" help:"Max items to return (0 = unlimited)" default:"500"` + Sort string `name:"sort" help:"Sort by path|size|modified" default:"path"` + Order string `name:"order" help:"Sort order: asc|desc" default:"asc"` +} + +func (c *DriveInventoryCmd) Run(ctx context.Context, flags *RootFlags) error { + u := ui.FromContext(ctx) + account, err := requireAccount(flags) + if err != nil { + return err + } + + rootID := strings.TrimSpace(c.Parent) + if rootID == "" { + rootID = driveRootID + } + depth := c.Depth + if depth < 0 { + depth = 0 + } + maxItems := c.Max + if maxItems < 0 { + maxItems = 0 + } + + svc, err := newDriveService(ctx, account) + if err != nil { + return err + } + + items, truncated, err := listDriveTree(ctx, svc, driveTreeOptions{ + RootID: rootID, + MaxDepth: depth, + MaxItems: maxItems, + Fields: driveInventoryFields, + IncludeFiles: true, + IncludeFolder: true, + }) + if err != nil { + return err + } + + sortDriveInventory(items, c.Sort, c.Order) + + if outfmt.IsJSON(ctx) { + return outfmt.WriteJSON(ctx, os.Stdout, map[string]any{ + "items": items, + "truncated": truncated, + }) + } + + if len(items) == 0 { + u.Err().Println("No files") + return nil + } + + w, flush := tableWriter(ctx) + defer flush() + fmt.Fprintln(w, "PATH\tTYPE\tSIZE\tMODIFIED\tOWNER\tID") + for _, it := range items { + owner := "-" + if len(it.Owners) > 0 { + owner = it.Owners[0] + } + fmt.Fprintf( + w, + 
"%s\t%s\t%s\t%s\t%s\t%s\n", + sanitizeTab(it.Path), + driveType(it.MimeType), + formatDriveSize(it.Size), + formatDateTime(it.ModifiedTime), + owner, + it.ID, + ) + } + if truncated { + u.Err().Println("Results truncated; increase --max to see more.") + } + return nil +} + +type DriveDuCmd struct { + Parent string `name:"parent" help:"Folder ID to start from (default: root)"` + Depth int `name:"depth" help:"Depth for folder totals" default:"1"` + Max int `name:"max" help:"Max folders to return (0 = unlimited)" default:"50"` + Sort string `name:"sort" help:"Sort by size|path|files" default:"size"` + Order string `name:"order" help:"Sort order: asc|desc" default:"desc"` +} + +func (c *DriveDuCmd) Run(ctx context.Context, flags *RootFlags) error { + u := ui.FromContext(ctx) + account, err := requireAccount(flags) + if err != nil { + return err + } + + rootID := strings.TrimSpace(c.Parent) + if rootID == "" { + rootID = driveRootID + } + depth := c.Depth + if depth < 0 { + depth = 0 + } + maxItems := c.Max + if maxItems < 0 { + maxItems = 0 + } + + svc, err := newDriveService(ctx, account) + if err != nil { + return err + } + + items, truncated, err := listDriveTree(ctx, svc, driveTreeOptions{ + RootID: rootID, + MaxDepth: 0, + MaxItems: 0, + Fields: driveTreeFields, + IncludeFiles: true, + IncludeFolder: true, + }) + if err != nil { + return err + } + if truncated { + return fmt.Errorf("drive du truncated unexpectedly") + } + + summaries := summarizeDriveDu(items, rootID, depth) + sortDriveDu(summaries, c.Sort, c.Order) + + if maxItems > 0 && len(summaries) > maxItems { + summaries = summaries[:maxItems] + } + + if outfmt.IsJSON(ctx) { + return outfmt.WriteJSON(ctx, os.Stdout, map[string]any{ + "folders": summaries, + }) + } + + if len(summaries) == 0 { + u.Err().Println("No folders") + return nil + } + + w, flush := tableWriter(ctx) + defer flush() + fmt.Fprintln(w, "PATH\tSIZE\tFILES") + for _, f := range summaries { + fmt.Fprintf(w, "%s\t%s\t%d\n", 
sanitizeTab(f.Path), formatDriveSize(f.Size), f.Files) + } + return nil +} + +type driveTreeItem struct { + ID string `json:"id"` + Name string `json:"name"` + Path string `json:"path"` + ParentID string `json:"parentId,omitempty"` + MimeType string `json:"mimeType"` + Size int64 `json:"size,omitempty"` + ModifiedTime string `json:"modifiedTime,omitempty"` + Owners []string `json:"owners,omitempty"` + MD5 string `json:"md5,omitempty"` + Depth int `json:"depth"` +} + +func (d driveTreeItem) IsFolder() bool { + return d.MimeType == driveMimeFolder +} + +type driveTreeOptions struct { + RootID string + MaxDepth int + MaxItems int + Fields string + IncludeFiles bool + IncludeFolder bool +} + +type driveFolderQueueItem struct { + ID string + Path string + Depth int +} + +const ( + driveTreeFields = "id,name,mimeType,size,modifiedTime" + driveInventoryFields = "id,name,mimeType,size,modifiedTime,owners(emailAddress,displayName)" +) + +func listDriveTree(ctx context.Context, svc *drive.Service, opts driveTreeOptions) ([]driveTreeItem, bool, error) { + rootID := strings.TrimSpace(opts.RootID) + if rootID == "" { + rootID = driveRootID + } + fields := strings.TrimSpace(opts.Fields) + if fields == "" { + fields = driveTreeFields + } + + queue := []driveFolderQueueItem{{ID: rootID, Path: "", Depth: 0}} + out := make([]driveTreeItem, 0, 128) + truncated := false + + for len(queue) > 0 { + folder := queue[0] + queue = queue[1:] + + children, err := listDriveChildren(ctx, svc, folder.ID, fields) + if err != nil { + return nil, false, err + } + for _, child := range children { + if child == nil { + continue + } + depth := folder.Depth + 1 + item := driveTreeItem{ + ID: child.Id, + Name: child.Name, + Path: joinDrivePath(folder.Path, child.Name), + ParentID: folder.ID, + MimeType: child.MimeType, + Size: child.Size, + ModifiedTime: child.ModifiedTime, + Owners: driveOwners(child), + MD5: child.Md5Checksum, + Depth: depth, + } + + if item.IsFolder() { + if opts.IncludeFolder { + 
out = append(out, item) + } + if opts.MaxDepth <= 0 || depth < opts.MaxDepth { + queue = append(queue, driveFolderQueueItem{ID: child.Id, Path: item.Path, Depth: depth}) + } + } else if opts.IncludeFiles { + out = append(out, item) + } + + if opts.MaxItems > 0 && len(out) >= opts.MaxItems { + truncated = true + return out, truncated, nil + } + } + } + + return out, truncated, nil +} + +func listDriveChildren(ctx context.Context, svc *drive.Service, parentID string, fields string) ([]*drive.File, error) { + if parentID == "" { + parentID = driveRootID + } + q := buildDriveListQuery(parentID, "") + out := make([]*drive.File, 0, 64) + var pageToken string + + for { + call := svc.Files.List(). + Q(q). + PageSize(driveDefaultPageSize). + PageToken(pageToken). + OrderBy("folder,name"). + SupportsAllDrives(true). + IncludeItemsFromAllDrives(true). + Fields( + gapi.Field("nextPageToken"), + gapi.Field("files("+fields+")"), + ). + Context(ctx) + resp, err := call.Do() + if err != nil { + return nil, err + } + out = append(out, resp.Files...) + if resp.NextPageToken == "" { + break + } + pageToken = resp.NextPageToken + } + + return out, nil +} + +func joinDrivePath(parent string, name string) string { + name = sanitizeDriveName(name) + if parent == "" { + return name + } + return path.Join(parent, name) +} + +func sanitizeDriveName(name string) string { + name = strings.ReplaceAll(name, "/", "_") + name = strings.ReplaceAll(name, "\\", "_") + name = strings.TrimSpace(name) + if name == "" || name == "." || name == ".." 
{ + return "_" + } + return name +} + +func driveOwners(f *drive.File) []string { + if f == nil || len(f.Owners) == 0 { + return nil + } + out := make([]string, 0, len(f.Owners)) + for _, owner := range f.Owners { + if owner == nil { + continue + } + if owner.EmailAddress != "" { + out = append(out, owner.EmailAddress) + } else if owner.DisplayName != "" { + out = append(out, owner.DisplayName) + } + } + return out +} + +type driveDuSummary struct { + ID string `json:"id"` + Path string `json:"path"` + Size int64 `json:"size"` + Files int `json:"files"` + Depth int `json:"depth"` +} + +func summarizeDriveDu(items []driveTreeItem, rootID string, depthLimit int) []driveDuSummary { + type folderMeta struct { + path string + depth int + } + + parentByID := map[string]string{} + folderMetaByID := map[string]folderMeta{ + rootID: {path: ".", depth: 0}, + } + for _, it := range items { + if it.IsFolder() { + parentByID[it.ID] = it.ParentID + folderMetaByID[it.ID] = folderMeta{path: it.Path, depth: it.Depth} + } + } + + sizes := map[string]*driveDuSummary{} + getSummary := func(id string) *driveDuSummary { + if s, ok := sizes[id]; ok { + return s + } + meta := folderMetaByID[id] + s := &driveDuSummary{ + ID: id, + Path: meta.path, + Depth: meta.depth, + } + sizes[id] = s + return s + } + + for _, it := range items { + if it.IsFolder() { + continue + } + parentID := it.ParentID + for parentID != "" { + s := getSummary(parentID) + s.Size += it.Size + s.Files++ + parentID = parentByID[parentID] + } + } + + out := make([]driveDuSummary, 0, len(sizes)) + for _, s := range sizes { + if depthLimit > 0 && s.Depth > depthLimit { + continue + } + out = append(out, *s) + } + return out +} + +func sortDriveDu(items []driveDuSummary, sortBy string, order string) { + sortBy = strings.ToLower(strings.TrimSpace(sortBy)) + order = strings.ToLower(strings.TrimSpace(order)) + desc := order == "desc" + + less := func(i, j int) bool { return false } + switch sortBy { + case "path": + less = 
func(i, j int) bool { return items[i].Path < items[j].Path } + case "files": + less = func(i, j int) bool { return items[i].Files < items[j].Files } + default: + less = func(i, j int) bool { return items[i].Size < items[j].Size } + } + + sort.Slice(items, func(i, j int) bool { + if desc { + return !less(i, j) + } + return less(i, j) + }) +} + +func sortDriveInventory(items []driveTreeItem, sortBy string, order string) { + sortBy = strings.ToLower(strings.TrimSpace(sortBy)) + order = strings.ToLower(strings.TrimSpace(order)) + desc := order == "desc" + + less := func(i, j int) bool { return false } + switch sortBy { + case "size": + less = func(i, j int) bool { return items[i].Size < items[j].Size } + case "modified": + less = func(i, j int) bool { return items[i].ModifiedTime < items[j].ModifiedTime } + default: + less = func(i, j int) bool { return items[i].Path < items[j].Path } + } + + sort.Slice(items, func(i, j int) bool { + if desc { + return !less(i, j) + } + return less(i, j) + }) +} diff --git a/internal/cmd/drive_reporting_test.go b/internal/cmd/drive_reporting_test.go new file mode 100644 index 00000000..89df6aa9 --- /dev/null +++ b/internal/cmd/drive_reporting_test.go @@ -0,0 +1,63 @@ +package cmd + +import "testing" + +func TestSanitizeDriveName(t *testing.T) { + cases := []struct { + in string + want string + }{ + {in: "", want: "_"}, + {in: ".", want: "_"}, + {in: "..", want: "_"}, + {in: "hello", want: "hello"}, + {in: "a/b", want: "a_b"}, + {in: "a\\b", want: "a_b"}, + {in: " foo ", want: "foo"}, + } + for _, tc := range cases { + if got := sanitizeDriveName(tc.in); got != tc.want { + t.Fatalf("sanitizeDriveName(%q) = %q, want %q", tc.in, got, tc.want) + } + } +} + +func TestJoinDrivePath(t *testing.T) { + if got := joinDrivePath("", "file"); got != "file" { + t.Fatalf("joinDrivePath empty = %q", got) + } + if got := joinDrivePath("dir", "file"); got != "dir/file" { + t.Fatalf("joinDrivePath dir = %q", got) + } +} + +func TestSummarizeDriveDu(t 
*testing.T) { + items := []driveTreeItem{ + {ID: "f1", Path: "a", ParentID: "root", MimeType: driveMimeFolder, Depth: 1}, + {ID: "f2", Path: "a/b", ParentID: "f1", MimeType: driveMimeFolder, Depth: 2}, + {ID: "file1", Path: "a/file.txt", ParentID: "f1", MimeType: "text/plain", Size: 10}, + {ID: "file2", Path: "a/b/file2.txt", ParentID: "f2", MimeType: "text/plain", Size: 5}, + } + + summaries := summarizeDriveDu(items, "root", 1) + if len(summaries) == 0 { + t.Fatalf("expected summaries") + } + + var rootSize int64 + var aSize int64 + for _, s := range summaries { + if s.Path == "." { + rootSize = s.Size + } + if s.Path == "a" { + aSize = s.Size + } + } + if rootSize != 15 { + t.Fatalf("root size = %d, want 15", rootSize) + } + if aSize != 15 { + t.Fatalf("a size = %d, want 15", aSize) + } +} diff --git a/internal/cmd/drive_sync.go b/internal/cmd/drive_sync.go new file mode 100644 index 00000000..1a75dea0 --- /dev/null +++ b/internal/cmd/drive_sync.go @@ -0,0 +1,813 @@ +package cmd + +import ( + "context" + "crypto/md5" // #nosec G501 -- Drive API exposes MD5 checksums; used only for sync change detection. 
	"encoding/hex"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"os"
	"path"
	"path/filepath"
	"strings"
	"time"

	"google.golang.org/api/drive/v3"
	gapi "google.golang.org/api/googleapi"

	"github.com/steipete/gogcli/internal/config"
	"github.com/steipete/gogcli/internal/outfmt"
	"github.com/steipete/gogcli/internal/ui"
)

// NOTE(review): this file uses context.Context (Run signatures, helpers) but
// "context" is not visible in the import block above — confirm it is imported.

const (
	// driveSyncStateFile is the JSON state file kept inside the local sync
	// root; it records folder ID, local root, and include/exclude filters.
	driveSyncStateFile = ".gog-sync.json"
	// driveSyncVersion is stamped into the state file for forward compatibility.
	driveSyncVersion = 1
	// timeSkewTolerance absorbs small clock differences between the local
	// filesystem and Drive when comparing modification times.
	timeSkewTolerance = 2 * time.Second
)

// DriveSyncCmd groups the pull/push sync subcommands.
type DriveSyncCmd struct {
	Pull DriveSyncPullCmd `cmd:"" name:"pull" help:"Sync Drive folder to local"`
	Push DriveSyncPushCmd `cmd:"" name:"push" help:"Sync local folder to Drive"`
}

// DriveSyncPullCmd mirrors a Drive folder into a local directory.
type DriveSyncPullCmd struct {
	Folder   string   `name:"folder" help:"Drive folder ID (required if no state file)"`
	Out      string   `name:"out" help:"Local destination directory (required if no state file)"`
	State    string   `name:"state" help:"Path to sync state file (default: /.gog-sync.json)"`
	Delete   bool     `name:"delete" help:"Delete local files not present in Drive"`
	Checksum bool     `name:"checksum" help:"Use checksums to detect changes"`
	Include  []string `name:"include" help:"Include glob (repeatable)"`
	Exclude  []string `name:"exclude" help:"Exclude glob (repeatable)"`
}

// Run executes a pull sync: load/merge config, list the remote tree, walk the
// local tree, compute a plan, print it, then (unless --dry-run) apply it and
// persist the sync state.
func (c *DriveSyncPullCmd) Run(ctx context.Context, flags *RootFlags) error {
	u := ui.FromContext(ctx)
	account, err := requireAccount(flags)
	if err != nil {
		return err
	}

	// State-file values act as defaults; explicit flags override them.
	statePath, rootPath, cfg, err := loadDriveSyncConfig(c.State, c.Out, "pull", account)
	if err != nil {
		return err
	}
	if strings.TrimSpace(c.Folder) != "" {
		cfg.FolderID = strings.TrimSpace(c.Folder)
	}
	if len(c.Include) > 0 {
		cfg.Include = c.Include
	}
	if len(c.Exclude) > 0 {
		cfg.Exclude = c.Exclude
	}
	// The state file itself is always excluded from syncing.
	cfg.Exclude = ensureDriveSyncExcludes(cfg.Exclude)
	if cfg.FolderID == "" || rootPath == "" {
		return usage("missing --folder or --out (or state file)")
	}
	if mkdirErr := os.MkdirAll(rootPath, 0o750); mkdirErr != nil {
		return mkdirErr
	}

	svc, err := newDriveService(ctx, account)
	if err != nil {
		return err
	}

	// Full recursive listing (MaxDepth/MaxItems 0 = unlimited).
	remoteItems, _, err := listDriveTree(ctx, svc, driveTreeOptions{
		RootID:        cfg.FolderID,
		MaxDepth:      0,
		MaxItems:      0,
		Fields:        driveSyncFields,
		IncludeFiles:  true,
		IncludeFolder: true,
	})
	if err != nil {
		return err
	}

	// exportDocs=true: Google-native docs map to exported file extensions.
	remoteFiles, remoteFolders := splitDriveItems(remoteItems, true)
	localFiles, err := walkLocalFiles(rootPath, cfg.Include, cfg.Exclude, c.Checksum)
	if err != nil {
		return err
	}

	plan := buildDrivePullPlan(remoteFiles, remoteFolders, localFiles, cfg, c.Delete, c.Checksum)
	// The plan is always shown, even in dry-run mode.
	if err := outputDriveSyncPlan(ctx, u, plan); err != nil {
		return err
	}

	if flags != nil && flags.DryRun {
		return nil
	}

	// Deletions require confirmation (confirmDestructive defined elsewhere).
	if plan.HasDeletes() {
		if err := confirmDestructive(ctx, flags, "delete local files"); err != nil {
			return err
		}
	}

	if err := applyDrivePullPlan(ctx, svc, rootPath, plan); err != nil {
		return err
	}

	return saveDriveSyncState(statePath, cfg)
}

// DriveSyncPushCmd mirrors a local directory into a Drive folder.
type DriveSyncPushCmd struct {
	Folder   string   `name:"folder" help:"Drive folder ID (required if no state file)"`
	From     string   `name:"from" help:"Local source directory (required if no state file)"`
	State    string   `name:"state" help:"Path to sync state file (default: /.gog-sync.json)"`
	Delete   bool     `name:"delete" help:"Delete Drive files not present locally"`
	Checksum bool     `name:"checksum" help:"Use checksums to detect changes"`
	Include  []string `name:"include" help:"Include glob (repeatable)"`
	Exclude  []string `name:"exclude" help:"Exclude glob (repeatable)"`
}

// Run executes a push sync; mirrors DriveSyncPullCmd.Run with the transfer
// direction reversed (no local MkdirAll — Drive folders are created on apply).
func (c *DriveSyncPushCmd) Run(ctx context.Context, flags *RootFlags) error {
	u := ui.FromContext(ctx)
	account, err := requireAccount(flags)
	if err != nil {
		return err
	}

	statePath, rootPath, cfg, err := loadDriveSyncConfig(c.State, c.From, "push", account)
	if err != nil {
		return err
	}
	if strings.TrimSpace(c.Folder) != "" {
		cfg.FolderID = strings.TrimSpace(c.Folder)
	}
	if len(c.Include) > 0 {
		cfg.Include = c.Include
	}
	if len(c.Exclude) > 0 {
		cfg.Exclude = c.Exclude
	}
	cfg.Exclude = ensureDriveSyncExcludes(cfg.Exclude)
	if cfg.FolderID == "" || rootPath == "" {
		return usage("missing --folder or --from (or state file)")
	}

	svc, err := newDriveService(ctx, account)
	if err != nil {
		return err
	}

	remoteItems, _, err := listDriveTree(ctx, svc, driveTreeOptions{
		RootID:        cfg.FolderID,
		MaxDepth:      0,
		MaxItems:      0,
		Fields:        driveSyncFields,
		IncludeFiles:  true,
		IncludeFolder: true,
	})
	if err != nil {
		return err
	}

	// exportDocs=false: Google-native docs keep their Drive paths (push skips them).
	remoteFiles, remoteFolders := splitDriveItems(remoteItems, false)
	localFiles, err := walkLocalFiles(rootPath, cfg.Include, cfg.Exclude, c.Checksum)
	if err != nil {
		return err
	}

	plan := buildDrivePushPlan(remoteFiles, localFiles, cfg, c.Delete, c.Checksum)
	if err := outputDriveSyncPlan(ctx, u, plan); err != nil {
		return err
	}

	if flags != nil && flags.DryRun {
		return nil
	}

	if plan.HasDeletes() {
		if err := confirmDestructive(ctx, flags, "delete Drive files"); err != nil {
			return err
		}
	}

	if err := applyDrivePushPlan(ctx, svc, cfg.FolderID, rootPath, remoteFolders, plan); err != nil {
		return err
	}

	return saveDriveSyncState(statePath, cfg)
}

// driveSyncConfig is the persisted sync state (serialized to .gog-sync.json).
type driveSyncConfig struct {
	Version   int      `json:"version"`
	Direction string   `json:"direction"`
	Account   string   `json:"account"`
	FolderID  string   `json:"folderId"`
	LocalRoot string   `json:"localRoot"`
	Include   []string `json:"include,omitempty"`
	Exclude   []string `json:"exclude,omitempty"`
	UpdatedAt string   `json:"updatedAt,omitempty"`
}

// loadDriveSyncConfig builds the effective config from flags plus any existing
// state file. Returns (statePath, rootPath, cfg, err). Precedence: explicit
// flag values win; the state file only fills in fields still empty. A missing
// state file is not an error; any other read failure is.
func loadDriveSyncConfig(statePath string, rootPath string, direction string, account string) (string, string, driveSyncConfig, error) {
	rootPath = strings.TrimSpace(rootPath)
	if rootPath != "" {
		expanded, err := config.ExpandPath(rootPath)
		if err != nil {
			return "", "", driveSyncConfig{}, err
		}
		rootPath = expanded
	}

	// Explicit --state wins; otherwise derive <root>/.gog-sync.json.
	statePath, err := resolveDriveSyncStatePath(statePath, rootPath)
	if err != nil {
		return "", "", driveSyncConfig{}, err
	}

	cfg := driveSyncConfig{
		Version:   driveSyncVersion,
		Direction: direction,
		Account:   account,
		LocalRoot: rootPath,
		Exclude:   []string{driveSyncStateFile},
	}

	// No root and no explicit state file: nothing to load.
	if statePath == "" {
		return "", rootPath, cfg, nil
	}

	if data, err := os.ReadFile(statePath); err == nil { //nolint:gosec // state path is explicit CLI input or derived from local sync root
		var stored driveSyncConfig
		// A corrupt state file is silently ignored (treated as absent).
		if jsonErr := json.Unmarshal(data, &stored); jsonErr == nil {
			if cfg.FolderID == "" {
				cfg.FolderID = stored.FolderID
			}
			if cfg.LocalRoot == "" {
				// Only possible when --state was explicit and no root flag given.
				cfg.LocalRoot = stored.LocalRoot
				rootPath = stored.LocalRoot
			}
			if len(cfg.Include) == 0 {
				cfg.Include = stored.Include
			}
			// Duplicates are removed later by ensureDriveSyncExcludes.
			cfg.Exclude = append(cfg.Exclude, stored.Exclude...)
		}
	} else if !os.IsNotExist(err) {
		return "", "", driveSyncConfig{}, fmt.Errorf("read sync state: %w", err)
	}

	return statePath, rootPath, cfg, nil
}

// resolveDriveSyncStatePath returns the explicit (expanded) state path, or the
// default location inside rootPath, or "" when neither is available.
func resolveDriveSyncStatePath(explicit string, rootPath string) (string, error) {
	explicit = strings.TrimSpace(explicit)
	if explicit != "" {
		expanded, err := config.ExpandPath(explicit)
		if err != nil {
			return "", err
		}
		return expanded, nil
	}
	if rootPath == "" {
		return "", nil
	}
	return filepath.Join(rootPath, driveSyncStateFile), nil
}

// saveDriveSyncState writes cfg to statePath (0600; it names account/folder).
// A no-op when statePath is empty.
func saveDriveSyncState(statePath string, cfg driveSyncConfig) error {
	if statePath == "" {
		return nil
	}
	cfg.Version = driveSyncVersion
	cfg.UpdatedAt = time.Now().UTC().Format(time.RFC3339)

	data, err := json.MarshalIndent(cfg, "", "  ")
	if err != nil {
		return fmt.Errorf("encode sync state: %w", err)
	}
	data = append(data, '\n')

	if err := os.WriteFile(statePath, data, 0o600); err != nil {
		return fmt.Errorf("write sync state: %w", err)
	}
	return nil
}

// localFileInfo is one file found under the local sync root.
type localFileInfo struct {
	Path    string    // slash-separated path relative to the sync root
	Full    string    // absolute/OS-native path
	Size    int64     // size in bytes
	ModTime time.Time // local mtime
	MD5     string    // hex MD5; empty unless --checksum was requested
}

// driveSyncFields limits the Drive listing to what change detection needs.
const driveSyncFields =
"id,name,mimeType,size,modifiedTime,md5Checksum"

// splitDriveItems partitions a listing into files and folders, both keyed by
// slash-separated relative path. When exportDocs is true, Google-native docs
// (application/vnd.google-apps.*) are re-keyed under the file extension they
// would be exported as, so they line up with local exported copies.
// NOTE(review): duplicate Drive names at the same path silently overwrite each
// other in the map — last listing entry wins; confirm acceptable.
func splitDriveItems(items []driveTreeItem, exportDocs bool) (map[string]driveTreeItem, map[string]driveTreeItem) {
	files := map[string]driveTreeItem{}
	folders := map[string]driveTreeItem{}
	for _, it := range items {
		if it.IsFolder() {
			folders[it.Path] = it
			continue
		}
		relPath := it.Path
		if exportDocs && strings.HasPrefix(it.MimeType, "application/vnd.google-apps.") {
			exportExt := driveExportExtension(driveExportMimeType(it.MimeType))
			relPath = replaceExt(relPath, exportExt)
		}
		it.Path = relPath
		files[relPath] = it
	}
	return files, folders
}

// walkLocalFiles walks root and returns the regular files that pass the
// include/exclude filters, keyed by slash-separated relative path. Symlinks
// are skipped. When checksum is true an MD5 is computed (best-effort: a
// checksum failure leaves MD5 empty rather than aborting the walk).
func walkLocalFiles(root string, includes []string, excludes []string, checksum bool) (map[string]localFileInfo, error) {
	files := map[string]localFileInfo{}
	if root == "" {
		return files, nil
	}

	info, err := os.Stat(root)
	if err != nil {
		return nil, err
	}
	if !info.IsDir() {
		return nil, fmt.Errorf("sync root is not a directory: %s", root)
	}

	err = filepath.WalkDir(root, func(path string, d os.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if path == root {
			return nil
		}
		rel, relErr := filepath.Rel(root, path)
		if relErr != nil {
			return relErr
		}
		rel = filepath.ToSlash(rel)
		if d.IsDir() {
			// Excluded directories are pruned only when there are no include
			// patterns: includes match file paths, so with includes present we
			// must still descend into every directory.
			if !allowSyncPath(rel, nil, excludes) && len(includes) == 0 {
				return filepath.SkipDir
			}
			return nil
		}
		if !allowSyncPath(rel, includes, excludes) {
			return nil
		}
		// Skip symlinks entirely (never followed, never synced).
		if d.Type()&os.ModeSymlink != 0 {
			return nil
		}
		info, statErr := d.Info()
		if statErr != nil {
			return statErr
		}
		entry := localFileInfo{
			Path:    rel,
			Full:    path,
			Size:    info.Size(),
			ModTime: info.ModTime(),
		}
		if checksum {
			if sum, sumErr := fileMD5(path); sumErr == nil {
				entry.MD5 = sum
			}
		}
		files[rel] = entry
		return nil
	})
	if err != nil {
		return nil, err
	}
	return files, nil
}

// allowSyncPath reports whether rel passes the filters: with includes present
// the path must match at least one include; any exclude match rejects it.
// An empty rel is always allowed.
func allowSyncPath(rel string, includes []string, excludes []string) bool {
	rel = strings.TrimSpace(rel)
	if rel == "" {
		return true
	}
	if len(includes) > 0 {
		allowed := false
		for _, pattern := range includes {
			if patternMatch(pattern, rel) {
				allowed = true
				break
			}
		}
		if !allowed {
			return false
		}
	}
	for _, pattern := range excludes {
		if patternMatch(pattern, rel) {
			return false
		}
	}
	return true
}

// patternMatch is path.Match with blank patterns and pattern errors treated
// as non-matches. Note path.Match's "*" does not cross "/" separators.
func patternMatch(pattern string, value string) bool {
	pattern = strings.TrimSpace(pattern)
	if pattern == "" {
		return false
	}
	ok, err := path.Match(pattern, value)
	return err == nil && ok
}

// ensureDriveSyncExcludes trims, de-duplicates, and guarantees the state file
// itself is excluded from syncing.
func ensureDriveSyncExcludes(excludes []string) []string {
	seen := map[string]bool{}
	out := make([]string, 0, len(excludes)+1)
	for _, ex := range excludes {
		ex = strings.TrimSpace(ex)
		if ex == "" || seen[ex] {
			continue
		}
		seen[ex] = true
		out = append(out, ex)
	}
	if !seen[driveSyncStateFile] {
		out = append(out, driveSyncStateFile)
	}
	return out
}

// fileMD5 returns the hex MD5 of filePath. MD5 is used only because it is the
// checksum the Drive API exposes — change detection, not security.
func fileMD5(filePath string) (string, error) {
	f, err := os.Open(filePath) //nolint:gosec // user-provided path
	if err != nil {
		return "", err
	}
	defer f.Close()

	hash := md5.New() //nolint:gosec // non-cryptographic checksum
	if _, err := io.Copy(hash, f); err != nil {
		return "", err
	}
	return hex.EncodeToString(hash.Sum(nil)), nil
}

// driveSyncAction is one planned operation (download/upload/delete/mkdir).
type driveSyncAction struct {
	Type     string `json:"type"`
	Path     string `json:"path"`
	Drive    string `json:"driveId,omitempty"`
	MimeType string `json:"mimeType,omitempty"`
	Reason   string `json:"reason,omitempty"`
}

// driveSyncPlan is the full set of planned actions plus per-type counts.
type driveSyncPlan struct {
	Actions []driveSyncAction `json:"actions"`
	Summary driveSyncSummary  `json:"summary"`
}

// driveSyncSummary counts planned actions by type.
type driveSyncSummary struct {
	Download    int `json:"download"`
	Upload      int `json:"upload"`
	DeleteLocal int `json:"deleteLocal"`
	DeleteDrive int `json:"deleteDrive"`
	MkdirLocal  int `json:"mkdirLocal"`
	MkdirDrive  int `json:"mkdirDrive"`
}

// HasDeletes reports whether the plan removes anything (triggers confirmation).
func (p driveSyncPlan) HasDeletes() bool {
	return p.Summary.DeleteLocal > 0 || p.Summary.DeleteDrive > 0
}

func
buildDrivePullPlan(remoteFiles map[string]driveTreeItem, remoteFolders map[string]driveTreeItem, localFiles map[string]localFileInfo, cfg driveSyncConfig, allowDelete bool, checksum bool) driveSyncPlan {
	// buildDrivePullPlan computes the actions needed to make the local tree
	// match Drive: download missing/changed files, optionally delete local
	// files absent from Drive, and create local directories.
	// NOTE: map iteration makes action order nondeterministic, matching the
	// original behavior; apply functions do not depend on order within a type.
	plan := driveSyncPlan{}

	for relPath, remote := range remoteFiles {
		if !allowSyncPath(relPath, cfg.Include, cfg.Exclude) {
			continue
		}
		local, ok := localFiles[relPath]
		if !ok {
			plan.Actions = append(plan.Actions, driveSyncAction{Type: "download", Path: relPath, Drive: remote.ID, MimeType: remote.MimeType, Reason: "missing"})
			plan.Summary.Download++
			ensurePlanDirs(&plan, path.Dir(relPath), "mkdir_local")
			continue
		}
		if needsPull(remote, local, checksum) {
			plan.Actions = append(plan.Actions, driveSyncAction{Type: "download", Path: relPath, Drive: remote.ID, MimeType: remote.MimeType, Reason: "changed"})
			plan.Summary.Download++
			ensurePlanDirs(&plan, path.Dir(relPath), "mkdir_local")
		}
	}

	if allowDelete {
		// A local file not present remotely (and passing the filters) is
		// deleted. (A previous "seen" bookkeeping map was removed: it could
		// only contain paths also present in remoteFiles, which are already
		// skipped above, so the extra check was dead code.)
		for relPath := range localFiles {
			if !allowSyncPath(relPath, cfg.Include, cfg.Exclude) {
				continue
			}
			if _, ok := remoteFiles[relPath]; ok {
				continue
			}
			plan.Actions = append(plan.Actions, driveSyncAction{Type: "delete_local", Path: relPath, Reason: "not in Drive"})
			plan.Summary.DeleteLocal++
		}
	}

	// Mirror remote folder structure locally even when empty.
	for folderPath := range remoteFolders {
		if !allowSyncPath(folderPath, cfg.Include, cfg.Exclude) {
			continue
		}
		if folderPath == "" {
			continue
		}
		ensurePlanDirs(&plan, folderPath, "mkdir_local")
	}

	return plan
}

// buildDrivePushPlan computes the actions needed to make the Drive folder
// match the local tree: upload missing/changed files, optionally delete Drive
// files absent locally, and create Drive folders for new paths.
func buildDrivePushPlan(remoteFiles map[string]driveTreeItem, localFiles map[string]localFileInfo, cfg driveSyncConfig, allowDelete bool, checksum bool) driveSyncPlan {
	plan := driveSyncPlan{}

	for relPath, local := range localFiles {
		if !allowSyncPath(relPath, cfg.Include, cfg.Exclude) {
			continue
		}
		remote, ok := remoteFiles[relPath]
		if !ok {
			plan.Actions = append(plan.Actions, driveSyncAction{Type: "upload", Path: relPath, Reason: "missing"})
			plan.Summary.Upload++
			ensurePlanDirs(&plan, path.Dir(relPath), "mkdir_drive")
			continue
		}
		if needsPush(remote, local, checksum) {
			plan.Actions = append(plan.Actions, driveSyncAction{Type: "upload", Path: relPath, Drive: remote.ID, Reason: "changed"})
			plan.Summary.Upload++
		}
	}

	if allowDelete {
		// Same dead-code removal as in buildDrivePullPlan: any path that was
		// matched above is necessarily in localFiles and already skipped here.
		for relPath, remote := range remoteFiles {
			if !allowSyncPath(relPath, cfg.Include, cfg.Exclude) {
				continue
			}
			if _, ok := localFiles[relPath]; ok {
				continue
			}
			plan.Actions = append(plan.Actions, driveSyncAction{Type: "delete_drive", Path: relPath, Drive: remote.ID, Reason: "not local"})
			plan.Summary.DeleteDrive++
		}
	}

	return plan
}

// ensurePlanDirs appends a mkdir action for dir and each of its ancestors,
// skipping any already planned, and updates the matching summary counter.
func ensurePlanDirs(plan *driveSyncPlan, dir string, actionType string) {
	dir = strings.TrimSpace(dir)
	for dir != "" && dir != "." && dir != "/" {
		if !hasAction(plan.Actions, actionType, dir) {
			plan.Actions = append(plan.Actions, driveSyncAction{Type: actionType, Path: dir})
			switch actionType {
			case "mkdir_local":
				plan.Summary.MkdirLocal++
			case "mkdir_drive":
				plan.Summary.MkdirDrive++
			}
		}
		next := path.Dir(dir)
		if next == dir {
			break
		}
		dir = next
	}
}

// hasAction reports whether an action of the given type and path is planned.
func hasAction(actions []driveSyncAction, actionType string, actionPath string) bool {
	for _, a := range actions {
		if a.Type == actionType && a.Path == actionPath {
			return true
		}
	}
	return false
}

// needsPull reports whether the local copy is stale relative to Drive.
// Precedence: checksums (when both sides have one), then size, then mtime
// (with skew tolerance). An unparseable remote time means "don't download" —
// the conservative choice for pull.
func needsPull(remote driveTreeItem, local localFileInfo, checksum bool) bool {
	if checksum && remote.MD5 != "" && local.MD5 != "" {
		return remote.MD5 != local.MD5
	}
	if remote.Size > 0 && remote.Size != local.Size {
		return true
	}
	remoteTime, err := parseDriveTime(remote.ModifiedTime)
	if err != nil {
		return false
	}
	return remoteTime.After(local.ModTime.Add(timeSkewTolerance))
}

func needsPush(remote
driveTreeItem, local localFileInfo, checksum bool) bool {
	// needsPush reports whether the Drive copy is stale relative to local.
	// Google-native docs are never pushed (uploading exported bytes would not
	// round-trip). Unlike needsPull, an unparseable remote time means "do
	// upload" — the conservative choice for push.
	if strings.HasPrefix(remote.MimeType, "application/vnd.google-apps.") {
		return false
	}
	if checksum && remote.MD5 != "" && local.MD5 != "" {
		return remote.MD5 != local.MD5
	}
	if remote.Size > 0 && remote.Size != local.Size {
		return true
	}
	remoteTime, err := parseDriveTime(remote.ModifiedTime)
	if err != nil {
		return true
	}
	return local.ModTime.After(remoteTime.Add(timeSkewTolerance))
}

// parseDriveTime parses a Drive RFC 3339 timestamp (with or without
// fractional seconds). Empty input is an error.
func parseDriveTime(raw string) (time.Time, error) {
	raw = strings.TrimSpace(raw)
	if raw == "" {
		return time.Time{}, errors.New("empty time")
	}
	if t, err := time.Parse(time.RFC3339Nano, raw); err == nil {
		return t, nil
	}
	return time.Parse(time.RFC3339, raw)
}

// outputDriveSyncPlan renders the plan as JSON (in --json mode) or as a table
// on stdout with summary counters on stderr.
func outputDriveSyncPlan(ctx context.Context, u *ui.UI, plan driveSyncPlan) error {
	if outfmt.IsJSON(ctx) {
		return outfmt.WriteJSON(ctx, os.Stdout, plan)
	}

	w, flush := tableWriter(ctx)
	defer flush()
	fmt.Fprintln(w, "ACTION\tPATH")
	for _, action := range plan.Actions {
		if action.Type == "" {
			continue
		}
		fmt.Fprintf(w, "%s\t%s\n", action.Type, sanitizeTab(action.Path))
	}
	if u != nil {
		u.Err().Printf("downloads\t%d", plan.Summary.Download)
		u.Err().Printf("uploads\t%d", plan.Summary.Upload)
		u.Err().Printf("delete_local\t%d", plan.Summary.DeleteLocal)
		u.Err().Printf("delete_drive\t%d", plan.Summary.DeleteDrive)
		u.Err().Printf("mkdir_local\t%d", plan.Summary.MkdirLocal)
		u.Err().Printf("mkdir_drive\t%d", plan.Summary.MkdirDrive)
	}
	return nil
}

// applyDrivePullPlan executes a pull plan in phases: mkdirs, then downloads,
// then local deletions (missing files already gone are not an error).
func applyDrivePullPlan(ctx context.Context, svc *drive.Service, rootPath string, plan driveSyncPlan) error {
	for _, action := range plan.Actions {
		if action.Type != "mkdir_local" {
			continue
		}
		dir := filepath.Join(rootPath, filepath.FromSlash(action.Path))
		if err := os.MkdirAll(dir, 0o750); err != nil {
			return err
		}
	}
	for _, action := range plan.Actions {
		if action.Type != "download" {
			continue
		}
		dest := filepath.Join(rootPath, filepath.FromSlash(action.Path))
		if _, _, err := downloadDriveFile(ctx, svc, &drive.File{
			Id:       action.Drive,
			Name:     filepath.Base(dest),
			MimeType: action.MimeType,
		}, dest, ""); err != nil {
			return err
		}
	}
	for _, action := range plan.Actions {
		if action.Type != "delete_local" {
			continue
		}
		target := filepath.Join(rootPath, filepath.FromSlash(action.Path))
		if err := os.Remove(target); err != nil && !os.IsNotExist(err) {
			return err
		}
	}
	return nil
}

// applyDrivePushPlan executes a push plan in phases: Drive folder creation,
// uploads, then Drive deletions. Uploads are delegated to a helper so each
// file handle is closed as soon as its upload finishes (the previous inline
// defer f.Close() in the loop held every descriptor open until the whole
// function returned).
func applyDrivePushPlan(ctx context.Context, svc *drive.Service, rootID string, rootPath string, remoteFolders map[string]driveTreeItem, plan driveSyncPlan) error {
	// Seed the folder cache with the root and all known remote folders.
	folderCache := map[string]string{"": rootID}
	for relPath, folder := range remoteFolders {
		if relPath == "" {
			continue
		}
		folderCache[relPath] = folder.ID
	}

	for _, action := range plan.Actions {
		if action.Type != "mkdir_drive" {
			continue
		}
		if _, err := ensureDriveFolder(ctx, svc, rootID, action.Path, folderCache); err != nil {
			return err
		}
	}
	for _, action := range plan.Actions {
		if action.Type != "upload" {
			continue
		}
		localPath := filepath.Join(rootPath, filepath.FromSlash(action.Path))
		parentID, err := ensureDriveFolder(ctx, svc, rootID, path.Dir(action.Path), folderCache)
		if err != nil {
			return err
		}
		if err := uploadDriveSyncFile(ctx, svc, localPath, action.Drive, parentID); err != nil {
			return err
		}
	}
	for _, action := range plan.Actions {
		if action.Type != "delete_drive" {
			continue
		}
		if err := svc.Files.Delete(action.Drive).SupportsAllDrives(true).Context(ctx).Do(); err != nil {
			return err
		}
	}
	return nil
}

// uploadDriveSyncFile uploads one local file, updating fileID when non-empty
// and otherwise creating a new file under parentID. The file handle is closed
// when this helper returns.
func uploadDriveSyncFile(ctx context.Context, svc *drive.Service, localPath string, fileID string, parentID string) error {
	f, err := os.Open(localPath) //nolint:gosec // user-provided path
	if err != nil {
		return err
	}
	defer f.Close()

	mimeType := guessMimeType(localPath)
	meta := &drive.File{Name: filepath.Base(localPath)}

	if fileID != "" {
		_, err = svc.Files.Update(fileID, meta).
			SupportsAllDrives(true).
			Media(f, gapi.ContentType(mimeType)).
			Fields("id").
			Context(ctx).
			Do()
		return err
	}
	meta.Parents = []string{parentID}
	_, err = svc.Files.Create(meta).
		SupportsAllDrives(true).
		Media(f, gapi.ContentType(mimeType)).
		Fields("id").
		Context(ctx).
		Do()
	return err
}

// ensureDriveFolder resolves (creating as needed) the Drive folder for the
// slash-separated dirPath under rootID, memoizing every segment in cache.
func ensureDriveFolder(ctx context.Context, svc *drive.Service, rootID string, dirPath string, cache map[string]string) (string, error) {
	dirPath = strings.TrimSpace(dirPath)
	if dirPath == "" || dirPath == "." || dirPath == "/" {
		return rootID, nil
	}
	if id, ok := cache[dirPath]; ok {
		return id, nil
	}

	parts := strings.Split(dirPath, "/")
	parentID := rootID
	curPath := ""
	for _, part := range parts {
		if part == "" || part == "." {
			continue
		}
		nextPath := part
		if curPath != "" {
			nextPath = curPath + "/" + part
		}
		if id, ok := cache[nextPath]; ok {
			parentID = id
			curPath = nextPath
			continue
		}

		folder := &drive.File{
			Name:     part,
			MimeType: driveMimeFolder,
			Parents:  []string{parentID},
		}
		created, err := svc.Files.Create(folder).
			SupportsAllDrives(true).
			Fields("id").
			Context(ctx).
+ Do() + if err != nil { + return "", err + } + cache[nextPath] = created.Id + parentID = created.Id + curPath = nextPath + } + return parentID, nil +} diff --git a/internal/cmd/drive_sync_test.go b/internal/cmd/drive_sync_test.go new file mode 100644 index 00000000..c1b6d17a --- /dev/null +++ b/internal/cmd/drive_sync_test.go @@ -0,0 +1,66 @@ +package cmd + +import ( + "testing" + "time" +) + +func TestAllowSyncPath(t *testing.T) { + includes := []string{"foo/*.txt"} + excludes := []string{"foo/bad.txt"} + + if !allowSyncPath("foo/good.txt", includes, excludes) { + t.Fatalf("expected allow for good.txt") + } + if allowSyncPath("foo/bad.txt", includes, excludes) { + t.Fatalf("expected block for bad.txt") + } + if allowSyncPath("bar.txt", includes, excludes) { + t.Fatalf("expected block for bar.txt") + } +} + +func TestNeedsPushSkipsGoogleDocs(t *testing.T) { + remote := driveTreeItem{MimeType: driveMimeGoogleDoc} + local := localFileInfo{} + if needsPush(remote, local, false) { + t.Fatalf("expected google doc to be skipped on push") + } +} + +func TestBuildDrivePullPlan(t *testing.T) { + now := time.Now().UTC() + remoteFiles := map[string]driveTreeItem{ + "a.txt": {ID: "1", Size: 10, ModifiedTime: now.Format(time.RFC3339)}, + "b.txt": {ID: "2", Size: 10, ModifiedTime: now.Format(time.RFC3339)}, + } + localFiles := map[string]localFileInfo{ + "a.txt": {Size: 10, ModTime: now}, + "c.txt": {Size: 5, ModTime: now}, + } + cfg := driveSyncConfig{} + + plan := buildDrivePullPlan(remoteFiles, nil, localFiles, cfg, true, false) + if plan.Summary.Download != 1 { + t.Fatalf("download count = %d, want 1", plan.Summary.Download) + } + if plan.Summary.DeleteLocal != 1 { + t.Fatalf("delete_local count = %d, want 1", plan.Summary.DeleteLocal) + } +} + +func TestBuildDrivePushPlan(t *testing.T) { + now := time.Now().UTC() + remoteFiles := map[string]driveTreeItem{ + "a.txt": {ID: "1", Size: 5, ModifiedTime: now.Add(-time.Hour).Format(time.RFC3339)}, + } + localFiles := 
map[string]localFileInfo{ + "a.txt": {Size: 10, ModTime: now}, + } + cfg := driveSyncConfig{} + + plan := buildDrivePushPlan(remoteFiles, localFiles, cfg, false, false) + if plan.Summary.Upload != 1 { + t.Fatalf("upload count = %d, want 1", plan.Summary.Upload) + } +}