diff options
| author | Peter Stone <thepeterstone@gmail.com> | 2026-01-12 09:27:16 -1000 |
|---|---|---|
| committer | Peter Stone <thepeterstone@gmail.com> | 2026-01-12 09:27:16 -1000 |
| commit | 9fe0998436488537a8a2e8ffeefb0c4424b41c60 (patch) | |
| tree | ce877f04e60a187c2bd0e481e80298ec5e7cdf80 /internal/api/obsidian.go | |
Initial commit: Personal Consolidation Dashboard (Phase 1 Complete)
Implemented a unified web dashboard aggregating tasks, notes, and meal planning:
Core Features:
- Trello integration (PRIMARY feature - boards, cards, lists)
- Todoist integration (tasks and projects)
- Obsidian integration (20 most recent notes)
- PlanToEat integration (optional - 7-day meal planning)
- Mobile-responsive web UI with auto-refresh (5 min)
- SQLite caching with 5-minute TTL
- AI agent endpoint with Bearer token authentication
Technical Implementation:
- Go 1.21+ backend with chi router
- Interface-based API client design for testability
- Parallel data fetching with goroutines
- Graceful degradation (partial data on API failures)
- .env file loading with godotenv
- Comprehensive test coverage (9/9 tests passing)
Bug Fixes:
- Fixed .env file not being loaded at startup
- Fixed nil pointer dereference with optional API clients (typed nil interface gotcha)
Documentation:
- START_HERE.md - Quick 5-minute setup guide
- QUICKSTART.md - Fast track setup
- SETUP_GUIDE.md - Detailed step-by-step instructions
- PROJECT_SUMMARY.md - Complete project overview
- CLAUDE.md - Guide for Claude Code instances
- AI_AGENT_ACCESS.md - AI agent design document
- AI_AGENT_SETUP.md - Claude.ai integration guide
- TRELLO_AUTH_UPDATE.md - New Power-Up auth process
Statistics:
- Binary: 17MB
- Code: 2,667 lines
- Tests: 5 unit + 4 acceptance tests (all passing)
- Dependencies: chi, sqlite3, godotenv
Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
Diffstat (limited to 'internal/api/obsidian.go')
| -rw-r--r-- | internal/api/obsidian.go | 216 |
1 file changed, 216 insertions, 0 deletions
diff --git a/internal/api/obsidian.go b/internal/api/obsidian.go new file mode 100644 index 0000000..a8ba80d --- /dev/null +++ b/internal/api/obsidian.go @@ -0,0 +1,216 @@ +package api + +import ( + "bufio" + "context" + "fmt" + "os" + "path/filepath" + "regexp" + "sort" + "strings" + "time" + + "task-dashboard/internal/models" +) + +// ObsidianClient handles reading notes from an Obsidian vault +type ObsidianClient struct { + vaultPath string +} + +// NewObsidianClient creates a new Obsidian vault reader +func NewObsidianClient(vaultPath string) *ObsidianClient { + return &ObsidianClient{ + vaultPath: vaultPath, + } +} + +// fileInfo holds file metadata for sorting +type fileInfo struct { + path string + modTime time.Time +} + +// GetNotes reads and returns the most recently modified notes from the vault +func (c *ObsidianClient) GetNotes(ctx context.Context, limit int) ([]models.Note, error) { + if c.vaultPath == "" { + return nil, fmt.Errorf("obsidian vault path not configured") + } + + // Check if vault path exists + if _, err := os.Stat(c.vaultPath); os.IsNotExist(err) { + return nil, fmt.Errorf("vault path does not exist: %s", c.vaultPath) + } + + // Collect all markdown files with their modification times + var files []fileInfo + + err := filepath.Walk(c.vaultPath, func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil // Skip files we can't access + } + + // Skip directories and non-markdown files + if info.IsDir() || !strings.HasSuffix(info.Name(), ".md") { + return nil + } + + // Skip hidden files and directories + if strings.HasPrefix(info.Name(), ".") { + return nil + } + + files = append(files, fileInfo{ + path: path, + modTime: info.ModTime(), + }) + + return nil + }) + + if err != nil { + return nil, fmt.Errorf("failed to walk vault directory: %w", err) + } + + // Sort by modification time (most recent first) + sort.Slice(files, func(i, j int) bool { + return files[i].modTime.After(files[j].modTime) + }) + + // Limit 
the number of files to process + if limit > 0 && len(files) > limit { + files = files[:limit] + } + + // Parse each file + notes := make([]models.Note, 0, len(files)) + for _, file := range files { + note, err := c.parseMarkdownFile(file.path, file.modTime) + if err != nil { + // Skip files that fail to parse + continue + } + notes = append(notes, *note) + } + + return notes, nil +} + +// parseMarkdownFile reads and parses a markdown file +func (c *ObsidianClient) parseMarkdownFile(path string, modTime time.Time) (*models.Note, error) { + file, err := os.Open(path) + if err != nil { + return nil, err + } + defer file.Close() + + scanner := bufio.NewScanner(file) + + var content strings.Builder + var tags []string + inFrontmatter := false + lineCount := 0 + + // Parse file + for scanner.Scan() { + line := scanner.Text() + lineCount++ + + // Check for YAML frontmatter + if lineCount == 1 && line == "---" { + inFrontmatter = true + continue + } + + if inFrontmatter { + if line == "---" { + inFrontmatter = false + continue + } + // Extract tags from frontmatter + if strings.HasPrefix(line, "tags:") { + tagsStr := strings.TrimPrefix(line, "tags:") + tagsStr = strings.Trim(tagsStr, " []") + if tagsStr != "" { + tags = strings.Split(tagsStr, ",") + for i, tag := range tags { + tags[i] = strings.TrimSpace(tag) + } + } + } + continue + } + + // Add to content (limit to preview) + if content.Len() < 500 { // Limit to ~500 chars + content.WriteString(line) + content.WriteString("\n") + } + } + + if err := scanner.Err(); err != nil { + return nil, err + } + + // Extract inline tags (e.g., #tag) + inlineTags := extractInlineTags(content.String()) + tags = append(tags, inlineTags...) 
+ tags = uniqueStrings(tags) + + // Get filename and title + filename := filepath.Base(path) + title := strings.TrimSuffix(filename, ".md") + + // Try to extract title from first H1 heading + contentStr := content.String() + h1Regex := regexp.MustCompile(`^#\s+(.+)$`) + lines := strings.Split(contentStr, "\n") + for _, line := range lines { + if matches := h1Regex.FindStringSubmatch(line); len(matches) > 1 { + title = matches[1] + break + } + } + + note := &models.Note{ + Filename: filename, + Title: title, + Content: strings.TrimSpace(contentStr), + Modified: modTime, + Path: path, + Tags: tags, + } + + return note, nil +} + +// extractInlineTags finds all #tags in the content +func extractInlineTags(content string) []string { + tagRegex := regexp.MustCompile(`#([a-zA-Z0-9_-]+)`) + matches := tagRegex.FindAllStringSubmatch(content, -1) + + tags := make([]string, 0, len(matches)) + for _, match := range matches { + if len(match) > 1 { + tags = append(tags, match[1]) + } + } + + return tags +} + +// uniqueStrings returns a slice with duplicate strings removed +func uniqueStrings(slice []string) []string { + seen := make(map[string]bool) + result := make([]string, 0, len(slice)) + + for _, item := range slice { + if !seen[item] && item != "" { + seen[item] = true + result = append(result, item) + } + } + + return result +} |
