summaryrefslogtreecommitdiff
path: root/internal/api/plantoeat.go
diff options
context:
space:
mode:
author	Peter Stone <thepeterstone@gmail.com>	2026-01-24 20:12:07 -1000
committer	Peter Stone <thepeterstone@gmail.com>	2026-01-24 20:12:07 -1000
commit	c290113bd1a8af694b648bba4c801e00b049683a (patch)
tree	75d066a74d0f3e596d3fbe5bd89f8e2d449ca011	/internal/api/plantoeat.go
parent	b69d2d5fc8779f43b1ac789605318488efc91361 (diff)
Add Shopping tab combining Trello and PlanToEat lists
- New Shopping tab aggregates items from Trello Shopping board and PlanToEat
- Items grouped by store, then by category (for PlanToEat)
- Trello list names treated as store names
- Replace PlanToEat meals API with web scraping (uses session cookie)
- Add error logging for PlanToEat fetch operations
- Recipe links now point to cooking view (/recipes/{id}/cook/{event_id})

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Diffstat (limited to 'internal/api/plantoeat.go')
-rw-r--r--	internal/api/plantoeat.go	285
1 file changed, 257 insertions, 28 deletions
diff --git a/internal/api/plantoeat.go b/internal/api/plantoeat.go
index eb29c63..a7fdf58 100644
--- a/internal/api/plantoeat.go
+++ b/internal/api/plantoeat.go
@@ -3,17 +3,27 @@ package api
import (
"context"
"fmt"
+ "io"
+ "log"
+ "net/http"
+ "strings"
"time"
+ "github.com/PuerkitoBio/goquery"
+
"task-dashboard/internal/models"
)
-const planToEatBaseURL = "https://www.plantoeat.com/api/v2"
+const (
+ planToEatBaseURL = "https://www.plantoeat.com/api/v2"
+ planToEatWebURL = "https://app.plantoeat.com"
+)
// PlanToEatClient handles interactions with the PlanToEat API
type PlanToEatClient struct {
BaseClient
- apiKey string
+ apiKey string
+ sessionCookie string // For web scraping endpoints
}
// NewPlanToEatClient creates a new PlanToEat API client
@@ -24,6 +34,11 @@ func NewPlanToEatClient(apiKey string) *PlanToEatClient {
}
}
+// SetSessionCookie sets the session cookie for web scraping endpoints
+func (c *PlanToEatClient) SetSessionCookie(cookie string) {
+ c.sessionCookie = cookie
+}
+
func (c *PlanToEatClient) authHeaders() map[string]string {
return map[string]string{"Authorization": "Bearer " + c.apiKey}
}
@@ -45,53 +60,146 @@ type planToEatResponse struct {
Items []planToEatPlannerItem `json:"items"`
}
-// GetUpcomingMeals fetches meals for the next N days
+// GetUpcomingMeals fetches meals by scraping the planner web interface
+// Requires a valid session cookie set via SetSessionCookie
func (c *PlanToEatClient) GetUpcomingMeals(ctx context.Context, days int) ([]models.Meal, error) {
+ if c.sessionCookie == "" {
+ return nil, fmt.Errorf("session cookie required for meals - use SetSessionCookie")
+ }
+
if days <= 0 {
days = 7
}
- startDate := time.Now()
- endDate := startDate.AddDate(0, 0, days)
+ req, err := http.NewRequestWithContext(ctx, "GET", planToEatWebURL+"/planner", nil)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create request: %w", err)
+ }
+
+ req.Header.Set("Cookie", c.sessionCookie)
+ req.Header.Set("Accept", "text/html")
+ req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36")
- path := fmt.Sprintf("/planner_items?start_date=%s&end_date=%s",
- startDate.Format("2006-01-02"),
- endDate.Format("2006-01-02"))
+ resp, err := c.HTTPClient.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("request failed: %w", err)
+ }
+ defer resp.Body.Close()
+
+ log.Printf("DEBUG [PlanToEat/Meals]: Response status %d", resp.StatusCode)
+
+ if resp.StatusCode != http.StatusOK {
+ return nil, fmt.Errorf("unexpected status %d (session may be expired)", resp.StatusCode)
+ }
+
+ return parsePlannerHTML(resp.Body, days)
+}
+
+// parsePlannerHTML extracts meals from the planner page HTML
+func parsePlannerHTML(body io.Reader, days int) ([]models.Meal, error) {
+ doc, err := goquery.NewDocumentFromReader(body)
+ if err != nil {
+ return nil, fmt.Errorf("failed to parse HTML: %w", err)
+ }
- var apiResponse planToEatResponse
- if err := c.Get(ctx, path, c.authHeaders(), &apiResponse); err != nil {
- return nil, fmt.Errorf("failed to fetch meals: %w", err)
+ // Check for login page
+ title := doc.Find("title").First().Text()
+ titleLower := strings.ToLower(title)
+ log.Printf("DEBUG [PlanToEat/Meals]: Page title: %q", title)
+ if strings.Contains(titleLower, "login") || strings.Contains(titleLower, "log in") || strings.Contains(titleLower, "sign in") {
+ return nil, fmt.Errorf("session expired - got login page (title: %s)", title)
}
- meals := make([]models.Meal, 0, len(apiResponse.Items))
- for _, item := range apiResponse.Items {
- mealDate, err := time.Parse("2006-01-02", item.Date)
+ var meals []models.Meal
+ now := time.Now()
+ today := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location())
+ endDate := today.AddDate(0, 0, days)
+
+ // PlanToEat structure:
+ // - Day cells have data-date="YYYY-MM-DD"
+ // - Within each day, sections: <div class="time" data-section="breakfast|lunch|dinner|xtra">
+ // - Meal items inside sections: <div data-id="..." data-recipe-id="...">
+ // - Recipe name: <a class="title recipe">Name</a>
+ // - Non-recipe: <span class="title item">Name</span>
+
+ // Find all day cells with data-date
+ doc.Find("[data-date]").Each(func(_ int, dayEl *goquery.Selection) {
+ dateStr, exists := dayEl.Attr("data-date")
+ if !exists {
+ return
+ }
+
+ mealDate, err := time.Parse("2006-01-02", dateStr)
if err != nil {
- continue
+ return
}
- meals = append(meals, models.Meal{
- ID: fmt.Sprintf("%d", item.ID),
- RecipeName: item.Recipe.Title,
- Date: mealDate,
- MealType: normalizeMealType(item.MealType),
- RecipeURL: item.Recipe.URL,
+ // Skip dates outside our range
+ if mealDate.Before(today) || mealDate.After(endDate) {
+ return
+ }
+
+ // Find meal sections within this day
+ dayEl.Find("div.time[data-section]").Each(func(_ int, sectionEl *goquery.Selection) {
+ section, _ := sectionEl.Attr("data-section")
+ mealType := normalizeMealType(section)
+
+ // Find meal items within this section
+ sectionEl.Find("[data-id]").Each(func(_ int, itemEl *goquery.Selection) {
+ meal := models.Meal{
+ Date: mealDate,
+ MealType: mealType,
+ }
+
+ // Get ID
+ if id, exists := itemEl.Attr("data-id"); exists {
+ meal.ID = id
+ }
+
+ // Try to get recipe name from a.title.recipe or span.title.item
+ if recipeLink := itemEl.Find("a.title.recipe, a.title").First(); recipeLink.Length() > 0 {
+ meal.RecipeName = strings.TrimSpace(recipeLink.Text())
+ if href, exists := recipeLink.Attr("href"); exists {
+ // Convert /recipes/{id}/{event_id} to /recipes/{id}/cook/{event_id}
+ if strings.HasPrefix(href, "/recipes/") {
+ parts := strings.Split(href, "/")
+ if len(parts) == 4 { // ["", "recipes", "id", "event_id"]
+ href = fmt.Sprintf("/recipes/%s/cook/%s", parts[2], parts[3])
+ }
+ }
+ if !strings.HasPrefix(href, "http") {
+ meal.RecipeURL = planToEatWebURL + href
+ } else {
+ meal.RecipeURL = href
+ }
+ }
+ } else if titleSpan := itemEl.Find("span.title.item, span.title").First(); titleSpan.Length() > 0 {
+ meal.RecipeName = strings.TrimSpace(titleSpan.Text())
+ }
+
+ if meal.RecipeName != "" {
+ meals = append(meals, meal)
+ }
+ })
})
- }
+ })
+
+ log.Printf("DEBUG [PlanToEat/Meals]: Found %d meals", len(meals))
return meals, nil
}
-// normalizeMealType ensures meal type matches our expected values
+// normalizeMealType ensures meal type matches DB constraint (breakfast, lunch, dinner, snack)
func normalizeMealType(mealType string) string {
- switch mealType {
- case "breakfast", "Breakfast":
+ lower := strings.ToLower(strings.TrimSpace(mealType))
+ switch lower {
+ case "breakfast":
return "breakfast"
- case "lunch", "Lunch":
+ case "lunch":
return "lunch"
- case "dinner", "Dinner":
+ case "dinner":
return "dinner"
- case "snack", "Snack":
+ case "snack", "xtra", "snacks":
return "snack"
default:
return "dinner"
@@ -107,3 +215,124 @@ func (c *PlanToEatClient) GetRecipes(ctx context.Context) error {
func (c *PlanToEatClient) AddMealToPlanner(ctx context.Context, recipeID string, date time.Time, mealType string) error {
return fmt.Errorf("not implemented yet")
}
+
+// GetShoppingList fetches the shopping list by scraping the web interface
+// Requires a valid session cookie set via SetSessionCookie
+func (c *PlanToEatClient) GetShoppingList(ctx context.Context) ([]models.ShoppingItem, error) {
+ if c.sessionCookie == "" {
+ return nil, fmt.Errorf("session cookie required for shopping list - use SetSessionCookie")
+ }
+
+ req, err := http.NewRequestWithContext(ctx, "GET", planToEatWebURL+"/shopping_lists", nil)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create request: %w", err)
+ }
+
+ req.Header.Set("Cookie", c.sessionCookie)
+ req.Header.Set("Accept", "text/html")
+ req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36")
+
+ log.Printf("DEBUG [PlanToEat/Shopping]: Fetching %s", planToEatWebURL+"/shopping_lists")
+
+ resp, err := c.HTTPClient.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("request failed: %w", err)
+ }
+ defer resp.Body.Close()
+
+ log.Printf("DEBUG [PlanToEat/Shopping]: Response status %d", resp.StatusCode)
+
+ if resp.StatusCode != http.StatusOK {
+ return nil, fmt.Errorf("unexpected status %d (session may be expired)", resp.StatusCode)
+ }
+
+ return parseShoppingListHTML(resp.Body)
+}
+
+// parseShoppingListHTML extracts shopping items from the HTML response
+func parseShoppingListHTML(body io.Reader) ([]models.ShoppingItem, error) {
+ doc, err := goquery.NewDocumentFromReader(body)
+ if err != nil {
+ return nil, fmt.Errorf("failed to parse HTML: %w", err)
+ }
+
+ var items []models.ShoppingItem
+ currentStore := ""
+ currentCategory := ""
+
+ // Debug: log page title and structure hints
+ title := doc.Find("title").First().Text()
+ log.Printf("DEBUG [PlanToEat/Shopping]: Page title: %q", title)
+
+ // Check if we got a login page
+ titleLower := strings.ToLower(title)
+ if strings.Contains(titleLower, "login") || strings.Contains(titleLower, "log in") || strings.Contains(titleLower, "sign in") {
+ return nil, fmt.Errorf("session expired - got login page (title: %s)", title)
+ }
+
+ // Debug: count elements to understand structure
+ storeCount := doc.Find("div.store").Length()
+ log.Printf("DEBUG [PlanToEat/Shopping]: Found %d div.store elements", storeCount)
+
+ // Iterate through stores
+ doc.Find("div.store").Each(func(_ int, store *goquery.Selection) {
+ currentStore = strings.TrimSpace(store.Find("h4.store_name").First().Text())
+ // Clean up store name (remove count and icons)
+ if idx := strings.Index(currentStore, "("); idx > 0 {
+ currentStore = strings.TrimSpace(currentStore[:idx])
+ }
+
+ log.Printf("DEBUG [PlanToEat/Shopping]: Processing store: %q", currentStore)
+
+ // Iterate through categories within store
+ store.Find("div.category-box").Each(func(_ int, catBox *goquery.Selection) {
+ currentCategory = strings.TrimSpace(catBox.Find("p.category-title span").First().Text())
+
+ // Iterate through items in category
+ catBox.Find("li.sli").Each(func(_ int, li *goquery.Selection) {
+ item := models.ShoppingItem{
+ Store: currentStore,
+ Category: currentCategory,
+ }
+
+ // Extract ID from class (e.g., "sli i493745889")
+ if class, exists := li.Attr("class"); exists {
+ for _, c := range strings.Fields(class) {
+ if strings.HasPrefix(c, "i") && len(c) > 1 {
+ item.ID = c[1:] // Remove 'i' prefix
+ break
+ }
+ }
+ }
+
+ // Extract name
+ item.Name = strings.TrimSpace(li.Find("strong").First().Text())
+
+ // Extract quantity
+ item.Quantity = strings.TrimSpace(li.Find("span.quan").First().Text())
+ // Clean up HTML entities in quantity
+ item.Quantity = cleanQuantity(item.Quantity)
+
+ // Check if item is checked (has specific class or attribute)
+ if li.HasClass("checked") || li.HasClass("crossed") {
+ item.Checked = true
+ }
+
+ if item.Name != "" {
+ items = append(items, item)
+ }
+ })
+ })
+ })
+
+ log.Printf("DEBUG [PlanToEat/Shopping]: Parsed %d items total", len(items))
+
+ return items, nil
+}
+
+// cleanQuantity removes HTML entities and extra whitespace from quantity strings
+func cleanQuantity(q string) string {
+ q = strings.ReplaceAll(q, "\u00a0", " ") // non-breaking space
+ q = strings.ReplaceAll(q, " ", " ")
+ return strings.TrimSpace(q)
+}