summaryrefslogtreecommitdiff
path: root/internal/api/plantoeat.go
blob: 5c24cc107bb07d9b1c14f0d48331298209494472 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
package api

import (
	"context"
	"errors"
	"fmt"
	"io"
	"log"
	"net/http"
	"strings"
	"time"

	"github.com/PuerkitoBio/goquery"

	"task-dashboard/internal/models"
)

const (
	// planToEatBaseURL is the v2 API root passed to BaseClient; the scraping
	// methods in this file hit planToEatWebURL instead.
	planToEatBaseURL = "https://www.plantoeat.com/api/v2"
	// planToEatWebURL is the web-app root used for session-cookie scraping
	// of the planner and shopping-list pages.
	planToEatWebURL  = "https://app.plantoeat.com"
)

// PlanToEatClient handles interactions with the PlanToEat API
type PlanToEatClient struct {
	BaseClient
	apiKey        string // v2 API key (not used by the scraping methods below)
	sessionCookie string // For web scraping endpoints
}

// NewPlanToEatClient creates a new PlanToEat API client
func NewPlanToEatClient(apiKey string) *PlanToEatClient {
	client := &PlanToEatClient{apiKey: apiKey}
	client.BaseClient = NewBaseClient(planToEatBaseURL)
	return client
}

// SetSessionCookie stores the browser session cookie that the scraping
// methods (GetUpcomingMeals, GetShoppingList) send with their requests.
func (c *PlanToEatClient) SetSessionCookie(cookie string) {
	c.sessionCookie = cookie
}

// GetUpcomingMeals fetches meals by scraping the planner web interface
// Requires a valid session cookie set via SetSessionCookie
func (c *PlanToEatClient) GetUpcomingMeals(ctx context.Context, days int) ([]models.Meal, error) {
	if c.sessionCookie == "" {
		return nil, fmt.Errorf("session cookie required for meals - use SetSessionCookie")
	}

	// Default to a one-week window when the caller passes a non-positive count.
	if days <= 0 {
		days = 7
	}

	plannerURL := planToEatWebURL + "/planner"
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, plannerURL, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to create request: %w", err)
	}

	// The planner page is only served to authenticated sessions, so forward
	// the stored cookie along with browser-like headers.
	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36")
	req.Header.Set("Accept", "text/html")
	req.Header.Set("Cookie", c.sessionCookie)

	resp, err := c.HTTPClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("request failed: %w", err)
	}
	defer func() { _ = resp.Body.Close() }()

	log.Printf("DEBUG [PlanToEat/Meals]: Response status %d", resp.StatusCode)

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status %d (session may be expired)", resp.StatusCode)
	}

	return parsePlannerHTML(resp.Body, days)
}

// parsePlannerHTML extracts meals from the planner page HTML.
// It returns an error when the HTML cannot be parsed or when the page is a
// login page (expired session); otherwise it returns every named meal whose
// data-date falls between today and today+days (inclusive on both ends).
func parsePlannerHTML(body io.Reader, days int) ([]models.Meal, error) {
	doc, err := goquery.NewDocumentFromReader(body)
	if err != nil {
		return nil, fmt.Errorf("failed to parse HTML: %w", err)
	}

	// Check for login page: an expired session yields a login page with 200 OK,
	// so detect it via the <title> rather than the status code.
	title := doc.Find("title").First().Text()
	titleLower := strings.ToLower(title)
	log.Printf("DEBUG [PlanToEat/Meals]: Page title: %q", title)
	if strings.Contains(titleLower, "login") || strings.Contains(titleLower, "log in") || strings.Contains(titleLower, "sign in") {
		return nil, fmt.Errorf("session expired - got login page (title: %s)", title)
	}

	var meals []models.Meal
	now := time.Now()
	// Truncate to local midnight so date comparisons ignore the time of day.
	today := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location())
	// NOTE(review): endDate is inclusive below, so the window spans days+1
	// calendar days (today through today+days) — confirm this is intended.
	endDate := today.AddDate(0, 0, days)

	// PlanToEat structure:
	// - Day cells have data-date="YYYY-MM-DD"
	// - Within each day, sections: <div class="time" data-section="breakfast|lunch|dinner|xtra">
	// - Meal items inside sections: <div data-id="..." data-recipe-id="...">
	// - Recipe name: <a class="title recipe">Name</a>
	// - Non-recipe: <span class="title item">Name</span>

	// Find all day cells with data-date
	doc.Find("[data-date]").Each(func(_ int, dayEl *goquery.Selection) {
		dateStr, exists := dayEl.Attr("data-date")
		if !exists {
			return
		}

		// Unparseable dates are silently skipped (malformed markup).
		mealDate, err := time.Parse("2006-01-02", dateStr)
		if err != nil {
			return
		}

		// Skip dates outside our range
		if mealDate.Before(today) || mealDate.After(endDate) {
			return
		}

		// Find meal sections within this day
		dayEl.Find("div.time[data-section]").Each(func(_ int, sectionEl *goquery.Selection) {
			section, _ := sectionEl.Attr("data-section")
			mealType := normalizeMealType(section)

			// Find meal items within this section
			sectionEl.Find("[data-id]").Each(func(_ int, itemEl *goquery.Selection) {
				meal := models.Meal{
					Date:     mealDate,
					MealType: mealType,
				}

				// Get ID
				if id, exists := itemEl.Attr("data-id"); exists {
					meal.ID = id
				}

				// Try to get recipe name from a.title.recipe or span.title.item
				if recipeLink := itemEl.Find("a.title.recipe, a.title").First(); recipeLink.Length() > 0 {
					meal.RecipeName = strings.TrimSpace(recipeLink.Text())
					if href, exists := recipeLink.Attr("href"); exists {
						// Convert /recipes/{id}/{event_id} to /recipes/{id}/cook/{event_id}
						// so the stored URL deep-links to the "cook" view.
						if strings.HasPrefix(href, "/recipes/") {
							parts := strings.Split(href, "/")
							if len(parts) == 4 { // ["", "recipes", "id", "event_id"]
								href = fmt.Sprintf("/recipes/%s/cook/%s", parts[2], parts[3])
							}
						}
						// Relative hrefs are resolved against the web-app root.
						if !strings.HasPrefix(href, "http") {
							meal.RecipeURL = planToEatWebURL + href
						} else {
							meal.RecipeURL = href
						}
					}
				} else if titleSpan := itemEl.Find("span.title.item, span.title").First(); titleSpan.Length() > 0 {
					// Non-recipe planner entries use a span instead of a link.
					meal.RecipeName = strings.TrimSpace(titleSpan.Text())
				}

				// Items without any name are dropped (likely structural markup).
				if meal.RecipeName != "" {
					meals = append(meals, meal)
				}
			})
		})
	})

	log.Printf("DEBUG [PlanToEat/Meals]: Found %d meals", len(meals))

	return meals, nil
}

// normalizeMealType ensures meal type matches DB constraint (breakfast, lunch, dinner, snack).
// Input is trimmed and lowercased first; anything unrecognized maps to "dinner".
func normalizeMealType(mealType string) string {
	normalized := strings.ToLower(strings.TrimSpace(mealType))
	if normalized == "breakfast" || normalized == "lunch" || normalized == "dinner" {
		return normalized
	}
	// PlanToEat labels the snack section "xtra" in its markup.
	if normalized == "snack" || normalized == "snacks" || normalized == "xtra" {
		return "snack"
	}
	return "dinner"
}

// GetRecipes fetches recipes (for Phase 2).
// Currently a stub; always returns an error.
func (c *PlanToEatClient) GetRecipes(ctx context.Context) error {
	// errors.New instead of fmt.Errorf: no format verbs are used (staticcheck).
	return errors.New("not implemented yet")
}

// AddMealToPlanner adds a meal to the planner (for Phase 2).
// Currently a stub; always returns an error.
func (c *PlanToEatClient) AddMealToPlanner(ctx context.Context, recipeID string, date time.Time, mealType string) error {
	// errors.New instead of fmt.Errorf: no format verbs are used (staticcheck).
	return errors.New("not implemented yet")
}

// GetShoppingList fetches the shopping list by scraping the web interface
// Requires a valid session cookie set via SetSessionCookie
func (c *PlanToEatClient) GetShoppingList(ctx context.Context) ([]models.ShoppingItem, error) {
	if c.sessionCookie == "" {
		return nil, fmt.Errorf("session cookie required for shopping list - use SetSessionCookie")
	}

	listURL := planToEatWebURL + "/shopping_lists"
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, listURL, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to create request: %w", err)
	}

	// Authenticated-page scrape: send the stored cookie plus browser-like headers.
	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36")
	req.Header.Set("Accept", "text/html")
	req.Header.Set("Cookie", c.sessionCookie)

	log.Printf("DEBUG [PlanToEat/Shopping]: Fetching %s", planToEatWebURL+"/shopping_lists")

	resp, err := c.HTTPClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("request failed: %w", err)
	}
	defer func() { _ = resp.Body.Close() }()

	log.Printf("DEBUG [PlanToEat/Shopping]: Response status %d", resp.StatusCode)

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status %d (session may be expired)", resp.StatusCode)
	}

	return parseShoppingListHTML(resp.Body)
}

// parseShoppingListHTML extracts shopping items from the HTML response.
// Expected markup: div.store > h4.store_name, then div.category-box >
// p.category-title span, then li.sli items. Returns an error for unparseable
// HTML or a login page (expired session); items without a name are dropped.
func parseShoppingListHTML(body io.Reader) ([]models.ShoppingItem, error) {
	doc, err := goquery.NewDocumentFromReader(body)
	if err != nil {
		return nil, fmt.Errorf("failed to parse HTML: %w", err)
	}

	var items []models.ShoppingItem
	// NOTE(review): these are reassigned at the top of each loop iteration,
	// so they could be loop-local; kept package of the closure state as-is.
	currentStore := ""
	currentCategory := ""

	// Debug: log page title and structure hints
	title := doc.Find("title").First().Text()
	log.Printf("DEBUG [PlanToEat/Shopping]: Page title: %q", title)

	// Check if we got a login page — an expired session serves it with 200 OK,
	// so the status code alone cannot detect expiry.
	titleLower := strings.ToLower(title)
	if strings.Contains(titleLower, "login") || strings.Contains(titleLower, "log in") || strings.Contains(titleLower, "sign in") {
		return nil, fmt.Errorf("session expired - got login page (title: %s)", title)
	}

	// Debug: count elements to understand structure
	storeCount := doc.Find("div.store").Length()
	log.Printf("DEBUG [PlanToEat/Shopping]: Found %d div.store elements", storeCount)

	// Iterate through stores
	doc.Find("div.store").Each(func(_ int, store *goquery.Selection) {
		currentStore = strings.TrimSpace(store.Find("h4.store_name").First().Text())
		// Clean up store name (remove count and icons) — everything from the
		// first "(" onward is the item count appended by the page.
		if idx := strings.Index(currentStore, "("); idx > 0 {
			currentStore = strings.TrimSpace(currentStore[:idx])
		}

		log.Printf("DEBUG [PlanToEat/Shopping]: Processing store: %q", currentStore)

		// Iterate through categories within store
		store.Find("div.category-box").Each(func(_ int, catBox *goquery.Selection) {
			currentCategory = strings.TrimSpace(catBox.Find("p.category-title span").First().Text())

			// Iterate through items in category
			catBox.Find("li.sli").Each(func(_ int, li *goquery.Selection) {
				item := models.ShoppingItem{
					Store:    currentStore,
					Category: currentCategory,
				}

				// Extract ID from class (e.g., "sli i493745889") — the first
				// class token starting with "i" is taken as the numeric ID.
				if class, exists := li.Attr("class"); exists {
					for _, c := range strings.Fields(class) {
						if strings.HasPrefix(c, "i") && len(c) > 1 {
							item.ID = c[1:] // Remove 'i' prefix
							break
						}
					}
				}

				// Extract name
				item.Name = strings.TrimSpace(li.Find("strong").First().Text())

				// Extract quantity
				item.Quantity = strings.TrimSpace(li.Find("span.quan").First().Text())
				// Clean up HTML entities in quantity
				item.Quantity = cleanQuantity(item.Quantity)

				// Check if item is checked (has specific class or attribute)
				if li.HasClass("checked") || li.HasClass("crossed") {
					item.Checked = true
				}

				// Nameless rows are structural markup, not real items.
				if item.Name != "" {
					items = append(items, item)
				}
			})
		})
	})

	log.Printf("DEBUG [PlanToEat/Shopping]: Parsed %d items total", len(items))

	return items, nil
}

// cleanQuantity removes HTML entities and extra whitespace from quantity strings.
// Non-breaking spaces (rendered from &nbsp;) are converted to regular spaces,
// runs of spaces are collapsed to one, and the result is trimmed.
func cleanQuantity(q string) string {
	q = strings.ReplaceAll(q, "\u00a0", " ") // non-breaking space
	// Loop until stable: a single ReplaceAll pass leaves residue for runs of
	// three or more spaces (e.g. four spaces collapse to two, not one).
	for strings.Contains(q, "  ") {
		q = strings.ReplaceAll(q, "  ", " ")
	}
	return strings.TrimSpace(q)
}