diff options
| author | yyamashita <yyamashita@mosquit.one> | 2026-05-10 23:22:17 +0900 |
|---|---|---|
| committer | yyamashita <yyamashita@mosquit.one> | 2026-05-10 23:22:17 +0900 |
| commit | b56e79b5b288b7c9e2fef396b303afc32c9baf5d (patch) | |
| tree | 28080f7f019889659ef1682f4d3661ed9650da54 /app/scrapers/fad-yokohama.ts | |
| parent | 05d2b35a85a46dde9a1264d3002ba86e02e3d5eb (diff) | |
Fix multi-month scrape coverage and add duo MUSIC EXCHANGE
- Extend 8 scrapers (liquid-room, shibuya-o, club-quattro, meets-otsuka,
nishieifuku-jam, fever-shindaita, fad-yokohama, and new duo-music-exchange)
to fetch 3 calendar months instead of 1-2, covering the full 65-day window
- Add duo MUSIC EXCHANGE scraper (渋谷, ~700 cap, /schedule/YYYY/index_YYYY-MM.html)
- Add npm test: Node.js built-in test runner verifies each scraper fetches
all required month URLs via mocked fetch (10 tests, no extra deps)
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
Diffstat (limited to 'app/scrapers/fad-yokohama.ts')
| -rw-r--r-- | app/scrapers/fad-yokohama.ts | 31 |
1 file changed, 11 insertions, 20 deletions
diff --git a/app/scrapers/fad-yokohama.ts b/app/scrapers/fad-yokohama.ts
index a01ea0d..f8f7cbc 100644
--- a/app/scrapers/fad-yokohama.ts
+++ b/app/scrapers/fad-yokohama.ts
@@ -151,27 +151,18 @@ function parsePageEvents(
 
 export const scraper: Scraper = {
   venue,
   async scrape(): Promise<EventInput[]> {
-    const res = await fetch(SCHEDULE_URL);
-    if (!res.ok) throw new Error(`HTTP ${res.status}`);
-    const html = await res.text();
-
-    const { year, month, nextUrl } = getMonthContext(html);
-    const events = parsePageEvents(html, year, month, SCHEDULE_URL);
-
-    if (nextUrl) {
-      const nextRes = await fetch(nextUrl);
-      if (nextRes.ok) {
-        const nextHtml = await nextRes.text();
-        let nextMonth = month + 1;
-        let nextYear = year;
-        if (nextMonth > 12) {
-          nextMonth = 1;
-          nextYear++;
-        }
-        events.push(...parsePageEvents(nextHtml, nextYear, nextMonth, nextUrl));
-      }
+    const allEvents: EventInput[] = [];
+    let url: string | null = SCHEDULE_URL;
+
+    for (let page = 0; page < 3 && url; page++) {
+      const res = await fetch(url);
+      if (!res.ok) break;
+      const html = await res.text();
+      const { year, month, nextUrl } = getMonthContext(html);
+      allEvents.push(...parsePageEvents(html, year, month, url));
+      url = nextUrl;
     }
-    return events;
+    return allEvents;
   },
 };
