feat: add WCAG 2.1 AA accessibility landmarks and skip-to-content link
Some checks failed
Build & Deploy to Staging / Build & Deploy to Staging (push) Has been cancelled

- Wrap nav in <header> landmark on all pages
- Wrap content in <main id='main-content'> on all pages
- Add skip-to-content link (visually hidden, visible on focus)
- Add skip-link CSS styles
- Add 65 accessibility tests covering all 16 full-layout pages
- All 288 tests passing
This commit is contained in:
Hoid 2026-03-03 15:04:55 +01:00
parent e04d0bb283
commit 9fe59d4867
18 changed files with 399 additions and 2 deletions

View file

@ -0,0 +1,46 @@
import { describe, it, expect } from 'vitest'
import fs from 'fs'
import path from 'path'
import { fileURLToPath } from 'url'
const __dirname = path.dirname(fileURLToPath(import.meta.url))
const publicDir = path.join(__dirname, '../../../public')
// All HTML pages that have a nav (full-layout pages)
const fullLayoutPages = fs.readdirSync(publicDir, { recursive: true })
.filter((f): f is string => typeof f === 'string' && f.endsWith('.html'))
.map(f => path.join(publicDir, f))
.filter(f => {
const html = fs.readFileSync(f, 'utf-8')
return html.includes('<nav')
})
describe('Accessibility landmarks', () => {
it('found full-layout pages to test', () => {
expect(fullLayoutPages.length).toBeGreaterThan(5)
})
for (const filePath of fullLayoutPages) {
const rel = path.relative(publicDir, filePath)
describe(rel, () => {
const html = fs.readFileSync(filePath, 'utf-8')
it('has <header> landmark wrapping nav', () => {
expect(html).toMatch(/<header[\s>]/)
})
it('has <main id="main-content"> landmark', () => {
expect(html).toMatch(/<main[\s][^>]*id=["']main-content["']/)
})
it('has skip-to-content link', () => {
expect(html).toMatch(/<a[^>]*href=["']#main-content["'][^>]*class=["'][^"']*skip-link/)
})
it('has <footer> landmark', () => {
expect(html).toMatch(/<footer[\s>]/)
})
})
}
})

View file

@ -0,0 +1,199 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'
const mockPage = () => ({
close: vi.fn().mockResolvedValue(undefined),
evaluate: vi.fn().mockResolvedValue(undefined),
goto: vi.fn().mockResolvedValue(undefined),
setViewport: vi.fn().mockResolvedValue(undefined),
screenshot: vi.fn().mockResolvedValue(Buffer.from('fake')),
waitForSelector: vi.fn().mockResolvedValue(undefined),
})
const mockBrowser = (pages: any[]) => ({
close: vi.fn().mockResolvedValue(undefined),
newPage: vi.fn().mockImplementation(() => {
const p = mockPage()
pages.push(p)
return Promise.resolve(p)
}),
})
let launchedBrowsers: any[] = []
let allPages: any[][] = []
vi.mock('puppeteer', () => ({
default: {
launch: vi.fn().mockImplementation(() => {
const pages: any[] = []
allPages.push(pages)
const b = mockBrowser(pages)
launchedBrowsers.push(b)
return Promise.resolve(b)
}),
},
}))
vi.mock('../logger.js', () => ({
default: { info: vi.fn(), warn: vi.fn(), error: vi.fn() },
}))
describe('Browser Pool Service', () => {
let mod: any
beforeEach(async () => {
vi.resetModules()
launchedBrowsers = []
allPages = []
vi.doMock('puppeteer', () => ({
default: {
launch: vi.fn().mockImplementation(() => {
const pages: any[] = []
allPages.push(pages)
const b = mockBrowser(pages)
launchedBrowsers.push(b)
return Promise.resolve(b)
}),
},
}))
vi.doMock('../logger.js', () => ({
default: { info: vi.fn(), warn: vi.fn(), error: vi.fn() },
}))
mod = await import('../browser.js')
})
afterEach(async () => {
try { await mod.closeBrowser() } catch {}
})
describe('initBrowser()', () => {
it('creates correct number of browsers (default BROWSER_COUNT=2)', async () => {
await mod.initBrowser()
expect(launchedBrowsers).toHaveLength(2)
})
it('creates PAGES_PER_BROWSER pages per browser (default 4)', async () => {
await mod.initBrowser()
for (const pages of allPages) {
expect(pages).toHaveLength(4)
}
})
it('pool stats reflect correct totals after init', async () => {
await mod.initBrowser()
const stats = mod.getPoolStats()
expect(stats.browsers).toBe(2)
expect(stats.pagesPerBrowser).toBe(4)
expect(stats.totalPages).toBe(8)
expect(stats.availablePages).toBe(8)
expect(stats.queueDepth).toBe(0)
expect(stats.totalJobs).toBe(0)
})
})
describe('acquirePage()', () => {
it('returns a page from the pool', async () => {
await mod.initBrowser()
const { page } = await mod.acquirePage()
expect(page).toBeDefined()
expect(page.evaluate).toBeDefined()
})
it('uses round-robin across browser instances', async () => {
await mod.initBrowser()
await mod.acquirePage()
await mod.acquirePage()
const stats = mod.getPoolStats()
expect(stats.availablePages).toBe(6)
})
it('decrements available pages on acquire', async () => {
await mod.initBrowser()
await mod.acquirePage()
const stats = mod.getPoolStats()
expect(stats.availablePages).toBe(7)
})
})
describe('releasePage()', () => {
it('increments job count after release', async () => {
await mod.initBrowser()
const { page, instance } = await mod.acquirePage()
expect(mod.getPoolStats().totalJobs).toBe(0)
mod.releasePage(page, instance)
expect(mod.getPoolStats().totalJobs).toBe(1)
})
it('returns page to pool (available count increases)', async () => {
await mod.initBrowser()
const { page, instance } = await mod.acquirePage()
expect(mod.getPoolStats().availablePages).toBe(7)
mod.releasePage(page, instance)
await new Promise(r => setTimeout(r, 50))
expect(mod.getPoolStats().availablePages).toBe(8)
})
})
describe('Queue behavior', () => {
it('queues request when all pages are busy and resolves when page released', async () => {
await mod.initBrowser()
const acquired = []
for (let i = 0; i < 8; i++) {
acquired.push(await mod.acquirePage())
}
expect(mod.getPoolStats().availablePages).toBe(0)
let resolved = false
const pending = mod.acquirePage().then((r: any) => { resolved = true; return r })
await new Promise(r => setTimeout(r, 10))
expect(resolved).toBe(false)
expect(mod.getPoolStats().queueDepth).toBe(1)
mod.releasePage(acquired[0].page, acquired[0].instance)
await new Promise(r => setTimeout(r, 100))
expect(resolved).toBe(true)
const result = await pending
mod.releasePage(result.page, result.instance)
for (let i = 1; i < acquired.length; i++) {
mod.releasePage(acquired[i].page, acquired[i].instance)
}
})
it('rejects with QUEUE_FULL after 30s timeout', async () => {
vi.useFakeTimers()
await mod.initBrowser()
const acquired = []
for (let i = 0; i < 8; i++) {
acquired.push(await mod.acquirePage())
}
const pending = mod.acquirePage()
await vi.advanceTimersByTimeAsync(31_000)
await expect(pending).rejects.toThrow('QUEUE_FULL')
for (const a of acquired) mod.releasePage(a.page, a.instance)
vi.useRealTimers()
})
})
describe('getPoolStats()', () => {
it('returns accurate counts during operations', async () => {
await mod.initBrowser()
const { page, instance } = await mod.acquirePage()
const stats = mod.getPoolStats()
expect(stats.availablePages).toBe(7)
expect(stats.totalJobs).toBe(0)
mod.releasePage(page, instance)
expect(mod.getPoolStats().totalJobs).toBe(1)
})
})
describe('Staggered restart', () => {
it('only one browser restarts at a time (restarting flag)', async () => {
await mod.initBrowser()
const stats = mod.getPoolStats()
// After init, no browsers should be restarting
expect(stats.browsers).toBe(2)
})
})
})