2 changes: 2 additions & 0 deletions package.json
@@ -31,6 +31,7 @@
"test": "cross-env STRICT_MODE=1 yarn jest",
"test-loose": "cross-env VIRT_ON=1 yarn jest",
"test-storybook": "test-storybook --url http://localhost:9003 --browsers chromium --no-cache",
"test:docs": "yarn playwright install && node scripts/testDocs.js",
"build": "make build",
"test:ssr": "cross-env STRICT_MODE=1 yarn jest --config jest.ssr.config.js",
"ci-test": "cross-env STRICT_MODE=1 yarn jest --maxWorkers=2 && cross-env STRICT_MODE=1 yarn test:ssr --runInBand",
@@ -172,6 +173,7 @@
"parcel": "2.0.0-dev.1599",
"parcel-resolver-storybook": "https://gitpkg.vercel.app/mischnic/storybook-parcel/packages/parcel-resolver-storybook?master",
"patch-package": "^6.2.0",
"playwright": "^1.45.3",
"plop": "^2.4.0",
"postcss": "^8.4.24",
"postcss-custom-properties": "^13.2.0",
176 changes: 176 additions & 0 deletions scripts/testDocs.js
@@ -0,0 +1,176 @@
const {chromium, firefox, webkit} = require('playwright');
const {exec} = require('child_process');
const http = require('http');
const path = require('path');
const glob = require('glob-promise');

function parseArgs() {
const args = process.argv.slice(2);
const browser = args[0] || 'chromium';
if (!['chromium', 'firefox', 'webkit'].includes(browser)) {
console.error('Invalid browser specified. Must be "chromium", "firefox", or "webkit". Using "chromium" as default.');
return 'chromium';
}
return browser;
}

async function startServer() {
return new Promise((resolve, reject) => {
console.log('Starting documentation server...');
const child = exec('yarn start:docs', {
env: {...process.env, DOCS_ENV: 'dev'}
});
child.stdout.on('data', (data) => {
console.log(`Server output: ${data}`);
if (data.includes('Server running at')) {
console.log('Documentation server is running');
resolve({process: child, baseUrl: data.split(' ')[3].trim()});
}
});
child.stderr.on('data', (data) => {
console.error(`Server error: ${data}`);
});
});
}

function waitForServer(url, timeout = 30000, interval = 1000) {
return new Promise((resolve, reject) => {
const startTime = Date.now();
const checkServer = () => {
http.get(url, (res) => {
if (res.statusCode === 200) {
resolve();
} else {
retryOrFail();
}
}).on('error', retryOrFail);
};

const retryOrFail = () => {
if (Date.now() - startTime < timeout) {
setTimeout(checkServer, interval);
} else {
reject(new Error('Server did not start in time'));
}
};

checkServer();
});
}

async function getPageLinks() {
Review comment (Member): Is there a reason you're scanning the raw files instead of the pages from the server for URLs? Fewer links to look at because you're not having to deal with the sidebar or header navigation? We talked about having the list of links from the start being an intentional optimization.

Review reply (Member Author): Yeah, it seemed a little more organized, and you can see the progress as it's running.

const packagePaths = [
'packages/@react-{spectrum,aria,stately}/*/docs/*.mdx',
'packages/react-aria-components/docs/**/*.mdx',
'packages/@internationalized/*/docs/*.mdx'
];

const rootPages = 'packages/dev/docs/pages/**/*.mdx';

let links = [];

for (const pattern of packagePaths) {
const files = await glob(pattern);
for (const file of files) {
const parts = file.split(path.sep);
const packageName = parts[1].replace('@', '');
const componentName = path.basename(file, '.mdx');
links.push(`/${packageName}/${componentName}.html`);
}
}

const rootFiles = await glob(rootPages);
for (const file of rootFiles) {
const relativePath = path.relative('packages/dev/docs/pages', file);
const urlPath = path.join('/', path.dirname(relativePath), path.basename(relativePath, '.mdx'));
links.push(`${urlPath}.html`);
}

return links;
}

async function testDocs() {
let server;
let browser;
let messages = [];
let currentPage = '';

const browserType = parseArgs();
console.log(`Using ${browserType} browser for testing`);

try {
server = await startServer();
await waitForServer(server.baseUrl);

const pageLinks = await getPageLinks().then((links) => links.map((link) => `${server.baseUrl}${link}`));
console.log(`Found ${pageLinks.length} pages to test`);

switch (browserType) {
case 'firefox':
browser = await firefox.launch();
break;
case 'webkit':
browser = await webkit.launch();
break;
default:
browser = await chromium.launch();
}

const context = await browser.newContext();

context.on('console', (msg) => {
const msgUrl = msg.location().url;
if (msgUrl.startsWith(server.baseUrl) && (msg.type() === 'error' || msg.type() === 'warning')) {
console.log(`${msg.type().toUpperCase()} on ${currentPage}: ${msg.text()}`);
messages.push({type: msg.type(), path: currentPage, text: msg.text()});
}
});

for (let i = 0; i < pageLinks.length; i++) {
const url = pageLinks[i];
currentPage = new URL(url).pathname;
console.log(`Testing page (${i + 1}/${pageLinks.length}): ${currentPage}`);

const page = await context.newPage();

try {
const response = await page.goto(url, {
waitUntil: 'networkidle',
timeout: 10000
});

if (!response.ok()) {
console.error(
`Failed to load ${currentPage}: ${response.status()} ${response.statusText()}`
);
}

await page.waitForTimeout(1000);
} catch (error) {
console.error(`Error on ${currentPage}:`, error.message);
} finally {
await page.close();
}
}

console.log('All pages tested successfully');
console.log(`Total pages visited: ${pageLinks.length}`);
console.log(`Total errors: ${messages.filter((msg) => msg.type === 'error').length}`);
console.log(`Total warnings: ${messages.filter((msg) => msg.type === 'warning').length}`);
messages.forEach((msg) => {
console.log(`${msg.type.toUpperCase()} on ${msg.path}: ${msg.text}`);
});
} catch (error) {
console.error('An error occurred during testing:', error);
} finally {
if (browser) {
await browser.close();
}
if (server && server.process) {
server.process.kill();
}
process.exit(0);
Review comment (Member): Thank you!!!
}
}

testDocs();
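
The review thread above asks why the script globs the docs source files rather than crawling the running server for URLs. Neither the thread nor the diff contains such a crawler, so the following is only a hedged sketch of that alternative, reusing the Playwright browser context the script already creates; the '/index.html' start page and the 'a[href$=".html"]' selector are assumptions about the docs site's markup, not part of the actual change.

// Sketch only: collect candidate doc URLs by crawling the served index page
// instead of globbing *.mdx sources. Assumes the index links to each doc page
// with a plain <a href="*.html"> anchor (an assumption, not verified here).
async function getPageLinksFromServer(context, baseUrl) {
  const page = await context.newPage();
  try {
    await page.goto(`${baseUrl}/index.html`, {waitUntil: 'networkidle', timeout: 10000});
    // $$eval runs the callback in the browser and returns serializable data back to Node.
    const hrefs = await page.$$eval('a[href$=".html"]', (anchors) => anchors.map((a) => a.href));
    // Keep only same-origin links and de-duplicate by pathname.
    const unique = new Set(
      hrefs.filter((href) => href.startsWith(baseUrl)).map((href) => new URL(href).pathname)
    );
    return [...unique];
  } finally {
    await page.close();
  }
}
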
49 changes: 37 additions & 12 deletions yarn.lock
@@ -18591,6 +18591,16 @@ __metadata:
languageName: node
linkType: hard

"fsevents@npm:2.3.2, fsevents@npm:^2.3.2, fsevents@npm:~2.3.2":
version: 2.3.2
resolution: "fsevents@npm:2.3.2"
dependencies:
node-gyp: "npm:latest"
checksum: 10c0/be78a3efa3e181cda3cf7a4637cb527bcebb0bd0ea0440105a3bb45b86f9245b307dc10a2507e8f4498a7d4ec349d1910f4d73e4d4495b16103106e07eee735b
conditions: os=darwin
languageName: node
linkType: hard

"fsevents@npm:^1.2.7":
version: 1.2.12
resolution: "fsevents@npm:1.2.12"
@@ -18604,12 +18614,11 @@
languageName: node
linkType: hard

"fsevents@npm:^2.3.2, fsevents@npm:~2.3.2":
"fsevents@patch:fsevents@npm%3A2.3.2#optional!builtin<compat/fsevents>, fsevents@patch:fsevents@npm%3A^2.3.2#optional!builtin<compat/fsevents>, fsevents@patch:fsevents@npm%3A~2.3.2#optional!builtin<compat/fsevents>":
version: 2.3.2
resolution: "fsevents@npm:2.3.2"
resolution: "fsevents@patch:fsevents@npm%3A2.3.2#optional!builtin<compat/fsevents>::version=2.3.2&hash=df0bf1"
dependencies:
node-gyp: "npm:latest"
checksum: 10c0/be78a3efa3e181cda3cf7a4637cb527bcebb0bd0ea0440105a3bb45b86f9245b307dc10a2507e8f4498a7d4ec349d1910f4d73e4d4495b16103106e07eee735b
conditions: os=darwin
languageName: node
linkType: hard
@@ -18626,15 +18635,6 @@
languageName: node
linkType: hard

"fsevents@patch:fsevents@npm%3A^2.3.2#optional!builtin<compat/fsevents>, fsevents@patch:fsevents@npm%3A~2.3.2#optional!builtin<compat/fsevents>":
version: 2.3.2
resolution: "fsevents@patch:fsevents@npm%3A2.3.2#optional!builtin<compat/fsevents>::version=2.3.2&hash=df0bf1"
dependencies:
node-gyp: "npm:latest"
conditions: os=darwin
languageName: node
linkType: hard

"full-icu@npm:^1.3.0":
version: 1.3.0
resolution: "full-icu@npm:1.3.0"
@@ -27614,6 +27614,15 @@
languageName: node
linkType: hard

"playwright-core@npm:1.45.3":
version: 1.45.3
resolution: "playwright-core@npm:1.45.3"
bin:
playwright-core: cli.js
checksum: 10c0/39cc5920b27c42300e13a0646ca723578085d85940fc1f03e858fa348b5ac06f2eadf34cf15a0c0f4443e63ae188097d3ddbeb4389e7bbf5ae3438d8f6ed23e1
languageName: node
linkType: hard

"playwright@npm:^1.14.0":
version: 1.36.1
resolution: "playwright@npm:1.36.1"
@@ -27625,6 +27634,21 @@
languageName: node
linkType: hard

"playwright@npm:^1.45.3":
version: 1.45.3
resolution: "playwright@npm:1.45.3"
dependencies:
fsevents: "npm:2.3.2"
playwright-core: "npm:1.45.3"
dependenciesMeta:
fsevents:
optional: true
bin:
playwright: cli.js
checksum: 10c0/3516ca49deb589171ac6525c0367f2ff948514d791d197f3cc0a135154c2df08a4d7cd11a810e187f35ae9ca490b37ca3a92fb3eb51560f03aefcaca0613efdb
languageName: node
linkType: hard

"plop@npm:^2.4.0":
version: 2.4.0
resolution: "plop@npm:2.4.0"
@@ -29041,6 +29065,7 @@ __metadata:
parcel: "npm:2.0.0-dev.1599"
parcel-resolver-storybook: "https://gitpkg.vercel.app/mischnic/storybook-parcel/packages/parcel-resolver-storybook?master"
patch-package: "npm:^6.2.0"
playwright: "npm:^1.45.3"
plop: "npm:^2.4.0"
postcss: "npm:^8.4.24"
postcss-custom-properties: "npm:^13.2.0"