Refactor and improve external URL checks
- Move external URL checks to their own module under `tests/`. This separates them from the integration tests, addressing long runs and frequent failures that led to ignoring test results.
- Move `check-desktop-runtime-errors` to `tests/checks` to keep all test-related checks in one directory.
- Replace `ts-node` with `vite` for running `check-desktop-runtime-errors` to maintain a consistent execution environment across checks.
- Implement a timeout for each fetch call.
- Be nice to external sources: wait 5 seconds before sending another request to a URL under the same domain (see the sketch below). This solves rate-limiting issues.
- Run the tests on a weekly schedule instead of on every push/pull request to minimize noise.
- Fix URLs not being captured correctly inside backticks or parentheses.
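To illustrate the same-domain politeness described above, here is a minimal sketch of delaying consecutive requests to the same domain. The helper is hypothetical and simplified compared to the actual status-checker implementation further down in this diff.

```ts
// Minimal sketch (hypothetical helper, simplified versus the real status-checker code):
// wait a fixed delay between consecutive requests that target the same domain.
const SAME_DOMAIN_DELAY_IN_MS = 5 * 1000;
const lastRequestTimeByDomain = new Map<string, number>();

async function politeFetch(url: string): Promise<Response> {
  const domain = new URL(url).hostname;
  const lastRequestTime = lastRequestTimeByDomain.get(domain);
  if (lastRequestTime !== undefined) {
    const elapsedMs = Date.now() - lastRequestTime;
    if (elapsedMs < SAME_DOMAIN_DELAY_IN_MS) {
      await new Promise((resolve) => { setTimeout(resolve, SAME_DOMAIN_DELAY_IN_MS - elapsedMs); });
    }
  }
  lastRequestTimeByDomain.set(domain, Date.now());
  return fetch(url);
}
```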
@@ -0,0 +1,29 @@
# check-desktop-runtime-errors

This script automates the processes of:

1) Building
2) Packaging
3) Installing
4) Executing
5) Verifying Electron distributions

It runs the application for a set duration and detects runtime errors in the packaged application via:

- **Log verification**: Checking application logs for errors and validating successful application initialization.
- **`stderr` monitoring**: Continuous listening to the `stderr` stream for unexpected errors.
- **Window title inspection**: Checking for window titles that indicate crashes before logging becomes possible.
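The sketch below illustrates the log-verification idea from the list above in a simplified form; the full logic lives in `check-for-errors.ts` later in this diff, and the `logContent` input is assumed to be the combined application log text.

```ts
// Simplified illustration of the log-verification check (see check-for-errors.ts in this diff).
const EXPECTED_LOG_MARKERS = ['[WINDOW_INIT]', '[PRELOAD_INIT]', '[APP_INIT]'];

function findLogProblems(logContent: string): string[] {
  const problems: string[] = [];
  if (logContent.includes('[error]')) { // electron-log marks error entries with `[error]`
    problems.push('Application logs contain at least one error entry.');
  }
  EXPECTED_LOG_MARKERS
    .filter((marker) => !logContent.includes(marker))
    .forEach((marker) => problems.push(`Missing initialization marker: ${marker}`));
  return problems;
}
```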
Upon error, the script captures a screenshot (if `--screenshot` is provided) and terminates.

## Options

- `--build`: Clears the electron distribution directory and forces a rebuild of the Electron app.
- `--screenshot`: Takes a screenshot of the desktop environment after running the application.

This module provides utilities for building, executing, and validating Electron desktop apps.
It can be used to automate checking for runtime errors during development.

## Configs

Configurations are defined in [`config.ts`](./config.ts).
@@ -0,0 +1,82 @@
|
||||
import { unlink, readFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { log, die, LogLevel } from '../utils/log';
|
||||
import { exists } from '../utils/io';
|
||||
import { SupportedPlatform, CURRENT_PLATFORM } from '../utils/platform';
|
||||
import { getAppName } from '../utils/npm';
|
||||
|
||||
const LOG_FILE_NAMES = ['main', 'renderer'];
|
||||
|
||||
export async function clearAppLogFiles(
|
||||
projectDir: string,
|
||||
): Promise<void> {
|
||||
if (!projectDir) { throw new Error('missing project directory'); }
|
||||
await Promise.all(LOG_FILE_NAMES.map(async (logFileName) => {
|
||||
const logPath = await determineLogPath(projectDir, logFileName);
|
||||
if (!logPath || !await exists(logPath)) {
|
||||
log(`Skipping clearing logs, log file does not exist: ${logPath}.`);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await unlink(logPath);
|
||||
log(`Successfully cleared the log file at: ${logPath}.`);
|
||||
} catch (error) {
|
||||
die(`Failed to clear the log file at: ${logPath}. Reason: ${error}`);
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
export async function readAppLogFile(
|
||||
projectDir: string,
|
||||
logFileName: string,
|
||||
): Promise<AppLogFileResult> {
|
||||
if (!projectDir) { throw new Error('missing project directory'); }
|
||||
const logPath = await determineLogPath(projectDir, logFileName);
|
||||
if (!logPath || !await exists(logPath)) {
|
||||
log(`No log file at: ${logPath}`, LogLevel.Warn);
|
||||
return {
|
||||
logFilePath: logPath,
|
||||
};
|
||||
}
|
||||
const logContent = await readLogFile(logPath);
|
||||
return {
|
||||
logFileContent: logContent,
|
||||
logFilePath: logPath,
|
||||
};
|
||||
}
|
||||
|
||||
interface AppLogFileResult {
|
||||
readonly logFilePath: string;
|
||||
readonly logFileContent?: string;
|
||||
}
|
||||
|
||||
async function determineLogPath(
|
||||
projectDir: string,
|
||||
logFileName: string,
|
||||
): Promise<string> {
|
||||
if (!projectDir) { throw new Error('missing project directory'); }
|
||||
if (!LOG_FILE_NAMES.includes(logFileName)) { throw new Error(`unknown log file name: ${logFileName}`); }
|
||||
const appName = await getAppName(projectDir);
|
||||
if (!appName) {
|
||||
return die('App name not found.');
|
||||
}
|
||||
const logFilePaths: {
|
||||
readonly [K in SupportedPlatform]: () => string;
|
||||
} = {
|
||||
[SupportedPlatform.macOS]: () => join(process.env.HOME, 'Library', 'Logs', appName, `${logFileName}.log`),
|
||||
[SupportedPlatform.Linux]: () => join(process.env.HOME, '.config', appName, 'logs', `${logFileName}.log`),
|
||||
[SupportedPlatform.Windows]: () => join(process.env.USERPROFILE, 'AppData', 'Roaming', appName, 'logs', `${logFileName}.log`),
|
||||
};
|
||||
const logFilePath = logFilePaths[CURRENT_PLATFORM]?.();
|
||||
if (!logFilePath) {
|
||||
log(`Cannot determine log path, unsupported OS: ${SupportedPlatform[CURRENT_PLATFORM]}`, LogLevel.Warn);
|
||||
}
|
||||
return logFilePath;
|
||||
}
|
||||
|
||||
async function readLogFile(
|
||||
logFilePath: string,
|
||||
): Promise<string | undefined> {
|
||||
const content = await readFile(logFilePath, 'utf-8');
|
||||
return content?.trim().length > 0 ? content : undefined;
|
||||
}
|
||||
@@ -0,0 +1,181 @@
|
||||
import { splitTextIntoLines, indentText } from '../utils/text';
|
||||
import { log, die } from '../utils/log';
|
||||
import { readAppLogFile } from './app-logs';
|
||||
import { STDERR_IGNORE_PATTERNS } from './error-ignore-patterns';
|
||||
|
||||
const ELECTRON_CRASH_TITLE = 'Error'; // Used by electron for early crashes
|
||||
const LOG_ERROR_MARKER = '[error]'; // from electron-log
|
||||
const EXPECTED_LOG_MARKERS = [
|
||||
'[WINDOW_INIT]',
|
||||
'[PRELOAD_INIT]',
|
||||
'[APP_INIT]',
|
||||
];
|
||||
|
||||
type ProcessType = 'main' | 'renderer';
|
||||
|
||||
export async function checkForErrors(
|
||||
stderr: string,
|
||||
windowTitles: readonly string[],
|
||||
projectDir: string,
|
||||
) {
|
||||
if (!projectDir) { throw new Error('missing project directory'); }
|
||||
const errors = await gatherErrors(stderr, windowTitles, projectDir);
|
||||
if (errors.length) {
|
||||
die(formatErrors(errors));
|
||||
}
|
||||
}
|
||||
|
||||
async function gatherErrors(
|
||||
stderr: string,
|
||||
windowTitles: readonly string[],
|
||||
projectDir: string,
|
||||
): Promise<ExecutionError[]> {
|
||||
if (!projectDir) { throw new Error('missing project directory'); }
|
||||
const { logFileContent: mainLogs, logFilePath: mainLogFile } = await readAppLogFile(projectDir, 'main');
|
||||
const { logFileContent: rendererLogs, logFilePath: rendererLogFile } = await readAppLogFile(projectDir, 'renderer');
|
||||
const allLogs = [mainLogs, rendererLogs, stderr].filter(Boolean).join('\n');
|
||||
return [
|
||||
verifyStdErr(stderr),
|
||||
verifyApplicationLogsExist('main', mainLogs, mainLogFile),
|
||||
verifyApplicationLogsExist('renderer', rendererLogs, rendererLogFile),
|
||||
...EXPECTED_LOG_MARKERS.map(
|
||||
(marker) => verifyLogMarkerExistsInLogs(allLogs, marker),
|
||||
),
|
||||
verifyWindowTitle(windowTitles),
|
||||
verifyErrorsInLogs(allLogs),
|
||||
].filter(Boolean);
|
||||
}
|
||||
|
||||
interface ExecutionError {
|
||||
readonly reason: string;
|
||||
readonly description: string;
|
||||
}
|
||||
|
||||
function formatErrors(errors: readonly ExecutionError[]): string {
|
||||
if (!errors?.length) { throw new Error('missing errors'); }
|
||||
return [
|
||||
'Errors detected during execution:',
|
||||
...errors.map(
|
||||
(error) => formatError(error),
|
||||
),
|
||||
].join('\n---\n');
|
||||
}
|
||||
|
||||
function formatError(error: ExecutionError): string {
|
||||
if (!error) { throw new Error('missing error'); }
|
||||
if (!error.reason) { throw new Error(`missing reason, error (${typeof error}): ${JSON.stringify(error)}`); }
|
||||
let message = `Reason: ${indentText(error.reason, 1)}`;
|
||||
if (error.description) {
|
||||
message += `\nDescription:\n${indentText(error.description, 2)}`;
|
||||
}
|
||||
return message;
|
||||
}
|
||||
|
||||
function verifyApplicationLogsExist(
|
||||
processType: ProcessType,
|
||||
logContent: string | undefined,
|
||||
logFilePath: string,
|
||||
): ExecutionError | undefined {
|
||||
if (!logContent?.length) {
|
||||
return describeError(
|
||||
`Missing application (${processType}) logs`,
|
||||
'Application logs are empty or were not found.'
|
||||
+ `\nLog path: ${logFilePath}`,
|
||||
);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function verifyLogMarkerExistsInLogs(
|
||||
logContent: string | undefined,
|
||||
marker: string,
|
||||
) : ExecutionError | undefined {
|
||||
if (!marker) {
|
||||
throw new Error('missing marker');
|
||||
}
|
||||
if (!logContent?.includes(marker)) {
|
||||
return describeError(
|
||||
'Incomplete application logs',
|
||||
`Missing identifier "${marker}" in application logs.`,
|
||||
);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function verifyWindowTitle(
|
||||
windowTitles: readonly string[],
|
||||
) : ExecutionError | undefined {
|
||||
const errorTitles = windowTitles.filter(
|
||||
(title) => title.toLowerCase().includes(ELECTRON_CRASH_TITLE.toLowerCase()),
|
||||
);
|
||||
if (errorTitles.length) {
|
||||
return describeError(
|
||||
'Unexpected window title',
|
||||
'One or more window titles suggest an error occurred in the application:'
|
||||
+ `\nError Titles: ${errorTitles.join(', ')}`
|
||||
+ `\nAll Titles: ${windowTitles.join(', ')}`,
|
||||
);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function verifyStdErr(
|
||||
stderrOutput: string | undefined,
|
||||
) : ExecutionError | undefined {
|
||||
if (stderrOutput && stderrOutput.length > 0) {
|
||||
const ignoredErrorLines = new Set();
|
||||
const relevantErrors = getNonEmptyLines(stderrOutput)
|
||||
.filter((line) => {
|
||||
line = line.trim();
|
||||
if (STDERR_IGNORE_PATTERNS.some((pattern) => pattern.test(line))) {
|
||||
ignoredErrorLines.add(line);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
if (ignoredErrorLines.size > 0) {
|
||||
log(`Ignoring \`stderr\` lines:\n${indentText([...ignoredErrorLines].join('\n'), 1)}`);
|
||||
}
|
||||
if (relevantErrors.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
return describeError(
|
||||
'Standard error stream (`stderr`) is not empty.',
|
||||
`Relevant errors (${relevantErrors.length}):\n${indentText(relevantErrors.map((error) => `- ${error}`).join('\n'), 1)}`
|
||||
+ `\nFull \`stderr\` output:\n${indentText(stderrOutput, 1)}`,
|
||||
);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function verifyErrorsInLogs(
|
||||
logContent: string | undefined,
|
||||
) : ExecutionError | undefined {
|
||||
if (!logContent?.length) {
|
||||
return undefined;
|
||||
}
|
||||
const logLines = getNonEmptyLines(logContent)
|
||||
.filter((line) => line.includes(LOG_ERROR_MARKER));
|
||||
if (!logLines.length) {
|
||||
return undefined;
|
||||
}
|
||||
return describeError(
|
||||
'Application log file',
|
||||
logLines.join('\n'),
|
||||
);
|
||||
}
|
||||
|
||||
function describeError(
|
||||
reason: string,
|
||||
description: string,
|
||||
) : ExecutionError | undefined {
|
||||
return {
|
||||
reason,
|
||||
description: `${description}\n\nThis might indicate an early crash or significant runtime issue.`,
|
||||
};
|
||||
}
|
||||
|
||||
function getNonEmptyLines(text: string) {
|
||||
return splitTextIntoLines(text)
|
||||
.filter((line) => line?.trim().length > 0);
|
||||
}
|
||||
@@ -0,0 +1,41 @@
|
||||
/* eslint-disable vue/max-len */
|
||||
|
||||
/* Ignore errors specific to host environment, rather than application execution */
|
||||
export const STDERR_IGNORE_PATTERNS: readonly RegExp[] = [
|
||||
/*
|
||||
OS: Linux
|
||||
Background:
|
||||
GLIBC and libgiolibproxy.so were seen on local Linux (Ubuntu-based) installation.
|
||||
Original logs:
|
||||
/snap/core20/current/lib/x86_64-linux-gnu/libstdc++.so.6: version `GLIBCXX_3.4.29' not found (required by /lib/x86_64-linux-gnu/libproxy.so.1)
|
||||
Failed to load module: /home/bob/snap/code/common/.cache/gio-modules/libgiolibproxy.so
|
||||
[334053:0829/122143.595703:ERROR:browser_main_loop.cc(274)] GLib: Failed to set scheduler settings: Operation not permitted
|
||||
*/
|
||||
/libstdc\+\+\.so.*?GLIBCXX_.*?not found/,
|
||||
/Failed to load module: .*?libgiolibproxy\.so/,
|
||||
/\[.*?:ERROR:browser_main_loop\.cc.*?\] GLib: Failed to set scheduler settings: Operation not permitted/,
|
||||
|
||||
/*
|
||||
OS: macOS
|
||||
Background:
|
||||
Observed when running on GitHub runner, but not on local macOS environment.
|
||||
Original logs:
|
||||
[1571:0828/162611.460587:ERROR:trust_store_mac.cc(844)] Error parsing certificate:
|
||||
ERROR: Failed parsing extensions
|
||||
*/
|
||||
/ERROR:trust_store_mac\.cc.*?Error parsing certificate:/,
|
||||
/ERROR: Failed parsing extensions/,
|
||||
|
||||
/*
|
||||
OS: Linux (GitHub Actions)
|
||||
Background:
|
||||
Occur during Electron's GPU process initialization. Common in headless CI/CD environments.
|
||||
Not indicative of a problem in typical desktop environments.
|
||||
Original logs:
|
||||
[3548:0828/162502.835833:ERROR:viz_main_impl.cc(186)] Exiting GPU process due to errors during initialization
|
||||
[3627:0828/162503.133178:ERROR:viz_main_impl.cc(186)] Exiting GPU process due to errors during initialization
|
||||
[3621:0828/162503.420173:ERROR:command_buffer_proxy_impl.cc(128)] ContextResult::kTransientFailure: Failed to send GpuControl.CreateCommandBuffer.
|
||||
*/
|
||||
/ERROR:viz_main_impl\.cc.*?Exiting GPU process due to errors during initialization/,
|
||||
/ERROR:command_buffer_proxy_impl\.cc.*?ContextResult::kTransientFailure: Failed to send GpuControl\.CreateCommandBuffer\./,
|
||||
];
|
||||
@@ -0,0 +1,46 @@
import { join } from 'path';
import { readdir } from 'fs/promises';
import { die } from '../../../utils/log';
import { exists } from '../../../utils/io';
import { getAppName } from '../../../utils/npm';

export async function findByFilePattern(
  pattern: string,
  directory: string,
  projectRootDir: string,
): Promise<ArtifactLocation> {
  if (!directory) { throw new Error('Missing directory'); }
  if (!pattern) { throw new Error('Missing file pattern'); }

  if (!await exists(directory)) {
    return die(`Directory does not exist: ${directory}`);
  }

  const directoryContents = await readdir(directory);
  const appName = await getAppName(projectRootDir);
  const regexPattern = pattern
    /* eslint-disable no-template-curly-in-string */
    .replaceAll('${name}', escapeRegExp(appName))
    .replaceAll('${version}', '\\d+\\.\\d+\\.\\d+')
    .replaceAll('${ext}', '.*');
    /* eslint-enable no-template-curly-in-string */
  const regex = new RegExp(`^${regexPattern}$`);
  const foundFileNames = directoryContents.filter((file) => regex.test(file));
  if (!foundFileNames.length) {
    return die(`No files found matching pattern "${pattern}" in ${directory} directory.`);
  }
  if (foundFileNames.length > 1) {
    return die(`Found multiple files matching pattern "${pattern}": ${foundFileNames.join(', ')}`);
  }
  return {
    absolutePath: join(directory, foundFileNames[0]),
  };
}

function escapeRegExp(string: string) {
  return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
}

interface ArtifactLocation {
  readonly absolutePath?: string;
}
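To make the placeholder substitution above concrete, here is a rough illustration of what the Linux extractor's pattern expands to after `${name}` and `${version}` are replaced; the app name `my-app` is hypothetical.

```ts
// Illustrative only: the pattern '${name}-${version}.AppImage' for a hypothetical app
// named 'my-app' expands to roughly the following regular expression:
const regex = /^my-app-\d+\.\d+\.\d+.AppImage$/;
console.log(regex.test('my-app-0.12.3.AppImage')); // true
console.log(regex.test('my-app-latest.AppImage')); // false: version must be numeric
```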
@@ -0,0 +1,4 @@
export interface ExtractionResult {
  readonly appExecutablePath: string;
  readonly cleanup?: () => Promise<void>;
}
@@ -0,0 +1,40 @@
import { access, chmod } from 'fs/promises';
import { constants } from 'fs';
import { log } from '../../utils/log';
import { ExtractionResult } from './common/extraction-result';
import { findByFilePattern } from './common/app-artifact-locator';

export async function prepareLinuxApp(
  desktopDistPath: string,
  projectRootDir: string,
): Promise<ExtractionResult> {
  const { absolutePath: appFile } = await findByFilePattern(
    // eslint-disable-next-line no-template-curly-in-string
    '${name}-${version}.AppImage',
    desktopDistPath,
    projectRootDir,
  );
  await makeExecutable(appFile);
  return {
    appExecutablePath: appFile,
  };
}

async function makeExecutable(appFile: string): Promise<void> {
  if (!appFile) { throw new Error('missing file'); }
  if (await isExecutable(appFile)) {
    log('AppImage is already executable.');
    return;
  }
  log('Making it executable...');
  await chmod(appFile, 0o755);
}

async function isExecutable(file: string): Promise<boolean> {
  try {
    await access(file, constants.X_OK);
    return true;
  } catch {
    return false;
  }
}
@@ -0,0 +1,80 @@
|
||||
import { runCommand } from '../../utils/run-command';
|
||||
import { exists } from '../../utils/io';
|
||||
import { log, die, LogLevel } from '../../utils/log';
|
||||
import { sleep } from '../../utils/sleep';
|
||||
import { ExtractionResult } from './common/extraction-result';
|
||||
import { findByFilePattern } from './common/app-artifact-locator';
|
||||
|
||||
export async function prepareMacOsApp(
|
||||
desktopDistPath: string,
|
||||
projectRootDir: string,
|
||||
): Promise<ExtractionResult> {
|
||||
const { absolutePath: dmgPath } = await findByFilePattern(
|
||||
// eslint-disable-next-line no-template-curly-in-string
|
||||
'${name}-${version}.dmg',
|
||||
desktopDistPath,
|
||||
projectRootDir,
|
||||
);
|
||||
const { mountPath } = await mountDmg(dmgPath);
|
||||
const appPath = await findMacAppExecutablePath(mountPath);
|
||||
return {
|
||||
appExecutablePath: appPath,
|
||||
cleanup: async () => {
|
||||
log('Cleaning up resources...');
|
||||
await detachMount(mountPath);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function mountDmg(
|
||||
dmgFile: string,
|
||||
) {
|
||||
const { stdout: hdiutilOutput, error } = await runCommand(
|
||||
`hdiutil attach '${dmgFile}'`,
|
||||
);
|
||||
if (error) {
|
||||
die(`Failed to mount DMG file at ${dmgFile}.\n${error}`);
|
||||
}
|
||||
const mountPathMatch = hdiutilOutput.match(/\/Volumes\/[^\n]+/);
|
||||
const mountPath = mountPathMatch ? mountPathMatch[0] : null;
|
||||
return {
|
||||
mountPath,
|
||||
};
|
||||
}
|
||||
|
||||
async function findMacAppExecutablePath(
|
||||
mountPath: string,
|
||||
): Promise<string> {
|
||||
const { stdout: findOutput, error } = await runCommand(
|
||||
`find '${mountPath}' -maxdepth 1 -type d -name "*.app"`,
|
||||
);
|
||||
if (error) {
|
||||
return die(`Failed to find executable path at mount path ${mountPath}\n${error}`);
|
||||
}
|
||||
const appFolder = findOutput.trim();
|
||||
const appName = appFolder.split('/').pop().replace('.app', '');
|
||||
const appPath = `${appFolder}/Contents/MacOS/${appName}`;
|
||||
if (await exists(appPath)) {
|
||||
log(`Application is located at ${appPath}`);
|
||||
} else {
|
||||
return die(`Application does not exist at ${appPath}`);
|
||||
}
|
||||
return appPath;
|
||||
}
|
||||
|
||||
async function detachMount(
|
||||
mountPath: string,
|
||||
retries = 5,
|
||||
) {
|
||||
const { error } = await runCommand(`hdiutil detach '${mountPath}'`);
|
||||
if (error) {
|
||||
if (retries <= 0) {
|
||||
log(`Failed to detach mount after multiple attempts: ${mountPath}\n${error}`, LogLevel.Warn);
|
||||
return;
|
||||
}
|
||||
await sleep(500);
|
||||
await detachMount(mountPath, retries - 1);
|
||||
return;
|
||||
}
|
||||
log(`Successfully detached from ${mountPath}`);
|
||||
}
|
||||
@@ -0,0 +1,58 @@
|
||||
import { mkdtemp, rm } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { tmpdir } from 'os';
|
||||
import { exists } from '../../utils/io';
|
||||
import { log, die, LogLevel } from '../../utils/log';
|
||||
import { runCommand } from '../../utils/run-command';
|
||||
import { ExtractionResult } from './common/extraction-result';
|
||||
import { findByFilePattern } from './common/app-artifact-locator';
|
||||
|
||||
export async function prepareWindowsApp(
|
||||
desktopDistPath: string,
|
||||
projectRootDir: string,
|
||||
): Promise<ExtractionResult> {
|
||||
const workdir = await mkdtemp(join(tmpdir(), 'win-nsis-installation-'));
|
||||
if (await exists(workdir)) {
|
||||
log(`Temporary directory ${workdir} already exists, cleaning up...`);
|
||||
await rm(workdir, { recursive: true });
|
||||
}
|
||||
const appExecutablePath = await installNsis(workdir, desktopDistPath, projectRootDir);
|
||||
return {
|
||||
appExecutablePath,
|
||||
cleanup: async () => {
|
||||
log(`Cleaning up working directory ${workdir}...`);
|
||||
try {
|
||||
await rm(workdir, { recursive: true, force: true });
|
||||
} catch (error) {
|
||||
log(`Could not cleanup the working directory: ${error.message}`, LogLevel.Error);
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function installNsis(
|
||||
installationPath: string,
|
||||
desktopDistPath: string,
|
||||
projectRootDir: string,
|
||||
): Promise<string> {
|
||||
const { absolutePath: installerPath } = await findByFilePattern(
|
||||
// eslint-disable-next-line no-template-curly-in-string
|
||||
'${name}-Setup-${version}.exe',
|
||||
desktopDistPath,
|
||||
projectRootDir,
|
||||
);
|
||||
log(`Silently installing contents of ${installerPath} to ${installationPath}...`);
|
||||
const { error } = await runCommand(`"${installerPath}" /S /D=${installationPath}`);
|
||||
if (error) {
|
||||
return die(`Failed to install.\n${error}`);
|
||||
}
|
||||
|
||||
const { absolutePath: appExecutablePath } = await findByFilePattern(
|
||||
// eslint-disable-next-line no-template-curly-in-string
|
||||
'${name}.exe',
|
||||
installationPath,
|
||||
projectRootDir,
|
||||
);
|
||||
|
||||
return appExecutablePath;
|
||||
}
|
||||
@@ -0,0 +1,200 @@
|
||||
import { spawn } from 'child_process';
|
||||
import { log, LogLevel, die } from '../utils/log';
|
||||
import { captureScreen } from './system-capture/screen-capture';
|
||||
import { captureWindowTitles } from './system-capture/window-title-capture';
|
||||
import type { ChildProcess } from 'child_process';
|
||||
|
||||
const TERMINATION_GRACE_PERIOD_IN_SECONDS = 20;
|
||||
const TERMINATION_CHECK_INTERVAL_IN_MS = 1000;
|
||||
const WINDOW_TITLE_CAPTURE_INTERVAL_IN_MS = 100;
|
||||
|
||||
export function runApplication(
|
||||
appFile: string,
|
||||
executionDurationInSeconds: number,
|
||||
enableScreenshot: boolean,
|
||||
screenshotPath: string,
|
||||
): Promise<ApplicationExecutionResult> {
|
||||
if (!appFile) {
|
||||
throw new Error('Missing app file');
|
||||
}
|
||||
|
||||
logDetails(appFile, executionDurationInSeconds);
|
||||
|
||||
const processDetails: ApplicationProcessDetails = {
|
||||
stderrData: '',
|
||||
stdoutData: '',
|
||||
explicitlyKilled: false,
|
||||
windowTitles: [],
|
||||
isCrashed: false,
|
||||
isDone: false,
|
||||
process: undefined,
|
||||
resolve: () => { /* NOOP */ },
|
||||
};
|
||||
|
||||
const process = spawn(appFile);
|
||||
processDetails.process = process;
|
||||
|
||||
return new Promise((resolve) => {
|
||||
processDetails.resolve = resolve;
|
||||
beginCapturingTitles(process.pid, processDetails);
|
||||
handleProcessEvents(
|
||||
processDetails,
|
||||
enableScreenshot,
|
||||
screenshotPath,
|
||||
executionDurationInSeconds,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
interface ApplicationExecutionResult {
|
||||
readonly stderr: string,
|
||||
readonly stdout: string,
|
||||
readonly windowTitles: readonly string[],
|
||||
readonly isCrashed: boolean,
|
||||
}
|
||||
|
||||
interface ApplicationProcessDetails {
|
||||
stderrData: string;
|
||||
stdoutData: string;
|
||||
explicitlyKilled: boolean;
|
||||
windowTitles: Array<string>;
|
||||
isCrashed: boolean;
|
||||
isDone: boolean;
|
||||
process: ChildProcess;
|
||||
resolve: (value: ApplicationExecutionResult) => void;
|
||||
}
|
||||
|
||||
function logDetails(
|
||||
appFile: string,
|
||||
executionDurationInSeconds: number,
|
||||
): void {
|
||||
log(
|
||||
[
|
||||
'Executing the app to check for errors...',
|
||||
`Maximum execution time: ${executionDurationInSeconds} seconds`,
|
||||
`Application path: ${appFile}`,
|
||||
].join('\n\t'),
|
||||
);
|
||||
}
|
||||
|
||||
function beginCapturingTitles(
|
||||
processId: number,
|
||||
processDetails: ApplicationProcessDetails,
|
||||
): void {
|
||||
const capture = async () => {
|
||||
const titles = await captureWindowTitles(processId);
|
||||
|
||||
(titles || []).forEach((title) => {
|
||||
if (!title?.length) {
|
||||
return;
|
||||
}
|
||||
if (!processDetails.windowTitles.includes(title)) {
|
||||
log(`New window title captured: ${title}`);
|
||||
processDetails.windowTitles.push(title);
|
||||
}
|
||||
});
|
||||
|
||||
if (!processDetails.isDone) {
|
||||
setTimeout(capture, WINDOW_TITLE_CAPTURE_INTERVAL_IN_MS);
|
||||
}
|
||||
};
|
||||
|
||||
capture();
|
||||
}
|
||||
|
||||
function handleProcessEvents(
|
||||
processDetails: ApplicationProcessDetails,
|
||||
enableScreenshot: boolean,
|
||||
screenshotPath: string,
|
||||
executionDurationInSeconds: number,
|
||||
): void {
|
||||
const { process } = processDetails;
|
||||
process.stderr.on('data', (data) => {
|
||||
processDetails.stderrData += data.toString();
|
||||
});
|
||||
process.stdout.on('data', (data) => {
|
||||
processDetails.stdoutData += data.toString();
|
||||
});
|
||||
|
||||
process.on('error', (error) => {
|
||||
die(`An issue spawning the child process: ${error}`);
|
||||
});
|
||||
|
||||
process.on('exit', async (code) => {
|
||||
await onProcessExit(code, processDetails, enableScreenshot, screenshotPath);
|
||||
});
|
||||
|
||||
setTimeout(async () => {
|
||||
await onExecutionLimitReached(processDetails, enableScreenshot, screenshotPath);
|
||||
}, executionDurationInSeconds * 1000);
|
||||
}
|
||||
|
||||
async function onProcessExit(
|
||||
code: number,
|
||||
processDetails: ApplicationProcessDetails,
|
||||
enableScreenshot: boolean,
|
||||
screenshotPath: string,
|
||||
): Promise<void> {
|
||||
log(`Application exited${code === null || Number.isNaN(code) ? '' : ` with code ${code}`}.`);
|
||||
|
||||
if (processDetails.explicitlyKilled) return;
|
||||
|
||||
processDetails.isCrashed = true;
|
||||
|
||||
if (enableScreenshot) {
|
||||
await captureScreen(screenshotPath);
|
||||
}
|
||||
|
||||
finishProcess(processDetails);
|
||||
}
|
||||
|
||||
async function onExecutionLimitReached(
|
||||
processDetails: ApplicationProcessDetails,
|
||||
enableScreenshot: boolean,
|
||||
screenshotPath: string,
|
||||
): Promise<void> {
|
||||
if (enableScreenshot) {
|
||||
await captureScreen(screenshotPath);
|
||||
}
|
||||
|
||||
processDetails.explicitlyKilled = true;
|
||||
await terminateGracefully(processDetails.process);
|
||||
finishProcess(processDetails);
|
||||
}
|
||||
|
||||
function finishProcess(processDetails: ApplicationProcessDetails): void {
|
||||
processDetails.isDone = true;
|
||||
processDetails.resolve({
|
||||
stderr: processDetails.stderrData,
|
||||
stdout: processDetails.stdoutData,
|
||||
windowTitles: [...processDetails.windowTitles],
|
||||
isCrashed: processDetails.isCrashed,
|
||||
});
|
||||
}
|
||||
|
||||
async function terminateGracefully(
|
||||
process: ChildProcess,
|
||||
): Promise<void> {
|
||||
let elapsedSeconds = 0;
|
||||
log('Attempting to terminate the process gracefully...');
|
||||
process.kill('SIGTERM');
|
||||
|
||||
return new Promise((resolve) => {
|
||||
const checkInterval = setInterval(() => {
|
||||
elapsedSeconds += TERMINATION_CHECK_INTERVAL_IN_MS / 1000;
|
||||
|
||||
if (elapsedSeconds >= TERMINATION_GRACE_PERIOD_IN_SECONDS) {
|
||||
process.kill('SIGKILL');
|
||||
log('Process did not terminate gracefully within the grace period. Forcing termination.', LogLevel.Warn);
|
||||
clearInterval(checkInterval);
|
||||
resolve();
|
||||
}
|
||||
}, TERMINATION_CHECK_INTERVAL_IN_MS);
|
||||
|
||||
process.on('exit', () => {
|
||||
log('Process terminated gracefully.');
|
||||
clearInterval(checkInterval);
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
}
|
||||
@@ -0,0 +1,63 @@
|
||||
import { unlink } from 'fs/promises';
|
||||
import { runCommand } from '../../utils/run-command';
|
||||
import { log, LogLevel } from '../../utils/log';
|
||||
import { CURRENT_PLATFORM, SupportedPlatform } from '../../utils/platform';
|
||||
import { exists } from '../../utils/io';
|
||||
|
||||
export async function captureScreen(
|
||||
imagePath: string,
|
||||
): Promise<void> {
|
||||
if (!imagePath) {
|
||||
throw new Error('Path for screenshot not provided');
|
||||
}
|
||||
|
||||
if (await exists(imagePath)) {
|
||||
log(`Screenshot file already exists at ${imagePath}. It will be overwritten.`, LogLevel.Warn);
|
||||
unlink(imagePath);
|
||||
}
|
||||
|
||||
const platformCommands: {
|
||||
readonly [K in SupportedPlatform]: string;
|
||||
} = {
|
||||
[SupportedPlatform.macOS]: `screencapture -x ${imagePath}`,
|
||||
[SupportedPlatform.Linux]: `import -window root ${imagePath}`,
|
||||
[SupportedPlatform.Windows]: `powershell -NoProfile -EncodedCommand ${encodeForPowershell(getScreenshotPowershellScript(imagePath))}`,
|
||||
};
|
||||
|
||||
const commandForPlatform = platformCommands[CURRENT_PLATFORM];
|
||||
|
||||
if (!commandForPlatform) {
|
||||
log(`Screenshot capture not supported on: ${SupportedPlatform[CURRENT_PLATFORM]}`, LogLevel.Warn);
|
||||
return;
|
||||
}
|
||||
|
||||
log(`Capturing screenshot to ${imagePath} using command:\n\t> ${commandForPlatform}`);
|
||||
|
||||
const { error } = await runCommand(commandForPlatform);
|
||||
if (error) {
|
||||
log(`Failed to capture screenshot.\n${error}`, LogLevel.Warn);
|
||||
return;
|
||||
}
|
||||
log(`Captured screenshot to ${imagePath}.`);
|
||||
}
|
||||
|
||||
function getScreenshotPowershellScript(imagePath: string): string {
|
||||
return `
|
||||
$ProgressPreference = 'SilentlyContinue' # Do not pollute stderr
|
||||
Add-Type -AssemblyName System.Windows.Forms
|
||||
$screenBounds = [System.Windows.Forms.Screen]::PrimaryScreen.Bounds
|
||||
|
||||
$bmp = New-Object System.Drawing.Bitmap $screenBounds.Width, $screenBounds.Height
|
||||
$graphics = [System.Drawing.Graphics]::FromImage($bmp)
|
||||
$graphics.CopyFromScreen([System.Drawing.Point]::Empty, [System.Drawing.Point]::Empty, $screenBounds.Size)
|
||||
|
||||
$bmp.Save('${imagePath}')
|
||||
$graphics.Dispose()
|
||||
$bmp.Dispose()
|
||||
`;
|
||||
}
|
||||
|
||||
function encodeForPowershell(script: string): string {
|
||||
const buffer = Buffer.from(script, 'utf16le');
|
||||
return buffer.toString('base64');
|
||||
}
|
||||
@@ -0,0 +1,117 @@
|
||||
import { runCommand } from '../../utils/run-command';
|
||||
import { log, LogLevel } from '../../utils/log';
|
||||
import { SupportedPlatform, CURRENT_PLATFORM } from '../../utils/platform';
|
||||
|
||||
export async function captureWindowTitles(processId: number) {
|
||||
if (!processId) { throw new Error('Missing process ID.'); }
|
||||
|
||||
const captureFunction = windowTitleCaptureFunctions[CURRENT_PLATFORM];
|
||||
if (!captureFunction) {
|
||||
log(`Cannot capture window title, unsupported OS: ${SupportedPlatform[CURRENT_PLATFORM]}`, LogLevel.Warn);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return captureFunction(processId);
|
||||
}
|
||||
|
||||
const windowTitleCaptureFunctions: {
|
||||
readonly [K in SupportedPlatform]: (processId: number) => Promise<string[]>;
|
||||
} = {
|
||||
[SupportedPlatform.macOS]: (processId) => captureTitlesOnMac(processId),
|
||||
[SupportedPlatform.Linux]: (processId) => captureTitlesOnLinux(processId),
|
||||
[SupportedPlatform.Windows]: (processId) => captureTitlesOnWindows(processId),
|
||||
};
|
||||
|
||||
async function captureTitlesOnWindows(processId: number): Promise<string[]> {
|
||||
if (!processId) { throw new Error('Missing process ID.'); }
|
||||
|
||||
const { stdout: tasklistOutput, error } = await runCommand(
|
||||
`tasklist /FI "PID eq ${processId}" /fo list /v`,
|
||||
);
|
||||
if (error) {
|
||||
log(`Failed to retrieve window title.\n${error}`, LogLevel.Warn);
|
||||
return [];
|
||||
}
|
||||
const regex = /Window Title:\s*(.*)/;
|
||||
const match = regex.exec(tasklistOutput);
|
||||
if (match && match.length > 1 && match[1]) {
|
||||
const title = match[1].trim();
|
||||
if (title === 'N/A') {
|
||||
return [];
|
||||
}
|
||||
return [title];
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
async function captureTitlesOnLinux(processId: number): Promise<string[]> {
|
||||
if (!processId) { throw new Error('Missing process ID.'); }
|
||||
|
||||
const { stdout: windowIdsOutput, error: windowIdError } = await runCommand(
|
||||
`xdotool search --pid '${processId}'`,
|
||||
);
|
||||
|
||||
if (windowIdError || !windowIdsOutput) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const windowIds = windowIdsOutput.trim().split('\n');
|
||||
|
||||
const titles = await Promise.all(windowIds.map(async (windowId) => {
|
||||
const { stdout: titleOutput, error: titleError } = await runCommand(
|
||||
`xprop -id ${windowId} | grep "WM_NAME(STRING)" | cut -d '=' -f 2 | sed 's/^[[:space:]]*"\\(.*\\)"[[:space:]]*$/\\1/'`,
|
||||
);
|
||||
if (titleError || !titleOutput) {
|
||||
return undefined;
|
||||
}
|
||||
return titleOutput.trim();
|
||||
}));
|
||||
|
||||
return titles.filter(Boolean);
|
||||
}
|
||||
|
||||
let hasAssistiveAccessOnMac = true;
|
||||
|
||||
async function captureTitlesOnMac(processId: number): Promise<string[]> {
|
||||
if (!processId) { throw new Error('Missing process ID.'); }
|
||||
if (!hasAssistiveAccessOnMac) {
|
||||
return [];
|
||||
}
|
||||
const script = `
|
||||
tell application "System Events"
|
||||
try
|
||||
set targetProcess to first process whose unix id is ${processId}
|
||||
on error
|
||||
return
|
||||
end try
|
||||
tell targetProcess
|
||||
set allWindowNames to {}
|
||||
repeat with aWindow in windows
|
||||
set end of allWindowNames to name of aWindow
|
||||
end repeat
|
||||
return allWindowNames
|
||||
end tell
|
||||
end tell
|
||||
`;
|
||||
const argument = script.trim()
|
||||
.split(/[\r\n]+/)
|
||||
.map((line) => `-e '${line.trim()}'`)
|
||||
.join(' ');
|
||||
|
||||
const { stdout: titleOutput, error } = await runCommand(`osascript ${argument}`);
|
||||
if (error) {
|
||||
let errorMessage = '';
|
||||
if (error.includes('-25211')) {
|
||||
errorMessage += 'Capturing window title requires assistive access. You do not have it.\n';
|
||||
hasAssistiveAccessOnMac = false;
|
||||
}
|
||||
errorMessage += error;
|
||||
log(errorMessage, LogLevel.Warn);
|
||||
return [];
|
||||
}
|
||||
const title = titleOutput?.trim();
|
||||
if (!title) {
|
||||
return [];
|
||||
}
|
||||
return [title];
|
||||
}
|
||||
@@ -0,0 +1,36 @@
import { log } from './utils/log';

export enum CommandLineFlag {
  ForceRebuild,
  TakeScreenshot,
}

export const COMMAND_LINE_FLAGS: {
  readonly [key in CommandLineFlag]: string;
} = Object.freeze({
  [CommandLineFlag.ForceRebuild]: '--build',
  [CommandLineFlag.TakeScreenshot]: '--screenshot',
});

export function logCurrentArgs(): void {
  const processArguments = getProcessArguments();
  if (!processArguments.length) {
    log('No additional arguments provided.');
    return;
  }
  log(`Arguments: ${processArguments.join(', ')}`);
}

export function hasCommandLineFlag(flag: CommandLineFlag): boolean {
  return getProcessArguments()
    .includes(COMMAND_LINE_FLAGS[flag]);
}

/*
  Fetches process arguments dynamically each time the function is called.
  This design allows for runtime modifications to process.argv, supporting scenarios
  where the command-line arguments might be altered dynamically.
*/
function getProcessArguments(): string[] {
  return process.argv.slice(2);
}
@@ -0,0 +1,7 @@
import { join } from 'path';

export const DESKTOP_BUILD_COMMAND = 'npm run electron:prebuild && npm run electron:build -- --publish never';
export const PROJECT_DIR = process.cwd();
export const DESKTOP_DIST_PATH = join(PROJECT_DIR, 'dist');
export const APP_EXECUTION_DURATION_IN_SECONDS = 60; // Long enough for CI runners
export const SCREENSHOT_PATH = join(PROJECT_DIR, 'screenshot.png');
@@ -0,0 +1,3 @@
import { main } from './main';

await main();
@@ -0,0 +1,72 @@
|
||||
import { logCurrentArgs, CommandLineFlag, hasCommandLineFlag } from './cli-args';
|
||||
import { log, die } from './utils/log';
|
||||
import { ensureNpmProjectDir, npmInstall, npmBuild } from './utils/npm';
|
||||
import { clearAppLogFiles } from './app/app-logs';
|
||||
import { checkForErrors } from './app/check-for-errors';
|
||||
import { runApplication } from './app/runner.js';
|
||||
import { CURRENT_PLATFORM, SupportedPlatform } from './utils/platform';
|
||||
import { prepareLinuxApp } from './app/extractors/linux';
|
||||
import { prepareWindowsApp } from './app/extractors/windows.js';
|
||||
import { prepareMacOsApp } from './app/extractors/macos';
|
||||
import {
|
||||
DESKTOP_BUILD_COMMAND,
|
||||
PROJECT_DIR,
|
||||
DESKTOP_DIST_PATH,
|
||||
APP_EXECUTION_DURATION_IN_SECONDS,
|
||||
SCREENSHOT_PATH,
|
||||
} from './config';
|
||||
import { indentText } from './utils/text';
|
||||
import { ExtractionResult } from './app/extractors/common/extraction-result';
|
||||
|
||||
export async function main(): Promise<void> {
|
||||
logCurrentArgs();
|
||||
await ensureNpmProjectDir(PROJECT_DIR);
|
||||
await npmInstall(PROJECT_DIR);
|
||||
await npmBuild(
|
||||
PROJECT_DIR,
|
||||
DESKTOP_BUILD_COMMAND,
|
||||
DESKTOP_DIST_PATH,
|
||||
hasCommandLineFlag(CommandLineFlag.ForceRebuild),
|
||||
);
|
||||
await clearAppLogFiles(PROJECT_DIR);
|
||||
const {
|
||||
stderr, stdout, isCrashed, windowTitles,
|
||||
} = await extractAndRun();
|
||||
if (stdout) {
|
||||
log(`Output (stdout) from application execution:\n${indentText(stdout, 1)}`);
|
||||
}
|
||||
if (isCrashed) {
|
||||
die('The application encountered an error during its execution.');
|
||||
}
|
||||
await checkForErrors(stderr, windowTitles, PROJECT_DIR);
|
||||
log('🥳🎈 Success! Application completed without any runtime errors.');
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
async function extractAndRun() {
|
||||
const extractors: {
|
||||
readonly [K in SupportedPlatform]: () => Promise<ExtractionResult>;
|
||||
} = {
|
||||
[SupportedPlatform.macOS]: () => prepareMacOsApp(DESKTOP_DIST_PATH, PROJECT_DIR),
|
||||
[SupportedPlatform.Linux]: () => prepareLinuxApp(DESKTOP_DIST_PATH, PROJECT_DIR),
|
||||
[SupportedPlatform.Windows]: () => prepareWindowsApp(DESKTOP_DIST_PATH, PROJECT_DIR),
|
||||
};
|
||||
const extractor = extractors[CURRENT_PLATFORM];
|
||||
if (!extractor) {
|
||||
throw new Error(`Platform not supported: ${SupportedPlatform[CURRENT_PLATFORM]}`);
|
||||
}
|
||||
const { appExecutablePath, cleanup } = await extractor();
|
||||
try {
|
||||
return await runApplication(
|
||||
appExecutablePath,
|
||||
APP_EXECUTION_DURATION_IN_SECONDS,
|
||||
hasCommandLineFlag(CommandLineFlag.TakeScreenshot),
|
||||
SCREENSHOT_PATH,
|
||||
);
|
||||
} finally {
|
||||
if (cleanup) {
|
||||
log('Cleaning up post-execution resources...');
|
||||
await cleanup();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,21 @@
import { readdir, access } from 'fs/promises';
import { constants } from 'fs';

export async function exists(path: string): Promise<boolean> {
  if (!path) { throw new Error('Missing path'); }
  try {
    await access(path, constants.F_OK);
    return true;
  } catch {
    return false;
  }
}

export async function isDirMissingOrEmpty(dir: string): Promise<boolean> {
  if (!dir) { throw new Error('Missing directory'); }
  if (!await exists(dir)) {
    return true;
  }
  const contents = await readdir(dir);
  return contents.length === 0;
}
@@ -0,0 +1,68 @@
|
||||
export enum LogLevel {
|
||||
Info,
|
||||
Warn,
|
||||
Error,
|
||||
}
|
||||
|
||||
export function log(message: string, level = LogLevel.Info): void {
|
||||
const timestamp = new Date().toISOString();
|
||||
const config = LOG_LEVEL_CONFIG[level] || LOG_LEVEL_CONFIG[LogLevel.Info];
|
||||
const logLevelText = `${getColorCode(config.color)}${LOG_LEVEL_LABELS[level]}${getColorCode(TextColor.Reset)}`;
|
||||
const formattedMessage = `[${timestamp}][${logLevelText}] ${message}`;
|
||||
config.method(formattedMessage);
|
||||
}
|
||||
|
||||
export function die(message: string): never {
|
||||
log(message, LogLevel.Error);
|
||||
return process.exit(1);
|
||||
}
|
||||
|
||||
enum TextColor {
|
||||
Reset,
|
||||
LightRed,
|
||||
Yellow,
|
||||
LightBlue,
|
||||
}
|
||||
|
||||
function getColorCode(color: TextColor): string {
|
||||
return COLOR_CODE_MAPPING[color];
|
||||
}
|
||||
|
||||
const LOG_LEVEL_LABELS: {
|
||||
readonly [K in LogLevel]: string;
|
||||
} = {
|
||||
[LogLevel.Info]: 'INFO',
|
||||
[LogLevel.Error]: 'ERROR',
|
||||
[LogLevel.Warn]: 'WARN',
|
||||
};
|
||||
|
||||
const COLOR_CODE_MAPPING: {
|
||||
readonly [K in TextColor]: string;
|
||||
} = {
|
||||
[TextColor.Reset]: '\x1b[0m',
|
||||
[TextColor.LightRed]: '\x1b[91m',
|
||||
[TextColor.Yellow]: '\x1b[33m',
|
||||
[TextColor.LightBlue]: '\x1b[94m',
|
||||
};
|
||||
|
||||
interface ColorLevelConfig {
|
||||
readonly color: TextColor;
|
||||
readonly method: (...data: unknown[]) => void;
|
||||
}
|
||||
|
||||
const LOG_LEVEL_CONFIG: {
|
||||
readonly [K in LogLevel]: ColorLevelConfig;
|
||||
} = {
|
||||
[LogLevel.Info]: {
|
||||
color: TextColor.LightBlue,
|
||||
method: console.log,
|
||||
},
|
||||
[LogLevel.Warn]: {
|
||||
color: TextColor.Yellow,
|
||||
method: console.warn,
|
||||
},
|
||||
[LogLevel.Error]: {
|
||||
color: TextColor.LightRed,
|
||||
method: console.error,
|
||||
},
|
||||
};
|
||||
@@ -0,0 +1,127 @@
|
||||
import { join } from 'path';
|
||||
import { rm, readFile } from 'fs/promises';
|
||||
import { exists, isDirMissingOrEmpty } from './io';
|
||||
import { CommandResult, runCommand } from './run-command';
|
||||
import { LogLevel, die, log } from './log';
|
||||
import { sleep } from './sleep';
|
||||
import type { ExecOptions } from 'child_process';
|
||||
|
||||
const NPM_INSTALL_MAX_RETRIES = 3;
|
||||
const NPM_INSTALL_RETRY_DELAY_MS = 5 /* seconds */ * 1000;
|
||||
|
||||
export async function ensureNpmProjectDir(projectDir: string): Promise<void> {
|
||||
if (!projectDir) { throw new Error('missing project directory'); }
|
||||
if (!await exists(join(projectDir, 'package.json'))) {
|
||||
die(`\`package.json\` not found in project directory: ${projectDir}`);
|
||||
}
|
||||
}
|
||||
|
||||
export async function npmInstall(projectDir: string): Promise<void> {
|
||||
if (!projectDir) { throw new Error('missing project directory'); }
|
||||
const npmModulesPath = join(projectDir, 'node_modules');
|
||||
if (!await isDirMissingOrEmpty(npmModulesPath)) {
|
||||
log(`Directory "${npmModulesPath}" exists and has content. Skipping \`npm install\`.`);
|
||||
return;
|
||||
}
|
||||
log('Starting dependency installation...');
|
||||
const { error } = await executeWithRetry('npm install --loglevel=error', {
|
||||
cwd: projectDir,
|
||||
}, NPM_INSTALL_MAX_RETRIES, NPM_INSTALL_RETRY_DELAY_MS);
|
||||
if (error) {
|
||||
die(error);
|
||||
}
|
||||
log('Installed dependencies...');
|
||||
}
|
||||
|
||||
export async function npmBuild(
|
||||
projectDir: string,
|
||||
buildCommand: string,
|
||||
distDir: string,
|
||||
forceRebuild: boolean,
|
||||
): Promise<void> {
|
||||
if (!projectDir) { throw new Error('missing project directory'); }
|
||||
if (!buildCommand) { throw new Error('missing build command'); }
|
||||
if (!distDir) { throw new Error('missing distribution directory'); }
|
||||
|
||||
const isMissingBuild = await isDirMissingOrEmpty(distDir);
|
||||
|
||||
if (!isMissingBuild && !forceRebuild) {
|
||||
log(`Directory "${distDir}" exists and has content. Skipping build: '${buildCommand}'.`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (forceRebuild) {
|
||||
log(`Removing directory "${distDir}" for a clean build (triggered by \`--build\` flag).`);
|
||||
await rm(distDir, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
log('Building project...');
|
||||
const { error } = await runCommand(buildCommand, {
|
||||
cwd: projectDir,
|
||||
});
|
||||
if (error) {
|
||||
log(error, LogLevel.Warn); // Cannot disable Vue CLI errors, stderr contains false-positives.
|
||||
}
|
||||
}
|
||||
|
||||
const appNameCache = new Map<string, string>();
|
||||
|
||||
export async function getAppName(projectDir: string): Promise<string> {
|
||||
if (!projectDir) { throw new Error('missing project directory'); }
|
||||
if (appNameCache.has(projectDir)) {
|
||||
return appNameCache.get(projectDir);
|
||||
}
|
||||
const packageData = await readPackageJsonContents(projectDir);
|
||||
try {
|
||||
const packageJson = JSON.parse(packageData);
|
||||
const name = packageJson.name as string;
|
||||
if (!name) {
|
||||
return die(`The \`package.json\` file doesn't specify a name: ${packageData}`);
|
||||
}
|
||||
appNameCache.set(projectDir, name);
|
||||
return name;
|
||||
} catch (error) {
|
||||
return die(`Unable to parse \`package.json\`. Error: ${error}\nContent: ${packageData}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function readPackageJsonContents(projectDir: string): Promise<string> {
|
||||
if (!projectDir) { throw new Error('missing project directory'); }
|
||||
const packagePath = join(projectDir, 'package.json');
|
||||
if (!await exists(packagePath)) {
|
||||
return die(`\`package.json\` file not found at ${packagePath}`);
|
||||
}
|
||||
try {
|
||||
const packageData = await readFile(packagePath, 'utf8');
|
||||
return packageData;
|
||||
} catch (error) {
|
||||
log(`Error reading \`package.json\` from ${packagePath}.`, LogLevel.Error);
|
||||
return die(`Error detail: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function executeWithRetry(
|
||||
command: string,
|
||||
options: ExecOptions,
|
||||
maxRetries: number,
|
||||
retryDelayInMs: number,
|
||||
currentAttempt = 1,
|
||||
): Promise<CommandResult> {
|
||||
const result = await runCommand(command, options);
|
||||
|
||||
if (!result.error || currentAttempt >= maxRetries) {
|
||||
return result;
|
||||
}
|
||||
|
||||
log(`Attempt ${currentAttempt} failed. Retrying in ${retryDelayInMs / 1000} seconds...`);
|
||||
await sleep(retryDelayInMs);
|
||||
|
||||
const retryResult = await executeWithRetry(
|
||||
command,
|
||||
options,
|
||||
maxRetries,
|
||||
retryDelayInMs,
|
||||
currentAttempt + 1,
|
||||
);
|
||||
return retryResult;
|
||||
}
|
||||
@@ -0,0 +1,31 @@
import { platform } from 'os';
import { die } from './log';

export enum SupportedPlatform {
  macOS,
  Windows,
  Linux,
}

const NODE_PLATFORM_MAPPINGS: {
  readonly [K in SupportedPlatform]: NodeJS.Platform;
} = {
  [SupportedPlatform.macOS]: 'darwin',
  [SupportedPlatform.Linux]: 'linux',
  [SupportedPlatform.Windows]: 'win32',
};

function findCurrentPlatform(): SupportedPlatform | undefined {
  const nodePlatform = platform();

  for (const key of Object.keys(NODE_PLATFORM_MAPPINGS)) {
    const keyAsSupportedPlatform = parseInt(key, 10) as SupportedPlatform;
    if (NODE_PLATFORM_MAPPINGS[keyAsSupportedPlatform] === nodePlatform) {
      return keyAsSupportedPlatform;
    }
  }

  return die(`Unsupported platform: ${nodePlatform}`);
}

export const CURRENT_PLATFORM: SupportedPlatform = findCurrentPlatform();
@@ -0,0 +1,58 @@
|
||||
import { exec } from 'child_process';
|
||||
import { indentText } from './text';
|
||||
import type { ExecOptions, ExecException } from 'child_process';
|
||||
|
||||
const TIMEOUT_IN_SECONDS = 180;
|
||||
const MAX_OUTPUT_BUFFER_SIZE = 1024 * 1024; // 1 MB
|
||||
|
||||
export function runCommand(
|
||||
command: string,
|
||||
options?: ExecOptions,
|
||||
): Promise<CommandResult> {
|
||||
return new Promise((resolve) => {
|
||||
options = {
|
||||
cwd: process.cwd(),
|
||||
timeout: TIMEOUT_IN_SECONDS * 1000,
|
||||
maxBuffer: MAX_OUTPUT_BUFFER_SIZE * 2,
|
||||
...(options ?? {}),
|
||||
};
|
||||
|
||||
exec(command, options, (error, stdout, stderr) => {
|
||||
let errorText: string | undefined;
|
||||
if (error || stderr?.length > 0) {
|
||||
errorText = formatError(command, error, stdout, stderr);
|
||||
}
|
||||
resolve({
|
||||
stdout,
|
||||
error: errorText,
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export interface CommandResult {
|
||||
readonly stdout: string;
|
||||
readonly error?: string;
|
||||
}
|
||||
|
||||
function formatError(
|
||||
command: string,
|
||||
error: ExecException | undefined,
|
||||
stdout: string | undefined,
|
||||
stderr: string | undefined,
|
||||
) {
|
||||
const errorParts = [
|
||||
'Error while running command.',
|
||||
`Command:\n${indentText(command, 1)}`,
|
||||
];
|
||||
if (error?.toString().trim()) {
|
||||
errorParts.push(`Error:\n${indentText(error.toString(), 1)}`);
|
||||
}
|
||||
if (stderr?.trim()) {
|
||||
errorParts.push(`stderr:\n${indentText(stderr, 1)}`);
|
||||
}
|
||||
if (stdout?.trim()) {
|
||||
errorParts.push(`stdout:\n${indentText(stdout, 1)}`);
|
||||
}
|
||||
return errorParts.join('\n---\n');
|
||||
}
|
||||
@@ -0,0 +1,5 @@
export function sleep(milliseconds: number) {
  return new Promise((resolve) => {
    setTimeout(resolve, milliseconds);
  });
}
@@ -0,0 +1,22 @@
export function indentText(
  text: string,
  indentLevel = 1,
): string {
  validateText(text);
  const indentation = '\t'.repeat(indentLevel);
  return splitTextIntoLines(text)
    .map((line) => (line ? `${indentation}${line}` : line))
    .join('\n');
}

export function splitTextIntoLines(text: string): string[] {
  validateText(text);
  return text
    .split(/[\r\n]+/);
}

function validateText(text: string): void {
  if (typeof text !== 'string') {
    throw new Error(`text is not a string. It is: ${typeof text}\n${text}`);
  }
}
tests/checks/desktop-runtime-errors/main.spec.ts (new file, 40 lines)
@@ -0,0 +1,40 @@
import { test, expect } from 'vitest';
import { main } from './check-desktop-runtime-errors/main';
import { COMMAND_LINE_FLAGS, CommandLineFlag } from './check-desktop-runtime-errors/cli-args';

test('should have no desktop runtime errors', async () => {
  // arrange
  setCommandLineFlagsFromEnvironmentVariables();
  let exitCode: number;
  global.process.exit = (code?: number): never => {
    exitCode = code;
    return undefined as never;
  };
  // act
  await main();
  // assert
  expect(exitCode).to.equal(0);
}, {
  timeout: 10 /* minutes */ * 60000,
});

/*
  Map environment variables to CLI arguments for compatibility with Vitest.
*/
function setCommandLineFlagsFromEnvironmentVariables() {
  const flagEnvironmentVariableKeyMappings: {
    readonly [key in CommandLineFlag]: string;
  } = {
    [CommandLineFlag.ForceRebuild]: 'BUILD',
    [CommandLineFlag.TakeScreenshot]: 'SCREENSHOT',
  };
  Object.entries(flagEnvironmentVariableKeyMappings)
    .forEach(([flag, environmentVariableKey]) => {
      if (process.env[environmentVariableKey] !== undefined) {
        process.argv = [
          ...process.argv,
          COMMAND_LINE_FLAGS[flag],
        ];
      }
    });
}
@@ -32,6 +32,7 @@ const DefaultOptions: IBatchRequestOptions = {
  },
  requestOptions: {
    retryExponentialBaseInMs: 5 /* sec */ * 1000,
    requestTimeoutInMs: 60 /* sec */ * 1000,
    additionalHeaders: {},
  },
};
@@ -1,18 +1,20 @@
import fetch from 'cross-fetch';
import { fetchWithTimeout } from './FetchWithTimeout';

export function fetchFollow(
  url: string,
  timeoutInMs: number,
  fetchOptions: RequestInit,
  followOptions: IFollowOptions,
): Promise<Response> {
  followOptions = { ...DefaultOptions, ...followOptions };
  if (followRedirects(followOptions)) {
    return fetch(url, fetchOptions);
    return fetchWithTimeout(url, timeoutInMs, fetchOptions);
  }
  fetchOptions = { ...fetchOptions, redirect: 'manual' /* handled manually */ };
  const cookies = new CookieStorage(followOptions.enableCookies);
  return followRecursivelyWithCookies(
    url,
    timeoutInMs,
    fetchOptions,
    followOptions.maximumRedirectFollowDepth,
    cookies,
@@ -33,12 +35,17 @@ const DefaultOptions: IFollowOptions = {
async function followRecursivelyWithCookies(
  url: string,
  timeoutInMs: number,
  options: RequestInit,
  followDepth: number,
  cookies: CookieStorage,
): Promise<Response> {
  options = updateCookieHeader(cookies, options);
  const response = await fetch(url, options);
  const response = await fetchWithTimeout(
    url,
    timeoutInMs,
    options,
  );
  if (!isRedirect(response.status)) {
    return response;
  }
@@ -49,7 +56,7 @@ async function followRecursivelyWithCookies(
  const cookieHeader = response.headers.get('set-cookie');
  cookies.addHeader(cookieHeader);
  const nextUrl = response.headers.get('location');
  return followRecursivelyWithCookies(nextUrl, options, newFollowDepth, cookies);
  return followRecursivelyWithCookies(nextUrl, timeoutInMs, options, newFollowDepth, cookies);
}

function isRedirect(code: number): boolean {
tests/checks/external-urls/StatusChecker/FetchWithTimeout.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import fetch from 'cross-fetch';

export async function fetchWithTimeout(
  url: string,
  timeoutInMs: number,
  init?: RequestInit,
): Promise<Response> {
  const controller = new AbortController();
  const options: RequestInit = {
    ...(init ?? {}),
    signal: controller.signal,
  };
  const promise = fetch(url, options);
  const timeout = setTimeout(() => controller.abort(), timeoutInMs);
  return promise.finally(() => clearTimeout(timeout));
}
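A typical call to the helper above would look like the following; the URL and timeout values are illustrative.

```ts
// Illustrative usage: give up if the server does not respond within 5 seconds.
const response = await fetchWithTimeout('https://privacy.sexy', 5 /* seconds */ * 1000);
console.log(`Status code: ${response.status}`);
```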
tests/checks/external-urls/StatusChecker/README.md (new file, 111 lines)
@@ -0,0 +1,111 @@
|
||||
# status-checker
|
||||
|
||||
A CLI and SDK for checking the availability of external URLs.
|
||||
|
||||
🧐 Why?
|
||||
|
||||
- 🏃 **Fast**: Batch checks the statuses of URLs in parallel.
|
||||
- 🤖 **Easy-to-Use**: Zero-touch startup with pre-configured settings for reliable results, yet customizable.
|
||||
- 🤞 **Reliable**: Mimics real web browser behavior by following redirects and maintaining cookie storage.
|
||||
|
||||
🍭 Additional features
|
||||
|
||||
- 😇 **Rate Limiting**: Queues requests by domain to be polite.
|
||||
- 🔁 **Retries**: Implements retry pattern with exponential back-off.
|
||||
- ⌚ **Timeouts**: Configurable timeout for each request.
|
||||
- 🎭️ **User-Agent Rotation**: Change user agents for each request.
|
||||
|
||||
## CLI
|
||||
|
||||
Coming soon 🚧
|
||||
|
||||
## Programmatic usage
|
||||
|
||||
The SDK supports both Node.js and browser environments.
|
||||
|
||||
### `getUrlStatusesInParallel`
|
||||
|
||||
```js
// Simple example
const statuses = await getUrlStatusesInParallel([ 'https://privacy.sexy', /* ... */ ]);
if (statuses.every((r) => r.code === 200)) {
  console.log('All URLs are alive!');
} else {
  console.log('Dead URLs:', statuses.filter((r) => r.code !== 200).map((r) => r.url));
}

// Fastest configuration
const statuses = await getUrlStatusesInParallel([ 'https://privacy.sexy', /* ... */ ], {
  domainOptions: {
    sameDomainParallelize: true,
  }
});
```

#### Batch request options

- `domainOptions`:
  - **`sameDomainParallelize`** (*boolean*), default: `false`
    - Determines whether requests to the same domain are parallelized.
    - Setting it to `true` parallelizes all requests, including those to the same domain.
    - Setting it to `false` queues requests for each unique domain while still parallelizing across different domains.
    - Requests to different domains are always parallelized regardless of this option.
    - 💡 This helps to avoid `429 Too Many Requests` responses and be nice to websites.
  - **`sameDomainDelayInMs`** (*number*), default: `3000` (3 seconds)
    - Sets the delay between successive requests to the same domain (see the sketch after this list).
- `requestOptions` (*object*): See [request options](#request-options).

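For completeness, here is a politeness-focused sketch mirroring how the test suite in this commit configures the batch checker; `urls` is assumed to be an array of URL strings, and the values are illustrative rather than defaults:

```ts
// A hedged sketch: queue same-domain requests and wait between them.
const statuses = await getUrlStatusesInParallel(urls, {
  domainOptions: {
    sameDomainParallelize: false, // queue requests that share a domain
    sameDomainDelayInMs: 5 /* sec */ * 1000, // wait 5 seconds between them
  },
});
```
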
### `getUrlStatus`

Check the availability of a single URL.

```js
// Simple example
const status = await getUrlStatus('https://privacy.sexy');
console.log(`Status code: ${status.code}`);
```

#### Request options

- **`retryExponentialBaseInMs`** (*number*), default: `5000` (5 seconds)
  - Base time for the exponential back-off calculation for retries.
  - The longer the base time, the greater the intervals between retries.
- **`additionalHeaders`** (*object*), default: `{}`
  - Additional HTTP headers to send along with the default headers; matching default headers are overridden when specified.
- **`followOptions`** (*object*): See [follow options](#follow-options).
- **`requestTimeoutInMs`** (*number*), default: `60000` (60 seconds)
  - The request is aborted if no response is received within this time limit (a combined example follows this list).

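A sketch combining the request options above in a single call, assuming `getUrlStatus` accepts the options object as its second argument (values are illustrative):

```ts
const status = await getUrlStatus('https://privacy.sexy', {
  retryExponentialBaseInMs: 3 /* sec */ * 1000, // shorter back-off base between retries
  requestTimeoutInMs: 30 /* sec */ * 1000, // abort each attempt after 30 seconds
  additionalHeaders: { referer: 'https://privacy.sexy' }, // merged over the default browser-like headers
});
console.log(status.code ?? status.error);
```
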
### `fetchFollow`

Follows `3XX` redirects while preserving cookies.

It takes the same arguments as the fetch API, with the request timeout in milliseconds inserted as the second parameter and [follow options](#follow-options) added as the fourth; the fetch `redirect: 'follow' | 'manual' | 'error'` option is ignored in favor of the follow options.

```js
const response = await fetchFollow(
  'https://privacy.sexy',
  60 * 1000, // Timeout in milliseconds
  {
    // Same options as the fetch API, except `redirect`,
    // which is discarded in favor of the follow options below
    headers: {
      'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/47.0',
    },
  },
  {
    // Follow options control the redirect behavior
    followRedirects: true,
    maximumRedirectFollowDepth: 20,
    enableCookies: true,
  },
);
console.log(`Status code: ${response.status}`);
```

#### Follow options

- **`followRedirects`** (*boolean*), default: `true`
  - Determines whether redirects with `3XX` response codes are followed (a sketch for inspecting a redirect without following it appears after this list).
- **`maximumRedirectFollowDepth`** (*number*), default: `20`
  - Specifies the maximum number of sequential redirects that the function will follow.
  - 💡 Helps to avoid errors from excessive or circular redirect chains.
- **`enableCookies`** (*boolean*), default: `true`
  - Enables cookie storage to facilitate seamless navigation through login or other authentication challenges.
  - 💡 Helps to overcome sign-in challenges with callbacks.

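To observe a redirect without following it, a sketch along these lines (using the same signature as the example above) disables following and reads the raw `3XX` response:

```ts
const response = await fetchFollow('https://privacy.sexy', 60 * 1000, {}, {
  followRedirects: false, // return the 3XX response instead of following it
});
if (response.status >= 300 && response.status < 400) {
  console.log('Redirects to:', response.headers.get('location'));
}
```
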
@@ -1,6 +1,7 @@
import { retryWithExponentialBackOff } from './ExponentialBackOffRetryHandler';
import { IUrlStatus } from './IUrlStatus';
import { fetchFollow, IFollowOptions } from './FetchFollow';
import { getRandomUserAgent } from './UserAgents';

export function getUrlStatus(
  url: string,

@@ -12,7 +13,12 @@ export function getUrlStatus(
  console.log('Requesting', url);
  let result: IUrlStatus;
  try {
    const response = await fetchFollow(url, fetchOptions, options.followOptions);
    const response = await fetchFollow(
      url,
      options.requestTimeoutInMs,
      fetchOptions,
      options.followOptions,
    );
    result = { url, code: response.status };
  } catch (err) {
    result = { url, error: JSON.stringify(err, null, '\t') };

@@ -26,32 +32,38 @@ export interface IRequestOptions {
  additionalHeaders?: Record<string, string>;
  additionalHeadersUrlIgnore?: string[];
  followOptions?: IFollowOptions;
  requestTimeoutInMs: number;
}

const DefaultOptions: IRequestOptions = {
  retryExponentialBaseInMs: 5000,
  additionalHeaders: {},
  additionalHeadersUrlIgnore: [],
  requestTimeoutInMs: 60 /* seconds */ * 1000,
};

function getFetchOptions(url: string, options: IRequestOptions): RequestInit {
  const additionalHeaders = options.additionalHeadersUrlIgnore
    .some((ignorePattern) => url.match(ignorePattern))
    .some((ignorePattern) => url.startsWith(ignorePattern))
    ? {}
    : options.additionalHeaders;
  return {
    method: 'GET',
    headers: { ...DefaultHeaders, ...additionalHeaders },
    method: 'HEAD',
    headers: {
      ...getDefaultHeaders(),
      ...additionalHeaders,
    },
  };
}

const DefaultHeaders: Record<string, string> = {
  /* Chrome on macOS */
  'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36',
  'upgrade-insecure-requests': '1',
  connection: 'keep-alive',
  accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
  'accept-encoding': 'gzip, deflate, br',
  'cache-control': 'max-age=0',
  'accept-language': 'en-US,en;q=0.9',
};
function getDefaultHeaders(): Record<string, string> {
  return {
    'user-agent': getRandomUserAgent(),
    'upgrade-insecure-requests': '1',
    connection: 'keep-alive',
    accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'accept-encoding': 'gzip, deflate, br',
    'cache-control': 'max-age=0',
    'accept-language': 'en-US,en;q=0.9',
  };
}
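The `getFetchOptions` hunk above also changes how ignore entries are matched: the old code treated each entry as a regular expression (`url.match`), while the new code treats it as a literal URL prefix (`url.startsWith`). A small sketch of the difference, using the `batcmd.com` entry from the removed spec file as an illustrative value:

```ts
const ignorePatterns = ['http://batcmd.com/'];
const url = 'http://batcmd.com/windows/10/services/';

// Old behavior: the entry is interpreted as a regular expression anywhere in the URL.
const ignoredByRegex = ignorePatterns.some((pattern) => url.match(pattern));

// New behavior: the entry must be a literal prefix of the URL.
const ignoredByPrefix = ignorePatterns.some((pattern) => url.startsWith(pattern));

// Both are true here; they diverge for entries containing regex metacharacters.
console.log(ignoredByRegex, ignoredByPrefix);
```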
tests/checks/external-urls/StatusChecker/UserAgents.ts (new file, 75 lines)
@@ -0,0 +1,75 @@
export function getRandomUserAgent(): string {
  return UserAgents[Math.floor(Math.random() * UserAgents.length)];
}

const UserAgents = [
  // Chrome
  'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537',
  'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537',

  // Firefox
  'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:66.0) Gecko/20100101 Firefox/66.0',
  'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.1.2 Safari/605.1.15',

  // Safari
  'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Safari/604.1',

  // Internet Explorer
  'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko',

  // Edge
  'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3 Edge/15.0',

  // Opera
  'Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14',

  // iOS Devices
  'Mozilla/5.0 (iPhone; CPU iPhone OS 12_1 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/18.2b11866 Mobile/16B91 Safari/605.1.15',
  'Mozilla/5.0 (iPad; CPU OS 11_0 like Mac OS X) AppleWebKit/604.1.34 (KHTML, like Gecko) Version/11.0 Mobile/15A5341f Safari/604.1',

  // Android Devices
  'Mozilla/5.0 (Linux; Android 7.0; SM-G930V Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.125 Mobile Safari/537.3',

  // Other Devices/Browsers
  'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.3',
  'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Safari/605.1.15',
  'Mozilla/5.0 (Windows Phone 10.0; Android 6.0.1; Microsoft; Lumia 950) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Mobile Safari/537.3 Edge/15.0',
  'Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko',
  'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0',
  'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/47.0',
  'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.76 Mobile Safari/537.3',
  'Mozilla/5.0 (Linux; Android 7.0; SM-G930F Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.83 Mobile Safari/537.3',
  'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.3',
  'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/11.1.2 Safari/605.1.15',
  'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1',
  'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.3 OPR/53.0.2907.99',
  'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2)',
  'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:46.0) Gecko/20120121 Firefox/46.0',
  'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; Tablet PC 2.0)',
  'Mozilla/5.0 (Windows NT 5.1; rv:36.0) Gecko/20100101 Firefox/36.0',
  'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0',
  'Mozilla/5.0 (X11; Linux i686; rv:30.0) Gecko/20100101 Firefox/30.0',
  'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:28.0) Gecko/20100101 Firefox/28.0',
  'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.112 Safari/537.3',
  'Mozilla/5.0 (Windows NT 6.1; rv:27.3) Gecko/20130101 Firefox/27.3',
  'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/537.3 (KHTML, like Gecko) Chrome/22.0.1229.79 Safari/537.3',
  'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.3',
  'Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:16.0) Gecko/20161202 Firefox/21.0.1',
  'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:20.0) Gecko/20100101 Firefox/20.0',
  'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0',
  'Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0',
  'Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0',
  'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.3',
  'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.3 (KHTML, like Gecko) Chrome/22.0.1229.94 Safari/537.3',
  'Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.3',
  'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.3',
  'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.3',
  'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.3',
  'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.3',
  'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.3 (KHTML, like Gecko) Chrome/22.0.1229.94 Safari/537.3',
  'Mozilla/5.0 (X11; CrOS x86_64 4319.74.0) AppleWebKit/537.3 (KHTML, like Gecko) Chrome/22.0.1229.94 Safari/537.3',
  'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.3 (KHTML, like Gecko) Chrome/22.0.1229.94 Safari/537.3',
  'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.3 (KHTML, like Gecko) Chrome/22.0.1229.94 Safari/537.3',
  'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.3 (KHTML, like Gecko) Chrome/22.0.1229.94 Safari/537.3',
  'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.3 (KHTML, like Gecko) Chrome/22.0.1229.94 Safari/537.3',
];
tests/checks/external-urls/main.spec.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
import { test, expect } from 'vitest';
import { parseApplication } from '@/application/Parser/ApplicationParser';
import { IApplication } from '@/domain/IApplication';
import { IUrlStatus } from './StatusChecker/IUrlStatus';
import { getUrlStatusesInParallel, IBatchRequestOptions } from './StatusChecker/BatchStatusChecker';

const app = parseApplication();
const urls = collectUniqueUrls(app);
const requestOptions: IBatchRequestOptions = {
  domainOptions: {
    sameDomainParallelize: false, // be nice to our external servers
    sameDomainDelayInMs: 5 /* sec */ * 1000,
  },
  requestOptions: {
    retryExponentialBaseInMs: 3 /* sec */ * 1000,
    requestTimeoutInMs: 60 /* sec */ * 1000,
    additionalHeaders: { referer: app.info.homepage },
  },
};
const testTimeoutInMs = urls.length * 60 /* seconds */ * 1000;

test(`all URLs (${urls.length}) should be alive`, async () => {
  const results = await getUrlStatusesInParallel(urls, requestOptions);
  const deadUrls = results.filter((r) => r.code !== 200);
  expect(deadUrls).to.have.lengthOf(0, printUrls(deadUrls));
}, testTimeoutInMs);

function collectUniqueUrls(application: IApplication): string[] {
  return [ // Get all nodes
    ...application.collections.flatMap((c) => c.getAllCategories()),
    ...application.collections.flatMap((c) => c.getAllScripts()),
  ]
    // Get all docs
    .flatMap((documentable) => documentable.docs)
    // Parse all URLs
    .flatMap((docString) => docString.match(/(https?:\/\/[^\s`"<>()]+)/g) || [])
    // Remove duplicates
    .filter((url, index, array) => array.indexOf(url) === index);
}

function printUrls(statuses: IUrlStatus[]): string {
  /* eslint-disable prefer-template */
  return '\n'
    + statuses.map((status) => `- ${status.url}\n`
      + (status.code ? `\tResponse code: ${status.code}` : '')
      + (status.error ? `\tError: ${status.error}` : ''))
      .join('\n')
    + '\n';
  /* eslint-enable prefer-template */
}
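The URL-extraction regex above now excludes backticks, quotes, angle brackets, and parentheses, so URLs embedded in Markdown-style docs are captured cleanly. A quick illustration of the intent (the sample doc string is made up):

```ts
const docString = 'See [the guide](https://privacy.sexy/docs) and `https://example.com/api` for details.';
const urls = docString.match(/(https?:\/\/[^\s`"<>()]+)/g) || [];

// Trailing ')' and '`' are no longer swallowed into the captured URLs.
console.log(urls); // ['https://privacy.sexy/docs', 'https://example.com/api']
```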
@@ -1,56 +0,0 @@
import { describe, it, expect } from 'vitest';
import { parseApplication } from '@/application/Parser/ApplicationParser';
import { IApplication } from '@/domain/IApplication';
import { IUrlStatus } from './StatusChecker/IUrlStatus';
import { getUrlStatusesInParallel, IBatchRequestOptions } from './StatusChecker/BatchStatusChecker';

describe('collections', () => {
  // arrange
  const app = parseApplication();
  const urls = collectUniqueUrls(app);
  const options: IBatchRequestOptions = {
    domainOptions: {
      sameDomainParallelize: true, // no need to be so nice until sources start failing
      // sameDomainDelayInMs: 2 /* sec */ * 1000,
    },
    requestOptions: {
      retryExponentialBaseInMs: 3 /* sec */ * 1000,
      additionalHeaders: { referer: app.info.homepage },
      additionalHeadersUrlIgnore: [
        'http://batcmd.com/', // Otherwise it responds with 403
      ],
    },
  };
  const testTimeoutInMs = urls.length * 60 /* minutes */ * 1000;
  it('have no dead urls', async () => {
    // act
    const results = await getUrlStatusesInParallel(urls, options);
    // assert
    const deadUrls = results.filter((r) => r.code !== 200);
    expect(deadUrls).to.have.lengthOf(0, printUrls(deadUrls));
  }, testTimeoutInMs);
});

function collectUniqueUrls(app: IApplication): string[] {
  return [ // Get all nodes
    ...app.collections.flatMap((c) => c.getAllCategories()),
    ...app.collections.flatMap((c) => c.getAllScripts()),
  ]
    // Get all docs
    .flatMap((documentable) => documentable.docs)
    // Parse all URLs
    .flatMap((docString) => docString.match(/(https?:\/\/[^\s]+)/g) || [])
    // Remove duplicates
    .filter((url, index, array) => array.indexOf(url) === index);
}

function printUrls(statuses: IUrlStatus[]): string {
  /* eslint-disable prefer-template */
  return '\n'
    + statuses.map((status) => `- ${status.url}\n`
      + (status.code ? `\tResponse code: ${status.code}` : '')
      + (status.error ? `\tError: ${status.error}` : ''))
      .join('\n')
    + '\n';
  /* eslint-enable prefer-template */
}
@@ -1,108 +0,0 @@
# status-checker

CLI and SDK to check whether an external URL is alive.

🧐 Why?

- 🏃🏻 Batch checking status of URLs in parallel.
- 🤖 Zero-touch start, pre-configured for reliable results, still configurable.
- 🤞 Reliable, mimics a real web browser by following redirect, and cookie storage.

🍭 Sweets such as

- 😇 Queueing requests by domain to be nice to them
- 🔁 Retry pattern with exponential back-off

## CLI

Coming soon 🚧

## Programmatic usage

Programmatic usage is supported both on Node.js and browser.

### `getUrlStatusesInParallel`

```js
// Simple example
const statuses = await getUrlStatusesInParallel([ 'https://privacy.sexy', /* ... */ ]);
if(statuses.all((r) => r.code === 200)) {
  console.log('All URLs are alive!');
} else {
  console.log('Dead URLs:', statuses.filter((r) => r.code !== 200).map((r) => r.url));
}

// Fastest configuration
const statuses = await getUrlStatusesInParallel([ 'https://privacy.sexy', /* ... */ ], {
  domainOptions: {
    sameDomainParallelize: false,
  }
});
```

#### Batch request options

- `domainOptions`:
  - **`sameDomainParallelize`**, (*boolean*), default: `false`
    - Determines whether the requests to URLs under same domain will be parallelize.
    - Setting `false` parallelizes all requests.
    - Setting `true` sends requests in queue for each unique domain, still parallelizing for different domains.
    - Requests to different domains are always parallelized regardless of this option.
    - 💡 This helps to avoid `429 Too Many Requests` and be nice to websites
  - **`sameDomainDelayInMs`** (*boolean*), default: `3000` (3 seconds)
    - Sets delay between requests to same host (domain) if same domain parallelization is disabled.
- `requestOptions` (*object*): See [request options](#request-options).

### `getUrlStatus`

Checks whether single URL is dead or alive.

```js
// Simple example
const status = await getUrlStatus('https://privacy.sexy');
console.log(`Status code: ${status.code}`);
```

#### Request options

- **`retryExponentialBaseInMs`** (*boolean*), default: `5000` (5 seconds)
  - The based time that's multiplied by exponential value for exponential backoff and retry calculations
  - The longer it is, the longer the delay between retries are.
- **`additionalHeaders`** (*boolean*), default: `false`
  - Additional headers that will be sent alongside default headers mimicking browser.
  - If default header are specified, additional headers override defaults.
- **`followOptions`** (*object*): See [follow options](#follow-options).

### `fetchFollow`

Gets response from single URL by following `3XX` redirect targets by sending necessary cookies.

Same fetch API except third parameter that specifies [follow options](#follow-options), `redirect: 'follow' | 'manual' | 'error'` is discarded in favor of the third parameter.

```js
const status = await fetchFollow('https://privacy.sexy', {
  // First argument is same options as fetch API, except `redirect` options
  // that's discarded in favor of next argument follow options
  headers: {
    'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/47.0'
  },
}, {
  // Second argument sets the redirect behavior
  followRedirects: true,
  maximumRedirectFollowDepth: 20,
  enableCookies: true,
}
);
console.log(`Status code: ${status.code}`);
```

#### Follow options

- **`followRedirects`** (*boolean*), default: `true`
  - Determines whether redirects with `3XX` response code will be followed.
- **`maximumRedirectFollowDepth`** (*boolean*), default: `20`
  - Determines maximum consequent redirects that will be followed.
  - 💡 Helps to solve maximum redirect reached errors.
- **`enableCookies`** (*boolean*), default: `true`
  - Saves cookies requested to store by webpages and sends them when redirected.
  - 💡 Helps to over-come sign-in challenges with callbacks.