Refactor code to comply with ESLint rules
Major refactoring using ESLint with rules from AirBnb and Vue. Enable most of the ESLint rules and fix the resulting violations in the code. Also document the rules that are disabled, describing what they are and why they are disabled. Allow logging (`console.log`) in test files and in development mode (e.g. when working with `npm run serve`), but disallow it when the environment is production (as pre-configured by Vue). Also add the `--mode production` flag to the `lint:eslint` command so production linting is executed earlier in the lifecycle. Disable rules that require separate work, such as ESLint rules that are broken in TypeScript: `no-useless-constructor` (eslint/eslint#14118) and `no-shadow` (eslint/eslint#13014).
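For reference, a minimal sketch of the kind of `.eslintrc.js` this describes. The `no-console` handling and the two disabled rules follow the description above; the `extends` list, the test-file glob, and the overall layout are illustrative assumptions, not a copy of the committed file:

```js
// .eslintrc.js (illustrative sketch, not the exact committed configuration)
module.exports = {
  root: true,
  env: { node: true },
  extends: [
    'plugin:vue/essential',
    '@vue/airbnb',
    '@vue/typescript/recommended',
  ],
  rules: {
    // Logging is allowed while developing (`npm run serve`) but becomes an
    // error in production; NODE_ENV is pre-configured by Vue CLI.
    'no-console': process.env.NODE_ENV === 'production' ? 'error' : 'off',
    // Broken for TypeScript code; disabled until handled separately:
    'no-useless-constructor': 'off', // eslint/eslint#14118
    'no-shadow': 'off', // eslint/eslint#13014
  },
  overrides: [
    {
      files: ['**/tests/**/*.{j,t}s'], // assumed glob; logging stays allowed in tests
      rules: {
        'no-console': 'off',
      },
    },
  ],
};
```

With that in place, the `lint:eslint` script passes `--mode production` (e.g. something like `vue-cli-service lint --mode production`) so the stricter production-mode behavior of rules such as `no-console` is already enforced at lint time.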
@@ -3,12 +3,12 @@ import { expect } from 'chai';
 import { parseApplication } from '@/application/Parser/ApplicationParser';

 describe('ApplicationParser', () => {
-  describe('parseApplication', () => {
-    it('can parse current application', () => {
-      // act
-      const act = () => parseApplication();
-      // assert
-      expect(act).to.not.throw();
-    });
+  describe('parseApplication', () => {
+    it('can parse current application', () => {
+      // act
+      const act = () => parseApplication();
+      // assert
+      expect(act).to.not.throw();
+    });
   });
 });

@@ -6,46 +6,47 @@ import { IUrlStatus } from './StatusChecker/IUrlStatus';
 import { getUrlStatusesInParallel, IBatchRequestOptions } from './StatusChecker/BatchStatusChecker';

 describe('collections', () => {
-  // arrange
-  const app = parseApplication();
-  const urls = collectUniqueUrls(app);
-  const options: IBatchRequestOptions = {
-    domainOptions: {
-      sameDomainParallelize: true, // no need to be so nice until sources start failing
-      // sameDomainDelayInMs: 2 /* sec */ * 1000,
-    },
-    requestOptions: {
-      retryExponentialBaseInMs: 3 /* sec */ * 1000,
-      additionalHeaders: { referer: app.info.homepage },
-      additionalHeadersUrlIgnore: [
-        'http://batcmd.com/', // Otherwise it responds with 403
-      ],
-    },
-  };
-  const testTimeoutInMs = urls.length * 60000 /* 1 minute */;
-  it('have no dead urls', async () => {
-    // act
-    const results = await getUrlStatusesInParallel(urls, options);
-    // assert
-    const deadUrls = results.filter((r) => r.code !== 200);
-    expect(deadUrls).to.have.lengthOf(0, printUrls(deadUrls));
-  }).timeout(testTimeoutInMs);
+  // arrange
+  const app = parseApplication();
+  const urls = collectUniqueUrls(app);
+  const options: IBatchRequestOptions = {
+    domainOptions: {
+      sameDomainParallelize: true, // no need to be so nice until sources start failing
+      // sameDomainDelayInMs: 2 /* sec */ * 1000,
+    },
+    requestOptions: {
+      retryExponentialBaseInMs: 3 /* sec */ * 1000,
+      additionalHeaders: { referer: app.info.homepage },
+      additionalHeadersUrlIgnore: [
+        'http://batcmd.com/', // Otherwise it responds with 403
+      ],
+    },
+  };
+  const testTimeoutInMs = urls.length * 60 /* minutes */ * 1000;
+  it('have no dead urls', async () => {
+    // act
+    const results = await getUrlStatusesInParallel(urls, options);
+    // assert
+    const deadUrls = results.filter((r) => r.code !== 200);
+    expect(deadUrls).to.have.lengthOf(0, printUrls(deadUrls));
+  }).timeout(testTimeoutInMs);
 });

 function collectUniqueUrls(app: IApplication): string[] {
-  return app
-    .collections
-    .flatMap((a) => a.getAllScripts())
-    .flatMap((script) => script.documentationUrls)
-    .filter((url, index, array) => array.indexOf(url) === index);
+  return app
+    .collections
+    .flatMap((a) => a.getAllScripts())
+    .flatMap((script) => script.documentationUrls)
+    .filter((url, index, array) => array.indexOf(url) === index);
 }

 function printUrls(statuses: IUrlStatus[]): string {
-  return '\n' +
-    statuses.map((status) =>
-      `- ${status.url}\n` +
-      (status.code ? `\tResponse code: ${status.code}` : '') +
-      (status.error ? `\tException: ${JSON.stringify(status.error, null, '\t')}` : ''))
-    .join(`\n`)
-    + '\n';
+  /* eslint-disable prefer-template */
+  return '\n'
+    + statuses.map((status) => `- ${status.url}\n`
+      + (status.code ? `\tResponse code: ${status.code}` : '')
+      + (status.error ? `\tError: ${status.error}` : ''))
+      .join('\n')
+    + '\n';
+  /* eslint-enable prefer-template */
 }

@@ -4,65 +4,71 @@ import { getUrlStatus, IRequestOptions } from './Requestor';
 import { groupUrlsByDomain } from './UrlPerDomainGrouper';

 export async function getUrlStatusesInParallel(
-  urls: string[],
-  options?: IBatchRequestOptions): Promise<IUrlStatus[]> {
-  // urls = [ 'https://privacy.sexy' ]; // Here to comment out when testing
-  const uniqueUrls = Array.from(new Set(urls));
-  options = { ...DefaultOptions, ...options };
-  console.log('Options: ', options);
-  const results = await request(uniqueUrls, options);
-  return results;
+  urls: string[],
+  options?: IBatchRequestOptions,
+): Promise<IUrlStatus[]> {
+  // urls = [ 'https://privacy.sexy' ]; // Here to comment out when testing
+  const uniqueUrls = Array.from(new Set(urls));
+  options = { ...DefaultOptions, ...options };
+  console.log('Options: ', options);
+  const results = await request(uniqueUrls, options);
+  return results;
 }

 export interface IBatchRequestOptions {
-  domainOptions?: IDomainOptions;
-  requestOptions?: IRequestOptions;
+  domainOptions?: IDomainOptions;
+  requestOptions?: IRequestOptions;
 }

 interface IDomainOptions {
-  sameDomainParallelize?: boolean;
-  sameDomainDelayInMs?: number;
+  sameDomainParallelize?: boolean;
+  sameDomainDelayInMs?: number;
 }

 const DefaultOptions: IBatchRequestOptions = {
-  domainOptions: {
-    sameDomainParallelize: false,
-    sameDomainDelayInMs: 3 /* sec */ * 1000,
-  },
-  requestOptions: {
-    retryExponentialBaseInMs: 5 /* sec */ * 1000,
-    additionalHeaders: {},
-  },
+  domainOptions: {
+    sameDomainParallelize: false,
+    sameDomainDelayInMs: 3 /* sec */ * 1000,
+  },
+  requestOptions: {
+    retryExponentialBaseInMs: 5 /* sec */ * 1000,
+    additionalHeaders: {},
+  },
 };

-function request(urls: string[], options: IBatchRequestOptions): Promise<IUrlStatus[]> {
-  if (!options.domainOptions.sameDomainParallelize) {
-    return runOnEachDomainWithDelay(
-      urls,
-      (url) => getUrlStatus(url, options.requestOptions),
-      options.domainOptions.sameDomainDelayInMs);
-  } else {
-    return Promise.all(
-      urls.map((url) => getUrlStatus(url, options.requestOptions)));
-  }
+function request(
+  urls: string[],
+  options: IBatchRequestOptions,
+): Promise<IUrlStatus[]> {
+  if (!options.domainOptions.sameDomainParallelize) {
+    return runOnEachDomainWithDelay(
+      urls,
+      (url) => getUrlStatus(url, options.requestOptions),
+      options.domainOptions.sameDomainDelayInMs,
+    );
+  }
+  return Promise.all(urls.map((url) => getUrlStatus(url, options.requestOptions)));
 }

 async function runOnEachDomainWithDelay(
-  urls: string[],
-  action: (url: string) => Promise<IUrlStatus>,
-  delayInMs: number): Promise<IUrlStatus[]> {
-  const grouped = groupUrlsByDomain(urls);
-  const tasks = grouped.map(async (group) => {
-    const results = new Array<IUrlStatus>();
-    for (const url of group) {
-      const status = await action(url);
-      results.push(status);
-      if (results.length !== group.length) {
-        await sleep(delayInMs);
-      }
-    }
-    return results;
-  });
-  const r = await Promise.all(tasks);
-  return r.flat();
+  urls: string[],
+  action: (url: string) => Promise<IUrlStatus>,
+  delayInMs: number,
+): Promise<IUrlStatus[]> {
+  const grouped = groupUrlsByDomain(urls);
+  const tasks = grouped.map(async (group) => {
+    const results = new Array<IUrlStatus>();
+    /* eslint-disable no-await-in-loop */
+    for (const url of group) {
+      const status = await action(url);
+      results.push(status);
+      if (results.length !== group.length) {
+        await sleep(delayInMs);
+      }
+    }
+    /* eslint-enable no-await-in-loop */
+    return results;
+  });
+  const r = await Promise.all(tasks);
+  return r.flat();
 }

@@ -4,41 +4,45 @@ import { IUrlStatus } from './IUrlStatus';
 const DefaultBaseRetryIntervalInMs = 5 /* sec */ * 1000;

 export async function retryWithExponentialBackOff(
-  action: () => Promise<IUrlStatus>,
-  baseRetryIntervalInMs: number = DefaultBaseRetryIntervalInMs,
-  currentRetry = 1): Promise<IUrlStatus> {
-  const maxTries: number = 3;
-  const status = await action();
-  if (shouldRetry(status)) {
-    if (currentRetry <= maxTries) {
-      const exponentialBackOffInMs = getRetryTimeoutInMs(currentRetry, baseRetryIntervalInMs);
-      // tslint:disable-next-line: no-console
-      console.log(`Retrying (${currentRetry}) in ${exponentialBackOffInMs / 1000} seconds`, status);
-      await sleep(exponentialBackOffInMs);
-      return retryWithExponentialBackOff(action, baseRetryIntervalInMs, currentRetry + 1);
-    }
-  }
+  action: () => Promise<IUrlStatus>,
+  baseRetryIntervalInMs: number = DefaultBaseRetryIntervalInMs,
+  currentRetry = 1,
+): Promise<IUrlStatus> {
+  const maxTries = 3;
+  const status = await action();
+  if (shouldRetry(status)) {
+    if (currentRetry <= maxTries) {
+      const exponentialBackOffInMs = getRetryTimeoutInMs(currentRetry, baseRetryIntervalInMs);
+      // tslint:disable-next-line: no-console
+      console.log(`Retrying (${currentRetry}) in ${exponentialBackOffInMs / 1000} seconds`, status);
+      await sleep(exponentialBackOffInMs);
+      return retryWithExponentialBackOff(action, baseRetryIntervalInMs, currentRetry + 1);
+    }
+    return status;
+  }
   return status;
 }

 function shouldRetry(status: IUrlStatus) {
-  if (status.error) {
-    return true;
-  }
-  return isTransientError(status.code)
-    || status.code === 429; // Too Many Requests
+  if (status.error) {
+    return true;
+  }
+  return isTransientError(status.code)
+    || status.code === 429; // Too Many Requests
 }

 function isTransientError(statusCode: number) {
-  return statusCode >= 500 && statusCode <= 599;
+  return statusCode >= 500 && statusCode <= 599;
 }

-function getRetryTimeoutInMs(currentRetry: number, baseRetryIntervalInMs: number = DefaultBaseRetryIntervalInMs) {
-  const retryRandomFactor = 0.5; // Retry intervals are between 50% and 150%
-  // of the exponentially increasing base amount
-  const minRandom = 1 - retryRandomFactor;
-  const maxRandom = 1 + retryRandomFactor;
-  const randomization = (Math.random() * (maxRandom - minRandom)) + maxRandom;
-  const exponential = Math.pow(2, currentRetry - 1);
-  return Math.ceil(exponential * baseRetryIntervalInMs * randomization);
+function getRetryTimeoutInMs(
+  currentRetry: number,
+  baseRetryIntervalInMs: number = DefaultBaseRetryIntervalInMs,
+) {
+  const retryRandomFactor = 0.5; // Retry intervals are between 50% and 150%
+  // of the exponentially increasing base amount
+  const minRandom = 1 - retryRandomFactor;
+  const maxRandom = 1 + retryRandomFactor;
+  const randomization = (Math.random() * (maxRandom - minRandom)) + maxRandom;
+  const exponential = 2 ** (currentRetry - 1);
+  return Math.ceil(exponential * baseRetryIntervalInMs * randomization);
 }

@@ -1,66 +1,100 @@
 import fetch from 'cross-fetch';

 export function fetchFollow(
-  url: string, fetchOptions: RequestInit, followOptions: IFollowOptions): Promise<Response> {
-  followOptions = { ...DefaultOptions, ...followOptions };
-  if (!followOptions.followRedirects
-    || followOptions.maximumRedirectFollowDepth === 0) {
-    return fetch(url, fetchOptions);
-  }
-  fetchOptions = { ...fetchOptions, redirect: 'manual' /* handled manually */ };
-  const cookies = new CookieStorage(followOptions.enableCookies);
-  return followRecursivelyWithCookies(
-    url, fetchOptions, followOptions.maximumRedirectFollowDepth, cookies);
+  url: string,
+  fetchOptions: RequestInit,
+  followOptions: IFollowOptions,
+): Promise<Response> {
+  followOptions = { ...DefaultOptions, ...followOptions };
+  if (followRedirects(followOptions)) {
+    return fetch(url, fetchOptions);
+  }
+  fetchOptions = { ...fetchOptions, redirect: 'manual' /* handled manually */ };
+  const cookies = new CookieStorage(followOptions.enableCookies);
+  return followRecursivelyWithCookies(
+    url,
+    fetchOptions,
+    followOptions.maximumRedirectFollowDepth,
+    cookies,
+  );
 }

 export interface IFollowOptions {
-  followRedirects?: boolean;
-  maximumRedirectFollowDepth?: number;
-  enableCookies?: boolean;
+  followRedirects?: boolean;
+  maximumRedirectFollowDepth?: number;
+  enableCookies?: boolean;
 }

 const DefaultOptions: IFollowOptions = {
-  followRedirects: true,
-  maximumRedirectFollowDepth: 20,
-  enableCookies: true,
+  followRedirects: true,
+  maximumRedirectFollowDepth: 20,
+  enableCookies: true,
 };

 async function followRecursivelyWithCookies(
-  url: string, options: RequestInit, followDepth: number, cookies: CookieStorage): Promise<Response> {
-  if (cookies.hasAny()) {
-    options = { ...options, headers: { ...options.headers, cookie: cookies.getHeader() } };
-  }
-  const response = await fetch(url, options);
-  if (!isRedirect(response.status)) {
-    return response;
-  }
-  if (--followDepth < 0) {
-    throw new Error(`[max-redirect] maximum redirect reached at: ${url}`);
-  }
-  const cookieHeader = response.headers.get('set-cookie');
-  cookies.addHeader(cookieHeader);
-  const nextUrl = response.headers.get('location');
-  return followRecursivelyWithCookies(nextUrl, options, followDepth, cookies);
+  url: string,
+  options: RequestInit,
+  followDepth: number,
+  cookies: CookieStorage,
+): Promise<Response> {
+  options = updateCookieHeader(cookies, options);
+  const response = await fetch(url, options);
+  if (!isRedirect(response.status)) {
+    return response;
+  }
+  const newFollowDepth = followDepth - 1;
+  if (newFollowDepth < 0) {
+    throw new Error(`[max-redirect] maximum redirect reached at: ${url}`);
+  }
+  const cookieHeader = response.headers.get('set-cookie');
+  cookies.addHeader(cookieHeader);
+  const nextUrl = response.headers.get('location');
+  return followRecursivelyWithCookies(nextUrl, options, newFollowDepth, cookies);
 }

 function isRedirect(code: number): boolean {
-  return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
+  return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
 }

 class CookieStorage {
-  public cookies = new Array<string>();
-  constructor(private readonly enabled: boolean) {
-  }
-  public hasAny() {
-    return this.enabled && this.cookies.length > 0;
-  }
-  public addHeader(header: string) {
-    if (!this.enabled || !header) {
-      return;
-    }
-    this.cookies.push(header);
-  }
-  public getHeader() {
-    return this.cookies.join(' ; ');
+  public cookies = new Array<string>();
+
+  constructor(private readonly enabled: boolean) {
+  }
+
+  public hasAny() {
+    return this.enabled && this.cookies.length > 0;
+  }
+
+  public addHeader(header: string) {
+    if (!this.enabled || !header) {
+      return;
+    }
+    this.cookies.push(header);
+  }
+
+  public getHeader() {
+    return this.cookies.join(' ; ');
   }
 }

+function followRedirects(options: IFollowOptions) {
+  if (!options.followRedirects) {
+    return false;
+  }
+  if (options.maximumRedirectFollowDepth === 0) {
+    return false;
+  }
+  return true;
+}
+
+function updateCookieHeader(
+  cookies: CookieStorage,
+  options: RequestInit,
+): RequestInit {
+  if (!cookies.hasAny()) {
+    return options;
+  }
+  const newOptions = { ...options, headers: { ...options.headers, cookie: cookies.getHeader() } };
+  return newOptions;
+}

@@ -1,5 +1,5 @@
 export interface IUrlStatus {
-  url: string;
-  error?: any;
-  code?: number;
+  url: string;
+  error?: string;
+  code?: number;
 }

@@ -2,53 +2,56 @@ import { retryWithExponentialBackOff } from './ExponentialBackOffRetryHandler';
 import { IUrlStatus } from './IUrlStatus';
 import { fetchFollow, IFollowOptions } from './FetchFollow';

-export async function getUrlStatus(
-  url: string,
-  options: IRequestOptions = DefaultOptions): Promise<IUrlStatus> {
-  options = { ...DefaultOptions, ...options };
-  const fetchOptions = getFetchOptions(url, options);
-  return retryWithExponentialBackOff(async () => {
-    console.log('Requesting', url);
-    let result: IUrlStatus;
-    try {
-      const response = await fetchFollow(url, fetchOptions, options.followOptions);
-      result = { url, code: response.status };
-    } catch (err) {
-      result = { url, error: err };
-    }
-    return result;
-  }, options.retryExponentialBaseInMs);
+export function getUrlStatus(
+  url: string,
+  options: IRequestOptions = DefaultOptions,
+): Promise<IUrlStatus> {
+  options = { ...DefaultOptions, ...options };
+  const fetchOptions = getFetchOptions(url, options);
+  return retryWithExponentialBackOff(async () => {
+    console.log('Requesting', url);
+    let result: IUrlStatus;
+    try {
+      const response = await fetchFollow(url, fetchOptions, options.followOptions);
+      result = { url, code: response.status };
+    } catch (err) {
+      result = { url, error: JSON.stringify(err, null, '\t') };
+    }
+    return result;
+  }, options.retryExponentialBaseInMs);
 }

 export interface IRequestOptions {
-  retryExponentialBaseInMs?: number;
-  additionalHeaders?: Record<string, string>;
-  additionalHeadersUrlIgnore?: string[];
-  followOptions?: IFollowOptions;
+  retryExponentialBaseInMs?: number;
+  additionalHeaders?: Record<string, string>;
+  additionalHeadersUrlIgnore?: string[];
+  followOptions?: IFollowOptions;
 }

 const DefaultOptions: IRequestOptions = {
-  retryExponentialBaseInMs: 5000,
-  additionalHeaders: {},
-  additionalHeadersUrlIgnore: [],
+  retryExponentialBaseInMs: 5000,
+  additionalHeaders: {},
+  additionalHeadersUrlIgnore: [],
 };

 function getFetchOptions(url: string, options: IRequestOptions): RequestInit {
-  const additionalHeaders = options.additionalHeadersUrlIgnore.some(
-    (ignorePattern) => url.match(ignorePattern)) ? {} : options.additionalHeaders;
-  return {
-    method: 'GET',
-    headers: { ...DefaultHeaders, ...additionalHeaders },
-  };
+  const additionalHeaders = options.additionalHeadersUrlIgnore
+    .some((ignorePattern) => url.match(ignorePattern))
+    ? {}
+    : options.additionalHeaders;
+  return {
+    method: 'GET',
+    headers: { ...DefaultHeaders, ...additionalHeaders },
+  };
 }

 const DefaultHeaders: Record<string, string> = {
-  /* Chrome on macOS */
-  'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36',
-  'upgrade-insecure-requests': '1',
-  'connection': 'keep-alive',
-  'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
-  'accept-encoding': 'gzip, deflate, br',
-  'cache-control': 'max-age=0',
-  'accept-language': 'en-US,en;q=0.9',
+  /* Chrome on macOS */
+  'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36',
+  'upgrade-insecure-requests': '1',
+  connection: 'keep-alive',
+  accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
+  'accept-encoding': 'gzip, deflate, br',
+  'cache-control': 'max-age=0',
+  'accept-language': 'en-US,en;q=0.9',
 };

@@ -1,19 +1,19 @@
 export function groupUrlsByDomain(urls: string[]): string[][] {
-  const domains = new Set<string>();
-  const urlsWithDomain = urls.map((url) => ({
-    url,
-    domain: extractDomain(url),
-  }));
-  for (const url of urlsWithDomain) {
-    domains.add(url.domain);
-  }
-  return Array.from(domains).map((domain) => {
-    return urlsWithDomain
-      .filter((url) => url.domain === domain)
-      .map((url) => url.url);
-  });
+  const domains = new Set<string>();
+  const urlsWithDomain = urls.map((url) => ({
+    url,
+    domain: extractDomain(url),
+  }));
+  for (const url of urlsWithDomain) {
+    domains.add(url.domain);
+  }
+  return Array.from(domains).map((domain) => {
+    return urlsWithDomain
+      .filter((url) => url.domain === domain)
+      .map((url) => url.url);
+  });
 }

 function extractDomain(url: string): string {
-  return url.split('://')[1].split('/')[0].toLowerCase();
+  return url.split('://')[1].split('/')[0].toLowerCase();
 }