diff --git a/lib/routes/ieee-security/sp.ts b/lib/routes/ieee-security/sp.ts
index 7121e6ffe8b7ec..d7627fcf647ad4 100644
--- a/lib/routes/ieee-security/sp.ts
+++ b/lib/routes/ieee-security/sp.ts
@@ -1,5 +1,5 @@
import { Route } from '@/types';
-import got from '@/utils/got';
+import ofetch from '@/utils/ofetch';
import { load } from 'cheerio';
import { parseDate } from '@/utils/parse-date';
const url = 'https://www.ieee-security.org/';
@@ -9,15 +9,6 @@ export const route: Route = {
path: '/security-privacy',
categories: ['journal'],
example: '/ieee-security/security-privacy',
- parameters: {},
- features: {
- requireConfig: false,
- requirePuppeteer: false,
- antiCrawler: false,
- supportBT: false,
- supportPodcast: false,
- supportScihub: false,
- },
radar: [
{
source: ['ieee-security.org/TC/SP-Index.html', 'ieee-security.org/'],
@@ -33,16 +24,16 @@ export const route: Route = {
async function handler() {
const last = new Date().getFullYear() + 1;
const urlList = Array.from({ length: last - 2020 }, (_, v) => `${url}TC/SP${v + 2020}/program-papers.html`);
- const responses = await got.all(urlList.map((url) => got(url)));
+ const responses = await Promise.allSettled(urlList.map((url) => ofetch(url)));
- const items = responses.map((response) => {
- const $ = load(response.data);
+ const items = responses.flatMap((response, i) => {
+        const $ = load(response.status === 'fulfilled' ? response.value : '');
return $('div.panel-body > div.list-group-item')
.toArray()
.map((item) => {
item = $(item);
const title = item.find('b').text().trim();
- const link = response.url;
+ const link = urlList[i];
return {
title,
                    author: item.html().trim().split('\n')[1].trim(),
@@ -57,6 +48,6 @@ async function handler() {
link: `${url}TC/SP-Index.html`,
description: 'IEEE Symposium on Security and Privacy Accepted Papers',
allowEmpty: true,
- item: items.flat(),
+ item: items,
};
}
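
The change above trades got.all, which rejects as soon as any yearly page is missing, for Promise.allSettled plus an index lookup into urlList, because a bare ofetch call resolves to the parsed body and no longer carries the request URL. A minimal sketch of that pattern, assuming the '@/utils/ofetch' wrapper resolves these HTML pages to strings; the fetchPages helper is illustrative and not part of this change:

import ofetch from '@/utils/ofetch';

// Illustrative helper (not in this change): fetch several URLs, drop the ones
// that failed (e.g. a 404 for a future year), and keep each body paired with
// the URL it was requested from. Assumes ofetch returns these pages as strings.
async function fetchPages(urls: string[]): Promise<{ url: string; html: string }[]> {
    const results = await Promise.allSettled(urls.map((u) => ofetch(u)));
    return results.flatMap((result, i) => (result.status === 'fulfilled' ? [{ url: urls[i], html: result.value }] : []));
}
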
diff --git a/lib/routes/sigsac/ccs.ts b/lib/routes/sigsac/ccs.ts
index 2db3c39c5dd4e0..0647652739790c 100644
--- a/lib/routes/sigsac/ccs.ts
+++ b/lib/routes/sigsac/ccs.ts
@@ -1,5 +1,5 @@
import { Route } from '@/types';
-import got from '@/utils/got';
+import ofetch from '@/utils/ofetch';
import { load } from 'cheerio';
const url = 'https://www.sigsac.org/';
import { parseDate } from '@/utils/parse-date';
@@ -9,42 +9,36 @@ export const route: Route = {
path: '/ccs',
categories: ['journal'],
example: '/sigsac/ccs',
- parameters: {},
- features: {
- requireConfig: false,
- requirePuppeteer: false,
- antiCrawler: false,
- supportBT: false,
- supportPodcast: false,
- supportScihub: false,
- },
radar: [
{
source: ['sigsac.org/ccs.html', 'sigsac.org/'],
},
],
name: 'The ACM Conference on Computer and Communications Security',
- maintainers: [],
+ maintainers: ['ZeddYu'],
handler,
url: 'sigsac.org/ccs.html',
- description: `Return results from 2020`,
+ description: 'Return results from 2020',
};
async function handler() {
const last = new Date().getFullYear() + 1;
const yearList = Array.from({ length: last - 2020 }, (_, v) => `${url}ccs/CCS${v + 2020}/`);
- const yearResponses = await got.all(yearList.map((url) => got(url)));
+ const yearResponses = await Promise.allSettled(yearList.map((url) => ofetch(url)));
- const urlList = yearResponses.map((response) => {
- const $ = load(response.data);
- return new URL($('a:contains("Accepted Papers")').attr('href'), response.url).href;
- });
+ const urlList = yearResponses
+ .map((response, i) => {
+            const $ = load(response.status === 'fulfilled' ? response.value : '');
+ const href = $('a:contains("Accepted Papers")').attr('href');
+            return href && new URL(href, yearList[i]).href;
+ })
+        .filter((href): href is string => Boolean(href));
- const responses = await got.all(urlList.map((url) => got(url)));
+ const responses = await Promise.allSettled(urlList.map((url) => ofetch(url)));
- const items = responses.map((response) => {
- const $ = load(response.data);
- const link = response.url;
+ const items = responses.flatMap((response, i) => {
+        const $ = load(response.status === 'fulfilled' ? response.value : '');
+ const link = urlList[i];
const paperSection = $('div.papers-item')
.toArray()
.map((item) => {
@@ -78,6 +72,6 @@ async function handler() {
link: url,
description: 'The ACM Conference on Computer and Communications Security (CCS) Accepted Papers',
allowEmpty: true,
- item: items.flat(),
+ item: items,
};
}
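
The status checks and .filter() calls above work at runtime, but TypeScript cannot narrow PromiseSettledResult through a plain boolean filter on its own; a small reusable type guard lets the compiler do that. A minimal sketch, not part of this change, with the guard name chosen here for illustration:

// Illustrative type guard: lets Array.prototype.filter narrow
// PromiseSettledResult<T> down to PromiseFulfilledResult<T>.
function isFulfilled<T>(result: PromiseSettledResult<T>): result is PromiseFulfilledResult<T> {
    return result.status === 'fulfilled';
}

// Usage sketch: `pages` ends up typed as the fulfilled value type, no casts needed.
// const settled = await Promise.allSettled(urlList.map((u) => ofetch(u)));
// const pages = settled.filter(isFulfilled).map((r) => r.value);
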
diff --git a/lib/routes/usenix/usenix.ts b/lib/routes/usenix/usenix.ts
index 9af5aa236dd858..dec57b60fe051c 100644
--- a/lib/routes/usenix/usenix.ts
+++ b/lib/routes/usenix/usenix.ts
@@ -1,6 +1,6 @@
import { Route } from '@/types';
import cache from '@/utils/cache';
-import got from '@/utils/got';
+import ofetch from '@/utils/ofetch';
import { load } from 'cheerio';
const url = 'https://www.usenix.org';
import { parseDate } from '@/utils/parse-date';
@@ -11,15 +11,6 @@ export const route: Route = {
path: '/usenix-security-sympoium',
categories: ['journal'],
example: '/usenix/usenix-security-sympoium',
- parameters: {},
- features: {
- requireConfig: false,
- requirePuppeteer: false,
- antiCrawler: false,
- supportBT: false,
- supportPodcast: false,
- supportScihub: false,
- },
radar: [
{
source: ['usenix.org/conferences/all', 'usenix.org/conferences', 'usenix.org/'],
@@ -35,11 +26,11 @@ export const route: Route = {
async function handler() {
const last = new Date().getFullYear() + 1;
const urlList = Array.from({ length: last - 2020 }, (_, v) => `${url}/conference/usenixsecurity${v + 20}`).flatMap((url) => seasons.map((season) => `${url}/${season}-accepted-papers`));
- const responses = await got.all(
+ const responses = await Promise.allSettled(
urlList.map(async (url) => {
let res;
try {
- res = await got(url);
+ res = await ofetch(url);
} catch {
// ignore 404
}
@@ -47,27 +38,29 @@ async function handler() {
})
);
- const list = responses.filter(Boolean).flatMap((response) => {
- const $ = load(response.data);
- const pubDate = parseDate($('meta[property=article:modified_time]').attr('content'));
- return $('article.node-paper')
- .toArray()
- .map((item) => {
- item = $(item);
- return {
- title: item.find('h2.node-title > a').text().trim(),
- link: `${url}${item.find('h2.node-title > a').attr('href')}`,
- author: item.find('div.field.field-name-field-paper-people-text.field-type-text-long.field-label-hidden p').text().trim(),
- pubDate,
- };
- });
- });
+ const list = responses
+        .filter((r): r is PromiseFulfilledResult<string> => r.status === 'fulfilled' && Boolean(r.value))
+ .flatMap((response) => {
+ const $ = load(response.value);
+ const pubDate = parseDate($('meta[property=article:modified_time]').attr('content'));
+ return $('article.node-paper')
+ .toArray()
+ .map((item) => {
+ item = $(item);
+ return {
+ title: item.find('h2.node-title > a').text().trim(),
+ link: `${url}${item.find('h2.node-title > a').attr('href')}`,
+ author: item.find('div.field.field-name-field-paper-people-text.field-type-text-long.field-label-hidden p').text().trim(),
+ pubDate,
+ };
+ });
+ });
- const items = await Promise.all(
+ const items = await Promise.allSettled(
list.map((item) =>
cache.tryGet(item.link, async () => {
- const response = await got(item.link);
- const $ = load(response.data);
+ const response = await ofetch(item.link);
+ const $ = load(response);
item.description = $('.content').html();
return item;
@@ -80,6 +73,6 @@ async function handler() {
link: url,
description: 'USENIX Security Symposium Accpeted Papers',
allowEmpty: true,
- item: items,
+        item: items.filter((r) => r.status === 'fulfilled').map((r) => (r as PromiseFulfilledResult<any>).value),
};
}
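
In all three routes, link = urlList[i] stands in for got's response.url because a plain ofetch call resolves to the body only. If the final URL after redirects were ever needed again, upstream ofetch exposes a .raw variant returning the full response; a hedged sketch, assuming the '@/utils/ofetch' wrapper forwards .raw like the upstream $fetch instance does:

import ofetch from '@/utils/ofetch';
import { load } from 'cheerio';

// Assumption: the @/utils/ofetch wrapper exposes upstream ofetch's .raw method.
const res = await ofetch.raw('https://www.ieee-security.org/TC/SP2024/program-papers.html');
const finalUrl = res.url; // resolved URL after any redirects (native Response field)
const $ = load(res._data); // parsed body; an HTML string for these pages
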