Skip to content

Commit

Permalink
fix(route): usenix (#15362)
Browse files Browse the repository at this point in the history
  • Loading branch information
TonyRL authored Apr 24, 2024
1 parent aba18ac commit 19dd960
Show file tree
Hide file tree
Showing 3 changed files with 46 additions and 68 deletions.
21 changes: 6 additions & 15 deletions lib/routes/ieee-security/sp.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { Route } from '@/types';
import got from '@/utils/got';
import ofetch from '@/utils/ofetch';
import { load } from 'cheerio';
import { parseDate } from '@/utils/parse-date';
const url = 'https://www.ieee-security.org/';
Expand All @@ -9,15 +9,6 @@ export const route: Route = {
path: '/security-privacy',
categories: ['journal'],
example: '/ieee-security/security-privacy',
parameters: {},
features: {
requireConfig: false,
requirePuppeteer: false,
antiCrawler: false,
supportBT: false,
supportPodcast: false,
supportScihub: false,
},
radar: [
{
source: ['ieee-security.org/TC/SP-Index.html', 'ieee-security.org/'],
Expand All @@ -33,16 +24,16 @@ export const route: Route = {
async function handler() {
const last = new Date().getFullYear() + 1;
const urlList = Array.from({ length: last - 2020 }, (_, v) => `${url}TC/SP${v + 2020}/program-papers.html`);
const responses = await got.all(urlList.map((url) => got(url)));
const responses = await Promise.allSettled(urlList.map((url) => ofetch(url)));

const items = responses.map((response) => {
const $ = load(response.data);
const items = responses.flatMap((response, i) => {
const $ = load(response.value);
return $('div.panel-body > div.list-group-item')
.toArray()
.map((item) => {
item = $(item);
const title = item.find('b').text().trim();
const link = response.url;
const link = urlList[i];
return {
title,
author: item.html().trim().split('<br>')[1].trim(),
Expand All @@ -57,6 +48,6 @@ async function handler() {
link: `${url}TC/SP-Index.html`,
description: 'IEEE Symposium on Security and Privacy Accepted Papers',
allowEmpty: true,
item: items.flat(),
item: items,
};
}
38 changes: 16 additions & 22 deletions lib/routes/sigsac/ccs.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { Route } from '@/types';
import got from '@/utils/got';
import ofetch from '@/utils/ofetch';
import { load } from 'cheerio';
const url = 'https://www.sigsac.org/';
import { parseDate } from '@/utils/parse-date';
Expand All @@ -9,42 +9,36 @@ export const route: Route = {
path: '/ccs',
categories: ['journal'],
example: '/sigsac/ccs',
parameters: {},
features: {
requireConfig: false,
requirePuppeteer: false,
antiCrawler: false,
supportBT: false,
supportPodcast: false,
supportScihub: false,
},
radar: [
{
source: ['sigsac.org/ccs.html', 'sigsac.org/'],
},
],
name: 'The ACM Conference on Computer and Communications Security',
maintainers: [],
maintainers: ['ZeddYu'],
handler,
url: 'sigsac.org/ccs.html',
description: `Return results from 2020`,
description: 'Return results from 2020',
};

async function handler() {
const last = new Date().getFullYear() + 1;
const yearList = Array.from({ length: last - 2020 }, (_, v) => `${url}ccs/CCS${v + 2020}/`);
const yearResponses = await got.all(yearList.map((url) => got(url)));
const yearResponses = await Promise.allSettled(yearList.map((url) => ofetch(url)));

const urlList = yearResponses.map((response) => {
const $ = load(response.data);
return new URL($('a:contains("Accepted Papers")').attr('href'), response.url).href;
});
const urlList = yearResponses
.map((response, i) => {
const $ = load(response.value);
const href = $('a:contains("Accepted Papers")').attr('href');
return href && new URL($('a:contains("Accepted Papers")').attr('href')!, yearList[i]).href;
})
.filter(Boolean);

const responses = await got.all(urlList.map((url) => got(url)));
const responses = await Promise.allSettled(urlList.map((url) => ofetch(url)));

const items = responses.map((response) => {
const $ = load(response.data);
const link = response.url;
const items = responses.flatMap((response, i) => {
const $ = load(response.value);
const link = urlList[i];
const paperSection = $('div.papers-item')
.toArray()
.map((item) => {
Expand Down Expand Up @@ -78,6 +72,6 @@ async function handler() {
link: url,
description: 'The ACM Conference on Computer and Communications Security (CCS) Accepted Papers',
allowEmpty: true,
item: items.flat(),
item: items,
};
}
55 changes: 24 additions & 31 deletions lib/routes/usenix/usenix.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { Route } from '@/types';
import cache from '@/utils/cache';
import got from '@/utils/got';
import ofetch from '@/utils/ofetch';
import { load } from 'cheerio';
const url = 'https://www.usenix.org';
import { parseDate } from '@/utils/parse-date';
Expand All @@ -11,15 +11,6 @@ export const route: Route = {
path: '/usenix-security-sympoium',
categories: ['journal'],
example: '/usenix/usenix-security-sympoium',
parameters: {},
features: {
requireConfig: false,
requirePuppeteer: false,
antiCrawler: false,
supportBT: false,
supportPodcast: false,
supportScihub: false,
},
radar: [
{
source: ['usenix.org/conferences/all', 'usenix.org/conferences', 'usenix.org/'],
Expand All @@ -35,39 +26,41 @@ export const route: Route = {
async function handler() {
const last = new Date().getFullYear() + 1;
const urlList = Array.from({ length: last - 2020 }, (_, v) => `${url}/conference/usenixsecurity${v + 20}`).flatMap((url) => seasons.map((season) => `${url}/${season}-accepted-papers`));
const responses = await got.all(
const responses = await Promise.allSettled(
urlList.map(async (url) => {
let res;
try {
res = await got(url);
res = await ofetch(url);
} catch {
// ignore 404
}
return res;
})
);

const list = responses.filter(Boolean).flatMap((response) => {
const $ = load(response.data);
const pubDate = parseDate($('meta[property=article:modified_time]').attr('content'));
return $('article.node-paper')
.toArray()
.map((item) => {
item = $(item);
return {
title: item.find('h2.node-title > a').text().trim(),
link: `${url}${item.find('h2.node-title > a').attr('href')}`,
author: item.find('div.field.field-name-field-paper-people-text.field-type-text-long.field-label-hidden p').text().trim(),
pubDate,
};
});
});
const list = responses
.filter((r) => r.status === 'fulfilled' && r.value)
.flatMap((response) => {
const $ = load(response.value);
const pubDate = parseDate($('meta[property=article:modified_time]').attr('content'));
return $('article.node-paper')
.toArray()
.map((item) => {
item = $(item);
return {
title: item.find('h2.node-title > a').text().trim(),
link: `${url}${item.find('h2.node-title > a').attr('href')}`,
author: item.find('div.field.field-name-field-paper-people-text.field-type-text-long.field-label-hidden p').text().trim(),
pubDate,
};
});
});

const items = await Promise.all(
const items = await Promise.allSettled(
list.map((item) =>
cache.tryGet(item.link, async () => {
const response = await got(item.link);
const $ = load(response.data);
const response = await ofetch(item.link);
const $ = load(response);
item.description = $('.content').html();

return item;
Expand All @@ -80,6 +73,6 @@ async function handler() {
link: url,
    description: 'USENIX Security Symposium Accepted Papers',
allowEmpty: true,
item: items,
item: items.filter((r) => r.status === 'fulfilled').map((r) => (r as PromiseFulfilledResult<any>).value),
};
}

0 comments on commit 19dd960

Please sign in to comment.