fix: algolia reupdate
robsongajunior committed Nov 4, 2024
1 parent 839312e commit 47f40cc
Showing 2 changed files with 71 additions and 59 deletions.
7 changes: 4 additions & 3 deletions .github/workflows/prod.yml
@@ -40,9 +40,12 @@ jobs:
       - name: BUILD
         run: export NODE_OPTIONS="--max-old-space-size=8120" && export PUBLIC_GITHUB_TOKEN="${{ secrets.GH_PACKAGES_SECRET }}" && npm run build-production
 
+      - name: UPDATE Algolia data
+        run: npm install algoliasearch stream-json && node cicd/algolia-reindex.cjs app=${{ secrets.ALGOLIA_CONF_APP }} api=${{ secrets.ALGOLIA_CONF_API }}
+
       - name: PREPARE TO DEPLOYMENT
         run: rm -rf .eslintignore .eslintrc.js .prettierignore .prettierrc .nvmrc .npmrc pnpm-lock.yaml .astro .github node_modules src .editorconfig .gitignore astro.config.ts LICENSE package-lock.json package.json README.md tsconfig.json public plugins .git CODE_OF_CONDUCT.md helpcenter-cleaner.js postcss.config.js
 
       - name: PREPARE DIST FOLDER
         run: mv dist/* ./ && rm -rf dist; ls -la;
 
@@ -55,5 +58,3 @@ jobs:
       - name: PURGE Documentation
         run: npm install axios && node cicd/cdn-purge.js basic_token=${{ secrets.MKT_BASIC_TOKEN }}
 
-      - name: UPDATE Algolia data
-        run: npm install algoliasearch stream-json && node cicd/algolia-reindex.cjs app=${{ secrets.ALGOLIA_CONF_APP }} api=${{ secrets.ALGOLIA_CONF_API }}
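With this change the reindex step runs right after BUILD, before PREPARE TO DEPLOYMENT deletes the repository sources and before PREPARE DIST FOLDER moves dist/* to the root, which appears to be why the data-file paths inside cicd/algolia-reindex.cjs are updated in the same commit (see the script diff below). The step hands the Algolia credentials to the script as plain app=... and api=... arguments. The following sketch is illustrative only and not part of the commit; it shows how such key=value arguments arrive in process.argv and can be read back. The getArg helper and the file name are made up for the example; the script itself uses getArgAppId and getArgApi, shown further down.

// Illustrative only (not part of this commit): how "app=..." and "api=..."
// command-line arguments show up in process.argv for a Node script.
// Run as: node parse-args-example.cjs app=MY_APP_ID api=MY_API_KEY
const { argv } = require('node:process');

// getArg is a hypothetical helper for this example; the repository's script
// uses getArgAppId()/getArgApi() with the same regex-on-argv idea.
function getArg(name) {
  for (const val of argv) {
    const match = val.match(new RegExp(`^${name}=(.*)$`));
    if (match) {
      return match[1]; // everything after "name="
    }
  }
  return '';
}

console.log('app:', getArg('app'));
console.log('api:', getArg('api'));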
123 changes: 67 additions & 56 deletions cicd/algolia-reindex.cjs
@@ -1,84 +1,94 @@
 const fs = require('fs');
 const args = require('node:process');
-const algoliasearch = require("algoliasearch");
+const algoliasearch = require('algoliasearch');
 const StreamArray = require('stream-json/streamers/StreamArray');
 
 function getArgAppId() {
   let result = '';
 
   args.argv.forEach((val) => {
     let t = val.match(/app=(.*)/gi) || [];
     if (t.length) {
       result = t[0].replace(`app=`, '') || result;
     }
   });
 
   return result;
 }
 
 function getArgApi() {
   let result = '';
 
   args.argv.forEach((val) => {
     let t = val.match(/api=(.*)/gi) || [];
     if (t.length) {
       result = t[0].replace(`api=`, '') || result;
     }
   });
 
   return result;
 }
 
 function uploadBatches(index, filepath) {
   fs.stat(filepath, (error, stats) => {
-    if(!stats.isFile()) {
-      console.log(`[!] ${filepath} it is not a file or not exist.`);
-      return;
-    }
-
-    if(!index) {
-      throw '[!] An Algolia Index should be configured.';
-    }
-
-    let chunkLength = 0;
-    let chunkList = [];
-    let stream = fs.createReadStream(filepath).pipe(StreamArray.withParser());
-
-    index.clearObjects().then(() => {
-      stream.on('data', ({value}) => {
-        chunkLength++;
-        chunkList.push(value);
-
-        if(chunkList.length === 10000) {
-          stream.pause();
-
-          index.saveObjects(chunkList, {
-            autoGenerateObjectIDIfNotExist: true
-          }).then(() => {
-            chunkList = [];
-            stream.resume();
-          }).catch(function(error) {
-            throw error;
-          });
-        }
-      }).on('end', () => {
-        if (!chunkList.length) {
-          return;
-        }
-
-        index.saveObjects(chunkList, {
-          autoGenerateObjectIDIfNotExist: true
-        }).then(() => {
-          chunkList = [];
-        }).catch(function(error) {
-          throw error;
-        });
-      }).on('error', (err) => {
-        throw err;
-      });
-    }).catch(function(error) {
-      throw error;
-    });
+    if (error) {
+      console.error(error);
+      process.exit(1)
+    };
+    if (!stats.isFile) {
+      console.error(`[!] ${filepath} it is not a file or not exist.`);
+      process.exit(1)
+    }
+
+    let chunkLength = 0;
+    let chunkList = [];
+    let stream = fs.createReadStream(filepath).pipe(StreamArray.withParser());
+
+    if (!index) {
+      console.error('[!] An Algolia Index should be configured.')
+      process.exit(1)
+    }
+
+    index.clearObjects().then(() => {
+      stream.on('data', ({ value }) => {
+        chunkLength++;
+        chunkList.push(value);
+
+        if (chunkList.length === 10000) {
+          stream.pause();
+
+          index.saveObjects(chunkList, {
+            autoGenerateObjectIDIfNotExist: true
+          }).then(() => {
+            chunkList = [];
+            stream.resume();
+          }).catch(function (error) {
+            console.log(error)
+            process.exit(1)
+          });
+        }
+      }).on('end', () => {
+        if (!chunkList.length) {
+          return;
+        }
+
+        index.saveObjects(chunkList, {
+          autoGenerateObjectIDIfNotExist: true
+        }).then(() => {
+          console.log(`Success saveObjects:`, filepath)
+          chunkList = [];
+        }).catch(function (error) {
+          console.log(error)
+          process.exit(1)
+        });
+      }).on('error', (err) => {
+        console.error(`Error saveObjects:`, error)
+        process.exit(1)
+      });
+    }).catch(function (error) {
+      console.error(`Error clearObjects:`, error)
+      process.exit(1)
+    });
   });
 }
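The rewritten uploadBatches keeps the same overall flow: clear the index, stream the JSON array with stream-json, buffer records, flush a batch of 10,000 objects to Algolia while the stream is paused, and flush the remainder on end. One detail worth noting when reading the new guard clauses: fs.Stats exposes isFile as a method, so !stats.isFile (without parentheses) is always false and that branch cannot fire, whereas the old code called stats.isFile(). The condensed sketch below restates the clear-then-batch pattern; it is illustrative only and not the repository's code, and the names reindex, BATCH_SIZE and the commented-out usage values are placeholders.

// Illustrative only (not part of this commit): the same clear-then-batch
// pattern used by uploadBatches, restated with placeholder names.
const fs = require('fs');
const algoliasearch = require('algoliasearch');
const StreamArray = require('stream-json/streamers/StreamArray');

const BATCH_SIZE = 10000; // same threshold the script uses

function reindex(index, filepath) {
  let batch = [];

  // Send the current batch and empty the buffer once Algolia accepts it.
  const flush = () =>
    index.saveObjects(batch, { autoGenerateObjectIDIfNotExist: true })
      .then(() => { batch = []; });

  const stream = fs.createReadStream(filepath).pipe(StreamArray.withParser());

  // Wipe the index first so stale records do not linger, then stream.
  return index.clearObjects().then(() => new Promise((resolve, reject) => {
    stream
      .on('data', ({ value }) => {
        batch.push(value);
        if (batch.length === BATCH_SIZE) {
          stream.pause(); // stop reading while the batch uploads
          flush().then(() => stream.resume()).catch(reject);
        }
      })
      .on('end', () => {
        // Flush whatever is left (fewer than BATCH_SIZE records).
        (batch.length ? flush() : Promise.resolve()).then(resolve, reject);
      })
      .on('error', reject);
  }));
}

// Hypothetical usage; credentials, index name and path are placeholders.
// const client = algoliasearch('APP_ID', 'ADMIN_API_KEY');
// reindex(client.initIndex('some-index'), './some/doc-all-data.json')
//   .then(() => console.log('reindex done'))
//   .catch((err) => { console.error(err); process.exit(1); });

Pausing the stream between uploads keeps roughly one batch of records in memory at a time, which is what makes streaming a large doc-all-data.json feasible.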

@@ -90,23 +100,24 @@ function uploadBatches(index, filepath) {
 
 const config = {
   key: {
-    app: getArgAppId(),
+    app: getArgAppId(),
     api: getArgApi()
   },
   filepath: {
-    en: './en/doc-all-data.json', // moved to the root during github actions pipeline
-    ptbr: './pt-br/doc-all-data.json' // moved to the root during github actions pipeline
+    en: '.dist/en/doc-all-data.json',
+    ptbr: '.dist/pt-br/doc-all-data.json'
   }
 }
 
 try {
-  if(!config.key.app || !config.key.api) {
+  if (!config.key.app || !config.key.api) {
     throw '[!] config.appid or config.api are invalid params';
   }
 
   let client = algoliasearch(config.key.app, config.key.api);
   uploadBatches(client.initIndex('azion-doc-en'), config.filepath.en);
   uploadBatches(client.initIndex('azion-doc-ptbr'), config.filepath.ptbr);
-} catch(err) {
+} catch (err) {
   console.log(err);
+  process.exit(1)
 }
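Besides the path and spacing changes, this hunk adds process.exit(1) to the top-level catch, matching the pattern used throughout the rewritten uploadBatches: failures are logged and the process exits non-zero, so the GitHub Actions step is marked as failed. Previously the entry point only logged configuration errors and carried on, so a run with missing credentials could still exit 0, and the promise callbacks re-threw; a value thrown from inside .catch() just becomes another rejected promise, so failing the job depended on Node's unhandled-rejection behaviour, which varies by Node version. A minimal illustration of the fail-fast pattern, not part of the commit:

// Illustrative only (not part of this commit): exiting explicitly with a
// non-zero code deterministically fails the CI step, instead of relying on
// how the running Node version handles an unhandled promise rejection.
Promise.reject(new Error('algolia upload failed'))
  .catch((error) => {
    console.error(error);
    process.exit(1); // non-zero exit marks the GitHub Actions step as failed
  });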
