Skip to content

Commit

Permalink
chore: token limit is configurable through the configuration file
Browse files Browse the repository at this point in the history
  • Loading branch information
gentlementlegen committed Dec 29, 2024
1 parent 3cbd808 commit a0695e3
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 5 deletions.
6 changes: 6 additions & 0 deletions src/configuration/content-evaluator-config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,12 @@ const openAiType = Type.Object(
description: "OpenAI model, e.g. gpt-4o",
examples: ["gpt-4o"],
}),
tokenCountLimit: Type.Integer({
default: 124000,
description:
"Token count limit for a given model. If the content goes beyond the token limit, content will get truncated during evaluation.",
examples: [124000],
}),
/**
* Specific endpoint to send the comments to.
*/
Expand Down
8 changes: 3 additions & 5 deletions src/parser/content-evaluator-module.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,6 @@ import { ContextPlugin } from "../types/plugin-input";
import { GithubCommentScore, Result } from "../types/results";
import { TfIdf } from "../helpers/tf-idf";

const TOKEN_MODEL_LIMIT = 124000;

/**
* Evaluates and rates comments.
*/
Expand Down Expand Up @@ -178,13 +176,14 @@ export class ContentEvaluatorModule extends BaseModule {
): Promise<Relevances> {
let commentRelevances: Relevances = {};
let prCommentRelevances: Relevances = {};
const tokenLimit = this._configuration?.openAi.tokenCountLimit ?? 124000;

if (comments.length) {
const dummyResponse = JSON.stringify(this._generateDummyResponse(comments), null, 2);
const maxTokens = this._calculateMaxTokens(dummyResponse);

let promptForComments = this._generatePromptForComments(specification, comments, allComments);
if (this._calculateMaxTokens(promptForComments, Infinity) > TOKEN_MODEL_LIMIT) {
if (this._calculateMaxTokens(promptForComments, Infinity) > tokenLimit) {
const tfidf = new TfIdf();
const mostImportantComments = tfidf.getTopComments(specification, allComments);
promptForComments = this._generatePromptForComments(
Expand All @@ -193,7 +192,6 @@ export class ContentEvaluatorModule extends BaseModule {
mostImportantComments.map((o) => o.comment)
);
}
console.log(promptForComments);
commentRelevances = await this._submitPrompt(promptForComments, maxTokens);
}

Expand All @@ -202,7 +200,7 @@ export class ContentEvaluatorModule extends BaseModule {
const maxTokens = this._calculateMaxTokens(dummyResponse);

let promptForPrComments = this._generatePromptForPrComments(specification, prComments);
if (this._calculateMaxTokens(promptForPrComments, Infinity) > TOKEN_MODEL_LIMIT) {
if (this._calculateMaxTokens(promptForPrComments, Infinity) > tokenLimit) {
const tfidf = new TfIdf();
const mostImportantComments = tfidf.getTopComments(specification, allComments);
promptForPrComments = this._generatePromptForComments(
Expand Down

0 comments on commit a0695e3

Please sign in to comment.