Skip to content

Commit

Permalink
Set random seed in tests
Browse files Browse the repository at this point in the history
  • Loading branch information
jvamvas committed Jan 31, 2024
1 parent 67b7074 commit 608cc68
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 2 deletions.
4 changes: 3 additions & 1 deletion tests/test_generate.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from unittest import TestCase

import torch
from transformers import AutoTokenizer, GPT2LMHeadModel, M2M100ForConditionalGeneration, GenerationConfig
from transformers import AutoTokenizer, GPT2LMHeadModel, M2M100ForConditionalGeneration, GenerationConfig, set_seed
from transformers.generation import SampleDecoderOnlyOutput, SampleEncoderDecoderOutput

from mbr import MBR, MBRConfig, MBROutput, MetricOutput
Expand All @@ -13,6 +13,7 @@
class DecoderOnlyTestCase(TestCase):

def setUp(self):
    """Build a deterministic decoder-only (distilgpt2) fixture for each test.

    Seeding happens before model construction so that any sampling-based
    generation performed in the tests is reproducible across runs.
    """
    # Seed the RNGs via the transformers helper (per this commit's intent:
    # "Set random seed in tests").
    set_seed(42)
    # NOTE(review): from_pretrained fetches weights from the HF hub/cache —
    # these tests need network access or a warm cache. eval() disables dropout
    # so forward passes are deterministic given the seed.
    self.model = MBR(GPT2LMHeadModel).from_pretrained("distilgpt2").eval()
    self.tokenizer = AutoTokenizer.from_pretrained("distilgpt2")

Expand Down Expand Up @@ -189,6 +190,7 @@ def test_references_config(self):
class EncoderDecoderTestCase(TestCase):

def setUp(self):
    """Build a deterministic encoder-decoder (small100) fixture for each test.

    Seeding happens before model construction so that any sampling-based
    generation performed in the tests is reproducible across runs.
    """
    # Seed the RNGs via the transformers helper (per this commit's intent:
    # "Set random seed in tests").
    set_seed(42)
    # NOTE(review): from_pretrained fetches weights from the HF hub/cache —
    # these tests need network access or a warm cache. eval() disables dropout
    # so forward passes are deterministic given the seed.
    self.model = MBR(M2M100ForConditionalGeneration).from_pretrained("alirezamsh/small100").eval()
    self.tokenizer = AutoTokenizer.from_pretrained("alirezamsh/small100")
    # Target language for the SMaLL-100 tokenizer; tests translate into French.
    self.tokenizer.tgt_lang = "fr"
Expand Down
4 changes: 3 additions & 1 deletion tests/test_pipelines.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import unittest
from unittest import TestCase

from transformers import AutoTokenizer, pipeline, GPT2LMHeadModel, M2M100ForConditionalGeneration
from transformers import AutoTokenizer, pipeline, GPT2LMHeadModel, M2M100ForConditionalGeneration, set_seed

from mbr import MBRConfig
from mbr import MBR
Expand All @@ -11,6 +11,7 @@
class TextGenerationTestCase(TestCase):

def setUp(self):
    """Build a deterministic text-generation pipeline fixture for each test.

    Seeding happens before model construction so that any sampling-based
    generation performed through the pipeline is reproducible across runs.
    """
    # Seed the RNGs via the transformers helper (per this commit's intent:
    # "Set random seed in tests").
    set_seed(42)
    # NOTE(review): from_pretrained fetches weights from the HF hub/cache —
    # these tests need network access or a warm cache. eval() disables dropout
    # so forward passes are deterministic given the seed.
    self.model = MBR(GPT2LMHeadModel).from_pretrained("distilgpt2").eval()
    self.tokenizer = AutoTokenizer.from_pretrained("distilgpt2")
    # Wrap the MBR-enabled model in a standard transformers pipeline.
    self.pipeline = pipeline("text-generation", model=self.model, tokenizer=self.tokenizer)
Expand All @@ -32,6 +33,7 @@ def test_pipeline(self):
class TranslationTestCase(TestCase):

def setUp(self):
    """Build a deterministic en→fr translation pipeline fixture for each test.

    Seeding happens before model construction so that any sampling-based
    generation performed through the pipeline is reproducible across runs.
    """
    # Seed the RNGs via the transformers helper (per this commit's intent:
    # "Set random seed in tests").
    set_seed(42)
    # NOTE(review): from_pretrained fetches weights from the HF hub/cache —
    # these tests need network access or a warm cache. eval() disables dropout
    # so forward passes are deterministic given the seed.
    self.model = MBR(M2M100ForConditionalGeneration).from_pretrained("alirezamsh/small100").eval()
    self.tokenizer = AutoTokenizer.from_pretrained("alirezamsh/small100")
    # Wrap the MBR-enabled model in a standard transformers translation pipeline.
    self.pipeline = pipeline("translation_en_to_fr", model=self.model, tokenizer=self.tokenizer)
Expand Down

0 comments on commit 608cc68

Please sign in to comment.