You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@opennlp.apache.org by jz...@apache.org on 2022/10/31 11:37:27 UTC
[opennlp] branch master updated: OPENNLP-1361 - Upgrade JUnit 4 to version 5 (#431)
This is an automated email from the ASF dual-hosted git repository.
jzemerick pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/opennlp.git
The following commit(s) were added to refs/heads/master by this push:
new bab2990d OPENNLP-1361 - Upgrade JUnit 4 to version 5 (#431)
bab2990d is described below
commit bab2990d95118b67a9fd89e80f9c292111c154a2
Author: Richard Zowalla <13...@users.noreply.github.com>
AuthorDate: Mon Oct 31 12:37:20 2022 +0100
OPENNLP-1361 - Upgrade JUnit 4 to version 5 (#431)
---
opennlp-brat-annotator/pom.xml | 10 +-
opennlp-dl/pom.xml | 10 +-
.../dl/doccat/DocumentCategorizerDLEval.java | 52 ++--
.../AverageClassificationScoringStrategyTest.java | 20 +-
.../opennlp/dl/namefinder/NameFinderDLEval.java | 56 ++--
opennlp-morfologik-addon/pom.xml | 10 +-
.../builder/POSDictionayBuilderTest.java | 6 +-
.../lemmatizer/MorfologikLemmatizerTest.java | 14 +-
.../tagdict/MorfologikTagDictionaryTest.java | 30 +-
.../morfologik/tagdict/POSTaggerFactoryTest.java | 10 +-
opennlp-tools/pom.xml | 10 +-
.../test/java/opennlp/tools/HighMemoryUsage.java | 12 +-
.../tools/chunker/ChunkSampleStreamTest.java | 46 +--
.../opennlp/tools/chunker/ChunkSampleTest.java | 172 ++++++-----
.../ChunkerDetailedFMeasureListenerTest.java | 8 +-
.../tools/chunker/ChunkerEvaluatorTest.java | 20 +-
.../opennlp/tools/chunker/ChunkerFactoryTest.java | 28 +-
.../java/opennlp/tools/chunker/ChunkerMEIT.java | 20 +-
.../java/opennlp/tools/chunker/ChunkerMETest.java | 113 +++----
.../opennlp/tools/chunker/ChunkerModelTest.java | 25 +-
.../opennlp/tools/cmdline/ArgumentParserTest.java | 93 +++---
.../test/java/opennlp/tools/cmdline/CLITest.java | 46 +--
.../tools/cmdline/TerminateToolExceptionTest.java | 8 +-
.../tools/cmdline/TokenNameFinderToolTest.java | 59 ++--
.../convert/FileToStringSampleStreamTest.java | 34 +--
.../DictionaryAsSetCaseInsensitiveTest.java | 52 ++--
.../DictionaryAsSetCaseSensitiveTest.java | 56 ++--
.../opennlp/tools/dictionary/DictionaryTest.java | 62 ++--
.../doccat/BagOfWordsFeatureGeneratorTest.java | 29 +-
.../opennlp/tools/doccat/DoccatFactoryTest.java | 32 +-
.../tools/doccat/DocumentCategorizerMETest.java | 59 ++--
.../tools/doccat/DocumentCategorizerNBTest.java | 30 +-
.../opennlp/tools/doccat/DocumentSampleTest.java | 26 +-
.../tools/doccat/NGramFeatureGeneratorTest.java | 93 +++---
.../java/opennlp/tools/eval/AbstractEvalTest.java | 18 +-
.../opennlp/tools/eval/ArvoresDeitadasEval.java | 57 ++--
.../opennlp/tools/eval/Conll00ChunkerEval.java | 36 ++-
.../opennlp/tools/eval/Conll02NameFinderEval.java | 130 ++++-----
.../opennlp/tools/eval/ConllXPosTaggerEval.java | 87 +++---
.../tools/eval/OntoNotes4NameFinderEval.java | 24 +-
.../opennlp/tools/eval/OntoNotes4ParserEval.java | 14 +-
.../tools/eval/OntoNotes4PosTaggerEval.java | 16 +-
.../opennlp/tools/eval/SourceForgeModelEval.java | 72 ++---
.../tools/eval/UniversalDependency20Eval.java | 22 +-
.../tools/formats/Conll02NameSampleStreamTest.java | 38 +--
.../tools/formats/Conll03NameSampleStreamTest.java | 62 ++--
.../tools/formats/ConllXPOSSampleStreamTest.java | 154 +++++-----
.../tools/formats/DirectorySampleStreamTest.java | 120 +++++---
.../tools/formats/EvalitaNameSampleStreamTest.java | 28 +-
.../formats/NameFinderCensus90NameStreamTest.java | 71 +++--
.../tools/formats/ResourceAsStreamFactory.java | 3 +-
.../tools/formats/ad/ADChunkSampleStreamTest.java | 48 +--
.../tools/formats/ad/ADNameSampleStreamTest.java | 122 ++++----
.../tools/formats/ad/ADPOSSampleStreamTest.java | 74 ++---
.../tools/formats/ad/ADParagraphStreamTest.java | 12 +-
.../formats/ad/ADSentenceSampleStreamTest.java | 26 +-
.../tools/formats/ad/ADTokenSampleStreamTest.java | 26 +-
.../formats/brat/BratAnnotationStreamTest.java | 6 +-
.../tools/formats/brat/BratDocumentParserTest.java | 34 +--
.../tools/formats/brat/BratDocumentTest.java | 50 ++--
.../formats/brat/BratNameSampleStreamTest.java | 41 +--
.../conllu/ConlluLemmaSampleStreamTest.java | 12 +-
.../formats/conllu/ConlluPOSSampleStreamTest.java | 12 +-
.../conllu/ConlluSentenceSampleStreamTest.java | 24 +-
.../tools/formats/conllu/ConlluStreamTest.java | 90 +++---
.../conllu/ConlluTokenSampleStreamTest.java | 20 +-
.../tools/formats/conllu/ConlluWordLineTest.java | 26 +-
.../ConstitParseSampleStreamTest.java | 24 +-
.../IrishSentenceBankDocumentTest.java | 34 +--
.../leipzig/LeipzigLanguageSampleStreamTest.java | 36 ++-
.../tools/formats/letsmt/LetsmtDocumentTest.java | 20 +-
.../masc/MascNamedEntitySampleStreamTest.java | 43 ++-
.../formats/masc/MascPOSSampleStreamTest.java | 38 ++-
.../formats/masc/MascSentenceSampleStreamTest.java | 38 ++-
.../formats/masc/MascTokenSampleStreamTest.java | 38 ++-
.../formats/muc/DocumentSplitterStreamTest.java | 18 +-
.../opennlp/tools/formats/muc/SgmlParserTest.java | 4 +-
.../formats/nkjp/NKJPSegmentationDocumentTest.java | 19 +-
.../tools/formats/nkjp/NKJPTextDocumentTest.java | 27 +-
...efaultLanguageDetectorContextGeneratorTest.java | 16 +-
.../LanguageDetectorCrossValidatorTest.java | 9 +-
.../langdetect/LanguageDetectorEvaluatorTest.java | 18 +-
.../langdetect/LanguageDetectorFactoryTest.java | 26 +-
.../tools/langdetect/LanguageDetectorMETest.java | 48 +--
.../tools/langdetect/LanguageSampleTest.java | 68 +++--
.../opennlp/tools/langdetect/LanguageTest.java | 65 +++--
.../languagemodel/LanguageModelEvaluationTest.java | 10 +-
.../languagemodel/LanguageModelTestUtils.java | 6 +-
.../languagemodel/NgramLanguageModelTest.java | 68 ++---
.../lemmatizer/DictionaryLemmatizerMultiTest.java | 31 +-
.../tools/lemmatizer/DictionaryLemmatizerTest.java | 33 ++-
.../opennlp/tools/lemmatizer/LemmaSampleTest.java | 62 ++--
.../tools/lemmatizer/LemmatizerEvaluatorTest.java | 14 +-
.../opennlp/tools/lemmatizer/LemmatizerMETest.java | 56 ++--
.../test/java/opennlp/tools/ml/ArrayMathTest.java | 113 +++----
.../test/java/opennlp/tools/ml/BeamSearchTest.java | 63 ++--
.../java/opennlp/tools/ml/MockEventTrainer.java | 5 +-
.../java/opennlp/tools/ml/MockSequenceTrainer.java | 3 +-
.../java/opennlp/tools/ml/PrepAttachDataUtil.java | 4 +-
.../java/opennlp/tools/ml/TrainerFactoryTest.java | 34 +--
.../tools/ml/maxent/FootballEventStream.java | 14 +-
.../opennlp/tools/ml/maxent/GISIndexingTest.java | 116 ++++----
.../opennlp/tools/ml/maxent/GISTrainerTest.java | 40 +--
.../tools/ml/maxent/MaxentPrepAttachTest.java | 31 +-
.../opennlp/tools/ml/maxent/MockDataIndexer.java | 5 +-
.../tools/ml/maxent/RealValueModelTest.java | 29 +-
.../tools/ml/maxent/ScaleDoesntMatterTest.java | 16 +-
.../tools/ml/maxent/URLInputStreamFactory.java | 3 +-
.../ml/maxent/io/RealValueFileEventStreamTest.java | 16 +-
.../ml/maxent/quasinewton/LineSearchTest.java | 82 +++---
.../maxent/quasinewton/NegLogLikelihoodTest.java | 89 +++---
.../ml/maxent/quasinewton/QNMinimizerTest.java | 24 +-
.../ml/maxent/quasinewton/QNPrepAttachTest.java | 26 +-
.../tools/ml/maxent/quasinewton/QNTrainerTest.java | 49 ++--
.../java/opennlp/tools/ml/model/EventTest.java | 46 ++-
.../tools/ml/model/FileEventStreamTest.java | 55 ++--
.../tools/ml/model/ModelParameterChunkerTest.java | 41 ++-
.../tools/ml/model/OnePassDataIndexerTest.java | 38 +--
.../ml/model/OnePassRealValueDataIndexerTest.java | 100 +++----
.../ml/model/RealValueFileEventStreamTest.java | 55 ++--
.../tools/ml/model/SimpleEventStreamBuilder.java | 8 +-
.../tools/ml/model/TwoPassDataIndexerTest.java | 48 +--
.../ml/naivebayes/NaiveBayesCorrectnessTest.java | 35 +--
.../naivebayes/NaiveBayesModelReadWriteTest.java | 27 +-
.../ml/naivebayes/NaiveBayesPrepAttachTest.java | 22 +-
.../NaiveBayesSerializedCorrectnessTest.java | 36 +--
.../ml/perceptron/PerceptronPrepAttachTest.java | 32 +-
.../opennlp/tools/namefind/BilouCodecTest.java | 306 +++++++++----------
.../BilouNameFinderSequenceValidatorTest.java | 158 +++++-----
.../java/opennlp/tools/namefind/BioCodecTest.java | 130 +++++----
.../DictionaryNameFinderEvaluatorTest.java | 14 +-
.../tools/namefind/DictionaryNameFinderTest.java | 60 ++--
.../tools/namefind/NameFinderEventStreamTest.java | 66 ++---
.../opennlp/tools/namefind/NameFinderMETest.java | 146 ++++-----
.../namefind/NameFinderSequenceValidatorTest.java | 60 ++--
.../tools/namefind/NameSampleDataStreamTest.java | 325 ++++++++++-----------
.../opennlp/tools/namefind/NameSampleTest.java | 147 +++++-----
.../tools/namefind/NameSampleTypeFilterTest.java | 22 +-
.../tools/namefind/RegexNameFinderFactoryTest.java | 48 +--
.../tools/namefind/RegexNameFinderTest.java | 42 +--
.../TokenNameFinderCrossValidatorTest.java | 65 +++--
.../namefind/TokenNameFinderEvaluatorTest.java | 27 +-
.../tools/namefind/TokenNameFinderModelTest.java | 21 +-
.../opennlp/tools/ngram/NGramCharModelTest.java | 50 ++--
.../opennlp/tools/ngram/NGramGeneratorTest.java | 120 ++++----
.../java/opennlp/tools/ngram/NGramModelTest.java | 198 +++++++------
.../java/opennlp/tools/ngram/NGramUtilsTest.java | 42 +--
.../tools/parser/ChunkSampleStreamTest.java | 118 ++++----
.../tools/parser/ParseSampleStreamTest.java | 16 +-
.../test/java/opennlp/tools/parser/ParseTest.java | 70 ++---
.../java/opennlp/tools/parser/ParserTestUtil.java | 4 +-
.../opennlp/tools/parser/PosSampleStreamTest.java | 80 ++---
.../opennlp/tools/parser/chunking/ParserTest.java | 6 +-
.../tools/parser/lang/en/HeadRulesTest.java | 8 +-
.../tools/parser/treeinsert/ParserTest.java | 4 +-
.../ConfigurablePOSContextGeneratorTest.java | 20 +-
.../postag/DefaultPOSContextGeneratorTest.java | 46 ++-
.../tools/postag/DummyPOSTaggerFactory.java | 5 +-
.../opennlp/tools/postag/POSDictionaryTest.java | 55 ++--
.../opennlp/tools/postag/POSEvaluatorTest.java | 16 +-
.../java/opennlp/tools/postag/POSModelTest.java | 12 +-
.../tools/postag/POSSampleEventStreamTest.java | 16 +-
.../java/opennlp/tools/postag/POSSampleTest.java | 49 ++--
.../opennlp/tools/postag/POSTaggerFactoryTest.java | 66 +++--
.../java/opennlp/tools/postag/POSTaggerMEIT.java | 22 +-
.../java/opennlp/tools/postag/POSTaggerMETest.java | 56 ++--
.../tools/postag/WordTagSampleStreamTest.java | 36 +--
.../DefaultEndOfSentenceScannerTest.java | 18 +-
.../sentdetect/DefaultSDContextGeneratorTest.java | 16 +-
.../sentdetect/NewlineSentenceDetectorTest.java | 18 +-
.../tools/sentdetect/SDEventStreamTest.java | 16 +-
.../sentdetect/SentenceDetectorEvaluatorTest.java | 19 +-
.../sentdetect/SentenceDetectorFactoryTest.java | 86 +++---
.../tools/sentdetect/SentenceDetectorMEIT.java | 72 ++---
.../tools/sentdetect/SentenceDetectorMETest.java | 106 +++----
.../tools/sentdetect/SentenceSampleTest.java | 41 +--
.../opennlp/tools/stemmer/PorterStemmerTest.java | 20 +-
.../opennlp/tools/stemmer/SnowballStemmerTest.java | 148 +++++-----
.../tokenize/DetokenizationDictionaryTest.java | 22 +-
.../tools/tokenize/DetokenizerEvaluatorTest.java | 18 +-
.../tools/tokenize/DictionaryDetokenizerTest.java | 38 +--
.../tools/tokenize/SimpleTokenizerTest.java | 92 +++---
.../tools/tokenize/TokSpanEventStreamTest.java | 20 +-
.../tools/tokenize/TokenSampleStreamTest.java | 53 ++--
.../opennlp/tools/tokenize/TokenSampleTest.java | 56 ++--
.../tools/tokenize/TokenizerEvaluatorTest.java | 17 +-
.../tools/tokenize/TokenizerFactoryTest.java | 120 ++++----
.../java/opennlp/tools/tokenize/TokenizerMEIT.java | 14 +-
.../opennlp/tools/tokenize/TokenizerMETest.java | 111 +++----
.../opennlp/tools/tokenize/TokenizerModelTest.java | 4 +-
.../tools/tokenize/WhitespaceTokenStreamTest.java | 40 +--
.../tools/tokenize/WhitespaceTokenizerTest.java | 94 +++---
.../tools/tokenize/WordpieceTokenizerTest.java | 16 +-
.../tools/util/AbstractEventStreamTest.java | 16 +-
.../opennlp/tools/util/MockInputStreamFactory.java | 3 +-
.../opennlp/tools/util/ObjectStreamUtilsTest.java | 67 +++--
.../opennlp/tools/util/ParagraphStreamTest.java | 34 +--
.../tools/util/PlainTextByLineStreamTest.java | 52 ++--
.../test/java/opennlp/tools/util/SequenceTest.java | 26 +-
.../src/test/java/opennlp/tools/util/SpanTest.java | 167 ++++++-----
.../java/opennlp/tools/util/StringListTest.java | 60 ++--
.../java/opennlp/tools/util/StringUtilTest.java | 54 ++--
.../opennlp/tools/util/TrainingParametersTest.java | 82 +++---
.../test/java/opennlp/tools/util/VersionTest.java | 32 +-
.../util/eval/CrossValidationPartitionerTest.java | 131 ++++-----
.../java/opennlp/tools/util/eval/FMeasureTest.java | 52 ++--
.../java/opennlp/tools/util/eval/MeanTest.java | 24 +-
.../tools/util/ext/ExtensionLoaderTest.java | 10 +-
.../featuregen/BigramNameFeatureGeneratorTest.java | 42 +--
.../BrownBigramFeatureGeneratorTest.java | 48 +--
.../featuregen/CachedFeatureGeneratorTest.java | 48 +--
.../CharacterNgramFeatureGeneratorTest.java | 30 +-
.../FeatureGenWithSerializerMapping.java | 4 +-
.../util/featuregen/FeatureGeneratorUtilTest.java | 62 ++--
.../GeneratorFactoryClassicFormatTest.java | 50 ++--
.../util/featuregen/GeneratorFactoryTest.java | 128 ++++----
.../util/featuregen/IdentityFeatureGenerator.java | 2 +-
.../tools/util/featuregen/InSpanGeneratorTest.java | 20 +-
.../POSTaggerNameFeatureGeneratorTest.java | 8 +-
.../featuregen/PosTaggerFeatureGeneratorTest.java | 28 +-
.../featuregen/PrefixFeatureGeneratorTest.java | 74 ++---
.../PreviousMapFeatureGeneratorTest.java | 18 +-
.../PreviousTwoMapFeatureGeneratorTest.java | 20 +-
.../featuregen/SentenceFeatureGeneratorTest.java | 66 ++---
.../tools/util/featuregen/StringPatternTest.java | 156 +++++-----
.../featuregen/SuffixFeatureGeneratorTest.java | 70 ++---
.../featuregen/TokenClassFeatureGeneratorTest.java | 24 +-
.../util/featuregen/TokenFeatureGeneratorTest.java | 22 +-
.../TokenPatternFeatureGeneratorTest.java | 48 +--
.../TrigramNameFeatureGeneratorTest.java | 50 ++--
.../featuregen/WindowFeatureGeneratorTest.java | 54 ++--
.../tools/util/model/ByteArraySerializerTest.java | 12 +-
.../EmojiCharSequenceNormalizerTest.java | 8 +-
.../NumberCharSequenceNormalizerTest.java | 8 +-
.../ShrinkCharSequenceNormalizerTest.java | 16 +-
.../TwitterCharSequenceNormalizerTest.java | 39 ++-
.../normalizer/UrlCharSequenceNormalizerTest.java | 20 +-
opennlp-uima/pom.xml | 10 +-
.../opennlp/uima/AnnotatorsInitializationTest.java | 4 +-
.../uima/dictionary/DictionaryResourceTest.java | 30 +-
.../uima/util/AnnotationComboIteratorTest.java | 8 +-
pom.xml | 14 +-
242 files changed, 5676 insertions(+), 5502 deletions(-)
diff --git a/opennlp-brat-annotator/pom.xml b/opennlp-brat-annotator/pom.xml
index 5d2f0735..2c9fc7ea 100644
--- a/opennlp-brat-annotator/pom.xml
+++ b/opennlp-brat-annotator/pom.xml
@@ -49,8 +49,14 @@
</dependency>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-api</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
diff --git a/opennlp-dl/pom.xml b/opennlp-dl/pom.xml
index db9159a6..18bbb678 100644
--- a/opennlp-dl/pom.xml
+++ b/opennlp-dl/pom.xml
@@ -43,9 +43,13 @@
<version>${onnxruntime.version}</version>
</dependency>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>${junit.version}</version>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-api</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
diff --git a/opennlp-dl/src/test/java/opennlp/dl/doccat/DocumentCategorizerDLEval.java b/opennlp-dl/src/test/java/opennlp/dl/doccat/DocumentCategorizerDLEval.java
index d10afc50..26fa89dc 100644
--- a/opennlp-dl/src/test/java/opennlp/dl/doccat/DocumentCategorizerDLEval.java
+++ b/opennlp-dl/src/test/java/opennlp/dl/doccat/DocumentCategorizerDLEval.java
@@ -26,9 +26,9 @@ import java.util.Set;
import ai.onnxruntime.OrtException;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
import opennlp.dl.AbstactDLTest;
import opennlp.dl.InferenceOptions;
@@ -73,15 +73,15 @@ public class DocumentCategorizerDLEval extends AbstactDLTest {
0.09859892477591832,
0.07552650570869446};
- Assert.assertTrue(Arrays.equals(expected, result));
- Assert.assertEquals(5, result.length);
+ Assertions.assertTrue(Arrays.equals(expected, result));
+ Assertions.assertEquals(5, result.length);
final String category = documentCategorizerDL.getBestCategory(result);
- Assert.assertEquals("very bad", category);
+ Assertions.assertEquals("very bad", category);
}
- @Ignore("This test should only be run if a GPU device is present.")
+ @Disabled("This test should only be run if a GPU device is present.")
@Test
public void categorizeWithGpu() throws Exception {
@@ -109,11 +109,11 @@ public class DocumentCategorizerDLEval extends AbstactDLTest {
0.3003573715686798,
0.6352779865264893};
- Assert.assertTrue(Arrays.equals(expected, result));
- Assert.assertEquals(5, result.length);
+ Assertions.assertTrue(Arrays.equals(expected, result));
+ Assertions.assertEquals(5, result.length);
final String category = documentCategorizerDL.getBestCategory(result);
- Assert.assertEquals("very good", category);
+ Assertions.assertEquals("very good", category);
}
@@ -142,11 +142,11 @@ public class DocumentCategorizerDLEval extends AbstactDLTest {
final double[] expected = new double[]{0.8851314783096313, 0.11486853659152985};
- Assert.assertTrue(Arrays.equals(expected, result));
- Assert.assertEquals(2, result.length);
+ Assertions.assertTrue(Arrays.equals(expected, result));
+ Assertions.assertEquals(2, result.length);
final String category = documentCategorizerDL.getBestCategory(result);
- Assert.assertEquals("negative", category);
+ Assertions.assertEquals("negative", category);
}
@@ -165,11 +165,11 @@ public class DocumentCategorizerDLEval extends AbstactDLTest {
final Map<String, Double> result = documentCategorizerDL.scoreMap(new String[]{"I am happy"});
- Assert.assertEquals(0.6352779865264893, result.get("very good").doubleValue(), 0);
- Assert.assertEquals(0.3003573715686798, result.get("good").doubleValue(), 0);
- Assert.assertEquals(0.04995147883892059, result.get("neutral").doubleValue(), 0);
- Assert.assertEquals(0.006593209225684404, result.get("bad").doubleValue(), 0);
- Assert.assertEquals(0.007819971069693565, result.get("very bad").doubleValue(), 0);
+ Assertions.assertEquals(0.6352779865264893, result.get("very good").doubleValue(), 0);
+ Assertions.assertEquals(0.3003573715686798, result.get("good").doubleValue(), 0);
+ Assertions.assertEquals(0.04995147883892059, result.get("neutral").doubleValue(), 0);
+ Assertions.assertEquals(0.006593209225684404, result.get("bad").doubleValue(), 0);
+ Assertions.assertEquals(0.007819971069693565, result.get("very bad").doubleValue(), 0);
}
@@ -188,11 +188,11 @@ public class DocumentCategorizerDLEval extends AbstactDLTest {
final Map<Double, Set<String>> result = documentCategorizerDL.sortedScoreMap(new String[]{"I am happy"});
- Assert.assertEquals(result.get(0.6352779865264893).size(), 1);
- Assert.assertEquals(result.get(0.3003573715686798).size(), 1);
- Assert.assertEquals(result.get(0.04995147883892059).size(), 1);
- Assert.assertEquals(result.get(0.006593209225684404).size(), 1);
- Assert.assertEquals(result.get(0.007819971069693565).size(), 1);
+ Assertions.assertEquals(result.get(0.6352779865264893).size(), 1);
+ Assertions.assertEquals(result.get(0.3003573715686798).size(), 1);
+ Assertions.assertEquals(result.get(0.04995147883892059).size(), 1);
+ Assertions.assertEquals(result.get(0.006593209225684404).size(), 1);
+ Assertions.assertEquals(result.get(0.007819971069693565).size(), 1);
}
@@ -210,13 +210,13 @@ public class DocumentCategorizerDLEval extends AbstactDLTest {
new InferenceOptions());
final int index = documentCategorizerDL.getIndex("bad");
- Assert.assertEquals(1, index);
+ Assertions.assertEquals(1, index);
final String category = documentCategorizerDL.getCategory(3);
- Assert.assertEquals("good", category);
+ Assertions.assertEquals("good", category);
final int number = documentCategorizerDL.getNumberOfCategories();
- Assert.assertEquals(5, number);
+ Assertions.assertEquals(5, number);
}
diff --git a/opennlp-dl/src/test/java/opennlp/dl/doccat/scoring/AverageClassificationScoringStrategyTest.java b/opennlp-dl/src/test/java/opennlp/dl/doccat/scoring/AverageClassificationScoringStrategyTest.java
index 10ced922..977c2e91 100644
--- a/opennlp-dl/src/test/java/opennlp/dl/doccat/scoring/AverageClassificationScoringStrategyTest.java
+++ b/opennlp-dl/src/test/java/opennlp/dl/doccat/scoring/AverageClassificationScoringStrategyTest.java
@@ -20,8 +20,8 @@ package opennlp.dl.doccat.scoring;
import java.util.LinkedList;
import java.util.List;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class AverageClassificationScoringStrategyTest {
@@ -36,11 +36,11 @@ public class AverageClassificationScoringStrategyTest {
final ClassificationScoringStrategy strategy = new AverageClassificationScoringStrategy();
final double[] results = strategy.score(scores);
- Assert.assertEquals(1.0, results[0], 0);
- Assert.assertEquals(2.0, results[1], 0);
- Assert.assertEquals(3.0, results[2], 0);
- Assert.assertEquals(4.0, results[3], 0);
- Assert.assertEquals(5.0, results[4], 0);
+ Assertions.assertEquals(1.0, results[0], 0);
+ Assertions.assertEquals(2.0, results[1], 0);
+ Assertions.assertEquals(3.0, results[2], 0);
+ Assertions.assertEquals(4.0, results[3], 0);
+ Assertions.assertEquals(5.0, results[4], 0);
}
@@ -55,9 +55,9 @@ public class AverageClassificationScoringStrategyTest {
final ClassificationScoringStrategy strategy = new AverageClassificationScoringStrategy();
final double[] results = strategy.score(scores);
- Assert.assertEquals(4.0, results[0], 0);
- Assert.assertEquals(3.0, results[1], 0);
- Assert.assertEquals(10.0, results[2], 0);
+ Assertions.assertEquals(4.0, results[0], 0);
+ Assertions.assertEquals(3.0, results[1], 0);
+ Assertions.assertEquals(10.0, results[2], 0);
}
diff --git a/opennlp-dl/src/test/java/opennlp/dl/namefinder/NameFinderDLEval.java b/opennlp-dl/src/test/java/opennlp/dl/namefinder/NameFinderDLEval.java
index 3749adc9..26beab04 100644
--- a/opennlp-dl/src/test/java/opennlp/dl/namefinder/NameFinderDLEval.java
+++ b/opennlp-dl/src/test/java/opennlp/dl/namefinder/NameFinderDLEval.java
@@ -23,8 +23,8 @@ import java.util.Map;
import ai.onnxruntime.OrtException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.dl.AbstactDLTest;
import opennlp.tools.util.Span;
@@ -50,11 +50,11 @@ public class NameFinderDLEval extends AbstactDLTest {
System.out.println(span.toString());
}
- Assert.assertEquals(1, spans.length);
- Assert.assertEquals(0, spans[0].getStart());
- Assert.assertEquals(17, spans[0].getEnd());
- Assert.assertEquals(8.251646041870117, spans[0].getProb(), 0.0);
- Assert.assertEquals("George Washington", spans[0].getCoveredText(String.join(" ", tokens)));
+ Assertions.assertEquals(1, spans.length);
+ Assertions.assertEquals(0, spans[0].getStart());
+ Assertions.assertEquals(17, spans[0].getEnd());
+ Assertions.assertEquals(8.251646041870117, spans[0].getProb(), 0.0);
+ Assertions.assertEquals("George Washington", spans[0].getCoveredText(String.join(" ", tokens)));
}
@@ -76,9 +76,9 @@ public class NameFinderDLEval extends AbstactDLTest {
System.out.println(span.toString());
}
- Assert.assertEquals(1, spans.length);
- Assert.assertEquals(13, spans[0].getStart());
- Assert.assertEquals(30, spans[0].getEnd());
+ Assertions.assertEquals(1, spans.length);
+ Assertions.assertEquals(13, spans[0].getStart());
+ Assertions.assertEquals(30, spans[0].getEnd());
}
@@ -100,9 +100,9 @@ public class NameFinderDLEval extends AbstactDLTest {
System.out.println(span.toString());
}
- Assert.assertEquals(1, spans.length);
- Assert.assertEquals(13, spans[0].getStart());
- Assert.assertEquals(19, spans[0].getEnd());
+ Assertions.assertEquals(1, spans.length);
+ Assertions.assertEquals(13, spans[0].getStart());
+ Assertions.assertEquals(19, spans[0].getEnd());
}
@@ -120,7 +120,7 @@ public class NameFinderDLEval extends AbstactDLTest {
final NameFinderDL nameFinderDL = new NameFinderDL(model, vocab, getIds2Labels());
final Span[] spans = nameFinderDL.find(tokens);
- Assert.assertEquals(0, spans.length);
+ Assertions.assertEquals(0, spans.length);
}
@@ -138,7 +138,7 @@ public class NameFinderDLEval extends AbstactDLTest {
final NameFinderDL nameFinderDL = new NameFinderDL(model, vocab, getIds2Labels());
final Span[] spans = nameFinderDL.find(tokens);
- Assert.assertEquals(0, spans.length);
+ Assertions.assertEquals(0, spans.length);
}
@@ -161,24 +161,26 @@ public class NameFinderDLEval extends AbstactDLTest {
System.out.println(span.toString());
}
- Assert.assertEquals(2, spans.length);
- Assert.assertEquals(0, spans[0].getStart());
- Assert.assertEquals(17, spans[0].getEnd());
- Assert.assertEquals(22, spans[1].getStart());
- Assert.assertEquals(37, spans[1].getEnd());
+ Assertions.assertEquals(2, spans.length);
+ Assertions.assertEquals(0, spans[0].getStart());
+ Assertions.assertEquals(17, spans[0].getEnd());
+ Assertions.assertEquals(22, spans[1].getStart());
+ Assertions.assertEquals(37, spans[1].getEnd());
}
- @Test(expected = OrtException.class)
- public void invalidModel() throws Exception {
+ @Test
+ public void invalidModel() {
- // This test was written using the dslim/bert-base-NER model.
- // You will need to update the ids2Labels and assertions if you use a different model.
+ Assertions.assertThrows(OrtException.class, () -> {
+ // This test was written using the dslim/bert-base-NER model.
+ // You will need to update the ids2Labels and assertions if you use a different model.
- final File model = new File("invalid.onnx");
- final File vocab = new File("vocab.txt");
+ final File model = new File("invalid.onnx");
+ final File vocab = new File("vocab.txt");
- new NameFinderDL(model, vocab, getIds2Labels());
+ new NameFinderDL(model, vocab, getIds2Labels());
+ });
}
diff --git a/opennlp-morfologik-addon/pom.xml b/opennlp-morfologik-addon/pom.xml
index a36d2518..c0a3d4a0 100644
--- a/opennlp-morfologik-addon/pom.xml
+++ b/opennlp-morfologik-addon/pom.xml
@@ -61,8 +61,14 @@
</dependency>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-api</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
diff --git a/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/builder/POSDictionayBuilderTest.java b/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/builder/POSDictionayBuilderTest.java
index 93bb3fa6..39bc2cb3 100644
--- a/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/builder/POSDictionayBuilderTest.java
+++ b/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/builder/POSDictionayBuilderTest.java
@@ -26,8 +26,8 @@ import java.util.Arrays;
import morfologik.stemming.DictionaryMetadata;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.morfologik.lemmatizer.MorfologikLemmatizer;
@@ -75,7 +75,7 @@ public class POSDictionayBuilderTest {
public void testBuildDictionary() throws Exception {
Path output = createMorfologikDictionary();
MorfologikLemmatizer ml = new MorfologikLemmatizer(output);
- Assert.assertNotNull(ml);
+ Assertions.assertNotNull(ml);
output.toFile().deleteOnExit();
}
diff --git a/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/lemmatizer/MorfologikLemmatizerTest.java b/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/lemmatizer/MorfologikLemmatizerTest.java
index f522c566..cbf1c6fd 100644
--- a/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/lemmatizer/MorfologikLemmatizerTest.java
+++ b/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/lemmatizer/MorfologikLemmatizerTest.java
@@ -21,8 +21,8 @@ import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.morfologik.builder.POSDictionayBuilderTest;
import opennlp.tools.lemmatizer.Lemmatizer;
@@ -39,11 +39,11 @@ public class MorfologikLemmatizerTest {
String[] lemmas = dict.lemmatize(toks, tags);
- Assert.assertEquals("casar", lemmas[0]);
- Assert.assertEquals("casa", lemmas[1]);
+ Assertions.assertEquals("casar", lemmas[0]);
+ Assertions.assertEquals("casa", lemmas[1]);
// lookup is case insensitive. There is no entry casa - prop
- Assert.assertNull(lemmas[2]);
+ Assertions.assertNull(lemmas[2]);
}
@Test
@@ -55,8 +55,8 @@ public class MorfologikLemmatizerTest {
List<List<String>> lemmas = dict.lemmatize(Arrays.asList(toks), Arrays.asList(tags));
- Assert.assertTrue(lemmas.get(0).contains("ir"));
- Assert.assertTrue(lemmas.get(0).contains("ser"));
+ Assertions.assertTrue(lemmas.get(0).contains("ir"));
+ Assertions.assertTrue(lemmas.get(0).contains("ser"));
}
private MorfologikLemmatizer createDictionary(boolean caseSensitive)
diff --git a/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/tagdict/MorfologikTagDictionaryTest.java b/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/tagdict/MorfologikTagDictionaryTest.java
index c367d3f7..b3a0cbde 100644
--- a/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/tagdict/MorfologikTagDictionaryTest.java
+++ b/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/tagdict/MorfologikTagDictionaryTest.java
@@ -23,8 +23,8 @@ import java.util.List;
import morfologik.stemming.Dictionary;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.morfologik.builder.POSDictionayBuilderTest;
import opennlp.tools.postag.TagDictionary;
@@ -36,8 +36,8 @@ public class MorfologikTagDictionaryTest {
MorfologikTagDictionary dict = createDictionary(false);
List<String> tags = Arrays.asList(dict.getTags("carro"));
- Assert.assertEquals(1, tags.size());
- Assert.assertTrue(tags.contains("NOUN"));
+ Assertions.assertEquals(1, tags.size());
+ Assertions.assertTrue(tags.contains("NOUN"));
}
@Test
@@ -45,17 +45,17 @@ public class MorfologikTagDictionaryTest {
TagDictionary dict = createDictionary(false);
List<String> tags = Arrays.asList(dict.getTags("casa"));
- Assert.assertEquals(2, tags.size());
- Assert.assertTrue(tags.contains("NOUN"));
- Assert.assertTrue(tags.contains("V"));
+ Assertions.assertEquals(2, tags.size());
+ Assertions.assertTrue(tags.contains("NOUN"));
+ Assertions.assertTrue(tags.contains("V"));
// this is the behavior of case insensitive dictionary
// if we search it using case insensitive, Casa as a proper noun
// should be lower case in the dictionary
tags = Arrays.asList(dict.getTags("Casa"));
- Assert.assertEquals(2, tags.size());
- Assert.assertTrue(tags.contains("NOUN"));
- Assert.assertTrue(tags.contains("V"));
+ Assertions.assertEquals(2, tags.size());
+ Assertions.assertTrue(tags.contains("NOUN"));
+ Assertions.assertTrue(tags.contains("V"));
}
@Test
@@ -63,16 +63,16 @@ public class MorfologikTagDictionaryTest {
TagDictionary dict = createDictionary(true);
List<String> tags = Arrays.asList(dict.getTags("casa"));
- Assert.assertEquals(2, tags.size());
- Assert.assertTrue(tags.contains("NOUN"));
- Assert.assertTrue(tags.contains("V"));
+ Assertions.assertEquals(2, tags.size());
+ Assertions.assertTrue(tags.contains("NOUN"));
+ Assertions.assertTrue(tags.contains("V"));
// this is the behavior of case insensitive dictionary
// if we search it using case insensitive, Casa as a proper noun
// should be lower case in the dictionary
tags = Arrays.asList(dict.getTags("Casa"));
- Assert.assertEquals(1, tags.size());
- Assert.assertTrue(tags.contains("PROP"));
+ Assertions.assertEquals(1, tags.size());
+ Assertions.assertTrue(tags.contains("PROP"));
}
diff --git a/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/tagdict/POSTaggerFactoryTest.java b/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/tagdict/POSTaggerFactoryTest.java
index e1692d06..a9ccba7c 100644
--- a/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/tagdict/POSTaggerFactoryTest.java
+++ b/opennlp-morfologik-addon/src/test/java/opennlp/morfologik/tagdict/POSTaggerFactoryTest.java
@@ -24,8 +24,8 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.morfologik.builder.POSDictionayBuilderTest;
import opennlp.tools.cmdline.CmdLineUtil;
@@ -79,7 +79,7 @@ public class POSTaggerFactoryTest {
POSModel posModel = trainPOSModel(ModelType.MAXENT, inFactory);
POSTaggerFactory factory = posModel.getFactory();
- Assert.assertTrue(factory.getTagDictionary() instanceof MorfologikTagDictionary);
+ Assertions.assertTrue(factory.getTagDictionary() instanceof MorfologikTagDictionary);
factory = null;
@@ -90,9 +90,9 @@ public class POSTaggerFactoryTest {
POSModel fromSerialized = new POSModel(in);
factory = fromSerialized.getFactory();
- Assert.assertTrue(factory.getTagDictionary() instanceof MorfologikTagDictionary);
+ Assertions.assertTrue(factory.getTagDictionary() instanceof MorfologikTagDictionary);
- Assert.assertEquals(2, factory.getTagDictionary().getTags("casa").length);
+ Assertions.assertEquals(2, factory.getTagDictionary().getTags("casa").length);
}
}
diff --git a/opennlp-tools/pom.xml b/opennlp-tools/pom.xml
index 1b15a440..9c4a1965 100644
--- a/opennlp-tools/pom.xml
+++ b/opennlp-tools/pom.xml
@@ -51,8 +51,14 @@
</dependency>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-api</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
diff --git a/opennlp-tools/src/test/java/opennlp/tools/HighMemoryUsage.java b/opennlp-tools/src/test/java/opennlp/tools/HighMemoryUsage.java
index ddafd036..0fe3c3f3 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/HighMemoryUsage.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/HighMemoryUsage.java
@@ -17,8 +17,18 @@
package opennlp.tools;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.junit.jupiter.api.Tag;
+
/**
* Marker class for tests with heap memory usage above 4 GB.
*/
-public class HighMemoryUsage {
+@Target( {ElementType.TYPE, ElementType.METHOD})
+@Retention(RetentionPolicy.RUNTIME)
+@Tag("opennlp.tools.HighMemoryUsage")
+public @interface HighMemoryUsage {
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkSampleStreamTest.java
index 529a27e2..84eda5a1 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkSampleStreamTest.java
@@ -20,8 +20,8 @@ package opennlp.tools.chunker;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.MockInputStreamFactory;
import opennlp.tools.util.ObjectStream;
@@ -30,7 +30,7 @@ import opennlp.tools.util.PlainTextByLineStream;
public class ChunkSampleStreamTest {
@Test
- public void testReadingEvents() throws IOException {
+ void testReadingEvents() throws IOException {
String sample = "word11 tag11 pred11" +
'\n' +
@@ -59,30 +59,30 @@ public class ChunkSampleStreamTest {
// read first sample
ChunkSample firstSample = chunkStream.read();
- Assert.assertEquals("word11", firstSample.getSentence()[0]);
- Assert.assertEquals("tag11", firstSample.getTags()[0]);
- Assert.assertEquals("pred11", firstSample.getPreds()[0]);
- Assert.assertEquals("word12", firstSample.getSentence()[1]);
- Assert.assertEquals("tag12", firstSample.getTags()[1]);
- Assert.assertEquals("pred12", firstSample.getPreds()[1]);
- Assert.assertEquals("word13", firstSample.getSentence()[2]);
- Assert.assertEquals("tag13", firstSample.getTags()[2]);
- Assert.assertEquals("pred13", firstSample.getPreds()[2]);
+ Assertions.assertEquals("word11", firstSample.getSentence()[0]);
+ Assertions.assertEquals("tag11", firstSample.getTags()[0]);
+ Assertions.assertEquals("pred11", firstSample.getPreds()[0]);
+ Assertions.assertEquals("word12", firstSample.getSentence()[1]);
+ Assertions.assertEquals("tag12", firstSample.getTags()[1]);
+ Assertions.assertEquals("pred12", firstSample.getPreds()[1]);
+ Assertions.assertEquals("word13", firstSample.getSentence()[2]);
+ Assertions.assertEquals("tag13", firstSample.getTags()[2]);
+ Assertions.assertEquals("pred13", firstSample.getPreds()[2]);
// read second sample
ChunkSample secondSample = chunkStream.read();
- Assert.assertEquals("word21", secondSample.getSentence()[0]);
- Assert.assertEquals("tag21", secondSample.getTags()[0]);
- Assert.assertEquals("pred21", secondSample.getPreds()[0]);
- Assert.assertEquals("word22", secondSample.getSentence()[1]);
- Assert.assertEquals("tag22", secondSample.getTags()[1]);
- Assert.assertEquals("pred22", secondSample.getPreds()[1]);
- Assert.assertEquals("word23", secondSample.getSentence()[2]);
- Assert.assertEquals("tag23", secondSample.getTags()[2]);
- Assert.assertEquals("pred23", secondSample.getPreds()[2]);
-
- Assert.assertNull(chunkStream.read());
+ Assertions.assertEquals("word21", secondSample.getSentence()[0]);
+ Assertions.assertEquals("tag21", secondSample.getTags()[0]);
+ Assertions.assertEquals("pred21", secondSample.getPreds()[0]);
+ Assertions.assertEquals("word22", secondSample.getSentence()[1]);
+ Assertions.assertEquals("tag22", secondSample.getTags()[1]);
+ Assertions.assertEquals("pred22", secondSample.getPreds()[1]);
+ Assertions.assertEquals("word23", secondSample.getSentence()[2]);
+ Assertions.assertEquals("tag23", secondSample.getTags()[2]);
+ Assertions.assertEquals("pred23", secondSample.getPreds()[2]);
+
+ Assertions.assertNull(chunkStream.read());
chunkStream.close();
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkSampleTest.java b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkSampleTest.java
index 79b52341..9e2fd462 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkSampleTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkSampleTest.java
@@ -29,8 +29,8 @@ import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.util.InputStreamFactory;
@@ -114,7 +114,7 @@ public class ChunkSampleTest {
}
@Test
- public void testChunkSampleSerDe() throws IOException {
+ void testChunkSampleSerDe() throws IOException {
ChunkSample chunkSample = createGoldSample();
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
ObjectOutput out = new ObjectOutputStream(byteArrayOutputStream);
@@ -132,32 +132,34 @@ public class ChunkSampleTest {
// do nothing
}
- Assert.assertNotNull(deSerializedChunkSample);
- Assert.assertArrayEquals(chunkSample.getPhrasesAsSpanList(),
+ Assertions.assertNotNull(deSerializedChunkSample);
+ Assertions.assertArrayEquals(chunkSample.getPhrasesAsSpanList(),
deSerializedChunkSample.getPhrasesAsSpanList());
- Assert.assertArrayEquals(chunkSample.getPreds(), deSerializedChunkSample.getPreds());
- Assert.assertArrayEquals(chunkSample.getTags(), deSerializedChunkSample.getTags());
- Assert.assertArrayEquals(chunkSample.getSentence(), deSerializedChunkSample.getSentence());
- Assert.assertEquals(chunkSample, deSerializedChunkSample);
+ Assertions.assertArrayEquals(chunkSample.getPreds(), deSerializedChunkSample.getPreds());
+ Assertions.assertArrayEquals(chunkSample.getTags(), deSerializedChunkSample.getTags());
+ Assertions.assertArrayEquals(chunkSample.getSentence(), deSerializedChunkSample.getSentence());
+ Assertions.assertEquals(chunkSample, deSerializedChunkSample);
}
- @Test(expected = IllegalArgumentException.class)
- public void testParameterValidation() {
- new ChunkSample(new String[] {""}, new String[] {""},
- new String[] {"test", "one element to much"});
+ @Test
+ void testParameterValidation() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ new ChunkSample(new String[] {""}, new String[] {""},
+ new String[] {"test", "one element to much"});
+ });
}
@Test
- public void testRetrievingContent() {
+ void testRetrievingContent() {
ChunkSample sample = new ChunkSample(createSentence(), createTags(), createChunks());
- Assert.assertArrayEquals(createSentence(), sample.getSentence());
- Assert.assertArrayEquals(createTags(), sample.getTags());
- Assert.assertArrayEquals(createChunks(), sample.getPreds());
+ Assertions.assertArrayEquals(createSentence(), sample.getSentence());
+ Assertions.assertArrayEquals(createTags(), sample.getTags());
+ Assertions.assertArrayEquals(createChunks(), sample.getPreds());
}
@Test
- public void testToString() throws IOException {
+ void testToString() throws IOException {
ChunkSample sample = new ChunkSample(createSentence(), createTags(), createChunks());
String[] sentence = createSentence();
@@ -169,41 +171,41 @@ public class ChunkSampleTest {
for (int i = 0; i < sentence.length; i++) {
String line = reader.readLine();
String[] parts = line.split("\\s+");
- Assert.assertEquals(3, parts.length);
- Assert.assertEquals(sentence[i], parts[0]);
- Assert.assertEquals(tags[i], parts[1]);
- Assert.assertEquals(chunks[i], parts[2]);
+ Assertions.assertEquals(3, parts.length);
+ Assertions.assertEquals(sentence[i], parts[0]);
+ Assertions.assertEquals(tags[i], parts[1]);
+ Assertions.assertEquals(chunks[i], parts[2]);
}
}
@Test
- public void testNicePrint() {
+ void testNicePrint() {
ChunkSample sample = new ChunkSample(createSentence(), createTags(), createChunks());
- Assert.assertEquals(" [NP Forecasts_NNS ] [PP for_IN ] [NP the_DT trade_NN figures_NNS ] "
+ Assertions.assertEquals(" [NP Forecasts_NNS ] [PP for_IN ] [NP the_DT trade_NN figures_NNS ] "
+ "[VP range_VBP ] [ADVP widely_RB ] ,_, [NP Forecasts_NNS ] [PP for_IN ] "
+ "[NP the_DT trade_NN figures_NNS ] "
+ "[VP range_VBP ] [ADVP widely_RB ] ._.", sample.nicePrint());
}
@Test
- public void testAsSpan() {
+ void testAsSpan() {
ChunkSample sample = new ChunkSample(createSentence(), createTags(),
createChunks());
Span[] spans = sample.getPhrasesAsSpanList();
- Assert.assertEquals(10, spans.length);
- Assert.assertEquals(new Span(0, 1, "NP"), spans[0]);
- Assert.assertEquals(new Span(1, 2, "PP"), spans[1]);
- Assert.assertEquals(new Span(2, 5, "NP"), spans[2]);
- Assert.assertEquals(new Span(5, 6, "VP"), spans[3]);
- Assert.assertEquals(new Span(6, 7, "ADVP"), spans[4]);
- Assert.assertEquals(new Span(8, 9, "NP"), spans[5]);
- Assert.assertEquals(new Span(9, 10, "PP"), spans[6]);
- Assert.assertEquals(new Span(10, 13, "NP"), spans[7]);
- Assert.assertEquals(new Span(13, 14, "VP"), spans[8]);
- Assert.assertEquals(new Span(14, 15, "ADVP"), spans[9]);
+ Assertions.assertEquals(10, spans.length);
+ Assertions.assertEquals(new Span(0, 1, "NP"), spans[0]);
+ Assertions.assertEquals(new Span(1, 2, "PP"), spans[1]);
+ Assertions.assertEquals(new Span(2, 5, "NP"), spans[2]);
+ Assertions.assertEquals(new Span(5, 6, "VP"), spans[3]);
+ Assertions.assertEquals(new Span(6, 7, "ADVP"), spans[4]);
+ Assertions.assertEquals(new Span(8, 9, "NP"), spans[5]);
+ Assertions.assertEquals(new Span(9, 10, "PP"), spans[6]);
+ Assertions.assertEquals(new Span(10, 13, "NP"), spans[7]);
+ Assertions.assertEquals(new Span(13, 14, "VP"), spans[8]);
+ Assertions.assertEquals(new Span(14, 15, "ADVP"), spans[9]);
}
@@ -211,25 +213,25 @@ public class ChunkSampleTest {
// the same validateArguments method, we do a deeper test only once
@Test
- public void testPhraseAsSpan() {
+ void testPhraseAsSpan() {
Span[] spans = ChunkSample.phrasesAsSpanList(createSentence(),
createTags(), createChunks());
- Assert.assertEquals(10, spans.length);
- Assert.assertEquals(new Span(0, 1, "NP"), spans[0]);
- Assert.assertEquals(new Span(1, 2, "PP"), spans[1]);
- Assert.assertEquals(new Span(2, 5, "NP"), spans[2]);
- Assert.assertEquals(new Span(5, 6, "VP"), spans[3]);
- Assert.assertEquals(new Span(6, 7, "ADVP"), spans[4]);
- Assert.assertEquals(new Span(8, 9, "NP"), spans[5]);
- Assert.assertEquals(new Span(9, 10, "PP"), spans[6]);
- Assert.assertEquals(new Span(10, 13, "NP"), spans[7]);
- Assert.assertEquals(new Span(13, 14, "VP"), spans[8]);
- Assert.assertEquals(new Span(14, 15, "ADVP"), spans[9]);
+ Assertions.assertEquals(10, spans.length);
+ Assertions.assertEquals(new Span(0, 1, "NP"), spans[0]);
+ Assertions.assertEquals(new Span(1, 2, "PP"), spans[1]);
+ Assertions.assertEquals(new Span(2, 5, "NP"), spans[2]);
+ Assertions.assertEquals(new Span(5, 6, "VP"), spans[3]);
+ Assertions.assertEquals(new Span(6, 7, "ADVP"), spans[4]);
+ Assertions.assertEquals(new Span(8, 9, "NP"), spans[5]);
+ Assertions.assertEquals(new Span(9, 10, "PP"), spans[6]);
+ Assertions.assertEquals(new Span(10, 13, "NP"), spans[7]);
+ Assertions.assertEquals(new Span(13, 14, "VP"), spans[8]);
+ Assertions.assertEquals(new Span(14, 15, "ADVP"), spans[9]);
}
@Test
- public void testRegions() throws IOException {
+ void testRegions() throws IOException {
InputStreamFactory in = new ResourceAsStreamFactory(getClass(),
"/opennlp/tools/chunker/output.txt");
@@ -238,59 +240,69 @@ public class ChunkSampleTest {
ChunkSample cs1 = predictedSample.read();
String[] g1 = Span.spansToStrings(cs1.getPhrasesAsSpanList(), cs1.getSentence());
- Assert.assertEquals(15, g1.length);
+ Assertions.assertEquals(15, g1.length);
ChunkSample cs2 = predictedSample.read();
String[] g2 = Span.spansToStrings(cs2.getPhrasesAsSpanList(), cs2.getSentence());
- Assert.assertEquals(10, g2.length);
+ Assertions.assertEquals(10, g2.length);
ChunkSample cs3 = predictedSample.read();
String[] g3 = Span.spansToStrings(cs3.getPhrasesAsSpanList(), cs3.getSentence());
- Assert.assertEquals(7, g3.length);
- Assert.assertEquals("United", g3[0]);
- Assert.assertEquals("'s directors", g3[1]);
- Assert.assertEquals("voted", g3[2]);
- Assert.assertEquals("themselves", g3[3]);
- Assert.assertEquals("their spouses", g3[4]);
- Assert.assertEquals("lifetime access", g3[5]);
- Assert.assertEquals("to", g3[6]);
+ Assertions.assertEquals(7, g3.length);
+ Assertions.assertEquals("United", g3[0]);
+ Assertions.assertEquals("'s directors", g3[1]);
+ Assertions.assertEquals("voted", g3[2]);
+ Assertions.assertEquals("themselves", g3[3]);
+ Assertions.assertEquals("their spouses", g3[4]);
+ Assertions.assertEquals("lifetime access", g3[5]);
+ Assertions.assertEquals("to", g3[6]);
predictedSample.close();
}
- @Test(expected = IllegalArgumentException.class)
- public void testInvalidPhraseAsSpan1() {
- ChunkSample.phrasesAsSpanList(new String[2], new String[1], new String[1]);
+ @Test
+ void testInvalidPhraseAsSpan1() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ ChunkSample.phrasesAsSpanList(new String[2], new String[1], new String[1]);
+ });
}
- @Test(expected = IllegalArgumentException.class)
- public void testInvalidPhraseAsSpan2() {
- ChunkSample.phrasesAsSpanList(new String[1], new String[2], new String[1]);
+ @Test
+ void testInvalidPhraseAsSpan2() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ ChunkSample.phrasesAsSpanList(new String[1], new String[2], new String[1]);
+ });
}
- @Test(expected = IllegalArgumentException.class)
- public void testInvalidPhraseAsSpan3() {
- ChunkSample.phrasesAsSpanList(new String[1], new String[1], new String[2]);
+ @Test
+ void testInvalidPhraseAsSpan3() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ ChunkSample.phrasesAsSpanList(new String[1], new String[1], new String[2]);
+ });
}
- @Test(expected = IllegalArgumentException.class)
- public void testInvalidChunkSampleArray() {
- new ChunkSample(new String[1], new String[1], new String[2]);
+ @Test
+ void testInvalidChunkSampleArray() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ new ChunkSample(new String[1], new String[1], new String[2]);
+ });
}
- @Test(expected = IllegalArgumentException.class)
- public void testInvalidChunkSampleList() {
- new ChunkSample(Arrays.asList(new String[1]), Arrays.asList(new String[1]),
- Arrays.asList(new String[2]));
+ @Test
+ void testInvalidChunkSampleList() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ new ChunkSample(Arrays.asList(new String[1]), Arrays.asList(new String[1]),
+ Arrays.asList(new String[2]));
+ });
}
@Test
- public void testEquals() {
- Assert.assertFalse(createGoldSample() == createGoldSample());
- Assert.assertTrue(createGoldSample().equals(createGoldSample()));
- Assert.assertFalse(createPredSample().equals(createGoldSample()));
- Assert.assertFalse(createPredSample().equals(new Object()));
+ void testEquals() {
+ Assertions.assertFalse(createGoldSample() == createGoldSample());
+ Assertions.assertTrue(createGoldSample().equals(createGoldSample()));
+ Assertions.assertFalse(createPredSample().equals(createGoldSample()));
+ Assertions.assertFalse(createPredSample().equals(new Object()));
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerDetailedFMeasureListenerTest.java b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerDetailedFMeasureListenerTest.java
index 5a5aca0e..00031b41 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerDetailedFMeasureListenerTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerDetailedFMeasureListenerTest.java
@@ -23,8 +23,8 @@ import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Locale;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.cmdline.chunker.ChunkerDetailedFMeasureListener;
import opennlp.tools.formats.ResourceAsStreamFactory;
@@ -33,7 +33,7 @@ import opennlp.tools.util.PlainTextByLineStream;
public class ChunkerDetailedFMeasureListenerTest {
@Test
- public void testEvaluator() throws IOException {
+ void testEvaluator() throws IOException {
ResourceAsStreamFactory inPredicted = new ResourceAsStreamFactory(
getClass(), "/opennlp/tools/chunker/output.txt");
@@ -66,6 +66,6 @@ public class ChunkerDetailedFMeasureListenerTest {
line = reader.readLine();
}
- Assert.assertEquals(expected.toString().trim(), listener.createReport(Locale.ENGLISH).trim());
+ Assertions.assertEquals(expected.toString().trim(), listener.createReport(Locale.ENGLISH).trim());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerEvaluatorTest.java b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerEvaluatorTest.java
index 3d11e1ca..1cdbadf4 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerEvaluatorTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerEvaluatorTest.java
@@ -22,8 +22,8 @@ import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.cmdline.chunker.ChunkEvaluationErrorListener;
import opennlp.tools.formats.ResourceAsStreamFactory;
@@ -48,7 +48,7 @@ public class ChunkerEvaluatorTest {
* @throws IOException
*/
@Test
- public void testEvaluator() throws IOException {
+ void testEvaluator() throws IOException {
ResourceAsStreamFactory inPredicted = new ResourceAsStreamFactory(
getClass(), "/opennlp/tools/chunker/output.txt");
ResourceAsStreamFactory inExpected = new ResourceAsStreamFactory(getClass(),
@@ -70,14 +70,14 @@ public class ChunkerEvaluatorTest {
FMeasure fm = evaluator.getFMeasure();
- Assert.assertEquals(0.8d, fm.getPrecisionScore(), DELTA);
- Assert.assertEquals(0.875d, fm.getRecallScore(), DELTA);
+ Assertions.assertEquals(0.8d, fm.getPrecisionScore(), DELTA);
+ Assertions.assertEquals(0.875d, fm.getRecallScore(), DELTA);
- Assert.assertNotSame(stream.toString().length(), 0);
+ Assertions.assertNotSame(stream.toString().length(), 0);
}
@Test
- public void testEvaluatorNoError() throws IOException {
+ void testEvaluatorNoError() throws IOException {
ResourceAsStreamFactory inPredicted = new ResourceAsStreamFactory(
getClass(), "/opennlp/tools/chunker/output.txt");
ResourceAsStreamFactory inExpected = new ResourceAsStreamFactory(getClass(),
@@ -100,10 +100,10 @@ public class ChunkerEvaluatorTest {
FMeasure fm = evaluator.getFMeasure();
- Assert.assertEquals(1d, fm.getPrecisionScore(), DELTA);
- Assert.assertEquals(1d, fm.getRecallScore(), DELTA);
+ Assertions.assertEquals(1d, fm.getPrecisionScore(), DELTA);
+ Assertions.assertEquals(1d, fm.getRecallScore(), DELTA);
- Assert.assertEquals(stream.toString().length(), 0);
+ Assertions.assertEquals(stream.toString().length(), 0);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerFactoryTest.java b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerFactoryTest.java
index 42051975..2113b07a 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerFactoryTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerFactoryTest.java
@@ -22,8 +22,8 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.util.ObjectStream;
@@ -52,13 +52,13 @@ public class ChunkerFactoryTest {
}
@Test
- public void testDefaultFactory() throws IOException {
+ void testDefaultFactory() throws IOException {
ChunkerModel model = trainModel(ModelType.MAXENT, new ChunkerFactory());
ChunkerFactory factory = model.getFactory();
- Assert.assertTrue(factory.getContextGenerator() instanceof DefaultChunkerContextGenerator);
- Assert.assertTrue(factory.getSequenceValidator() instanceof DefaultChunkerSequenceValidator);
+ Assertions.assertTrue(factory.getContextGenerator() instanceof DefaultChunkerContextGenerator);
+ Assertions.assertTrue(factory.getSequenceValidator() instanceof DefaultChunkerSequenceValidator);
ByteArrayOutputStream out = new ByteArrayOutputStream();
model.serialize(out);
@@ -67,19 +67,21 @@ public class ChunkerFactoryTest {
ChunkerModel fromSerialized = new ChunkerModel(in);
factory = fromSerialized.getFactory();
- Assert.assertTrue(factory.getContextGenerator() instanceof DefaultChunkerContextGenerator);
- Assert.assertTrue(factory.getSequenceValidator() instanceof DefaultChunkerSequenceValidator);
+ Assertions.assertTrue(factory.getContextGenerator() instanceof DefaultChunkerContextGenerator);
+ Assertions.assertTrue(factory.getSequenceValidator() instanceof DefaultChunkerSequenceValidator);
}
@Test
- public void testDummyFactory() throws IOException {
+ void testDummyFactory() throws IOException {
ChunkerModel model = trainModel(ModelType.MAXENT, new DummyChunkerFactory());
DummyChunkerFactory factory = (DummyChunkerFactory) model.getFactory();
- Assert.assertTrue(factory.getContextGenerator() instanceof DummyChunkerFactory.DummyContextGenerator);
- Assert.assertTrue(factory.getSequenceValidator() instanceof DummyChunkerFactory.DummySequenceValidator);
+ Assertions.assertTrue(factory.getContextGenerator()
+ instanceof DummyChunkerFactory.DummyContextGenerator);
+ Assertions.assertTrue(factory.getSequenceValidator()
+ instanceof DummyChunkerFactory.DummySequenceValidator);
ByteArrayOutputStream out = new ByteArrayOutputStream();
@@ -89,8 +91,10 @@ public class ChunkerFactoryTest {
ChunkerModel fromSerialized = new ChunkerModel(in);
factory = (DummyChunkerFactory) fromSerialized.getFactory();
- Assert.assertTrue(factory.getContextGenerator() instanceof DefaultChunkerContextGenerator);
- Assert.assertTrue(factory.getSequenceValidator() instanceof DefaultChunkerSequenceValidator);
+ Assertions.assertTrue(factory.getContextGenerator()
+ instanceof DefaultChunkerContextGenerator);
+ Assertions.assertTrue(factory.getSequenceValidator()
+ instanceof DefaultChunkerSequenceValidator);
ChunkerME chunker = new ChunkerME(model);
diff --git a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerMEIT.java b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerMEIT.java
index 6d7f4108..7384bf3f 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerMEIT.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerMEIT.java
@@ -19,30 +19,30 @@ package opennlp.tools.chunker;
import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class ChunkerMEIT {
- private static String[] toks1 = { "Rockwell", "said", "the", "agreement", "calls", "for",
+ private static String[] toks1 = {"Rockwell", "said", "the", "agreement", "calls", "for",
"it", "to", "supply", "200", "additional", "so-called", "shipsets",
- "for", "the", "planes", "." };
+ "for", "the", "planes", "."};
- private static String[] tags1 = { "NNP", "VBD", "DT", "NN", "VBZ", "IN", "PRP", "TO", "VB",
- "CD", "JJ", "JJ", "NNS", "IN", "DT", "NNS", "." };
+ private static String[] tags1 = {"NNP", "VBD", "DT", "NN", "VBZ", "IN", "PRP", "TO", "VB",
+ "CD", "JJ", "JJ", "NNS", "IN", "DT", "NNS", "."};
- private static String[] expect1 = { "B-NP", "B-VP", "B-NP", "I-NP", "B-VP", "B-SBAR",
+ private static String[] expect1 = {"B-NP", "B-VP", "B-NP", "I-NP", "B-VP", "B-SBAR",
"B-NP", "B-VP", "I-VP", "B-NP", "I-NP", "I-NP", "I-NP", "B-PP", "B-NP",
- "I-NP", "O" };
+ "I-NP", "O"};
@Test
- public void downloadModel() throws IOException {
+ void downloadModel() throws IOException {
ChunkerME chunker = new ChunkerME("en");
String[] preds = chunker.chunk(toks1, tags1);
- Assert.assertArrayEquals(expect1, preds);
+ Assertions.assertArrayEquals(expect1, preds);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerMETest.java b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerMETest.java
index cfbd815e..7cefcebe 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerMETest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerMETest.java
@@ -21,9 +21,9 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.namefind.NameFinderME;
@@ -49,33 +49,39 @@ import opennlp.tools.util.TrainingParameters;
* training sentences and then the computed model is used to predict sentences
* from the training sentences.
*/
+
public class ChunkerMETest {
private Chunker chunker;
- private static String[] toks1 = { "Rockwell", "said", "the", "agreement", "calls", "for",
+ private static String[] toks1 = {"Rockwell", "said", "the", "agreement", "calls", "for",
"it", "to", "supply", "200", "additional", "so-called", "shipsets",
- "for", "the", "planes", "." };
+ "for", "the", "planes", "."};
- private static String[] tags1 = { "NNP", "VBD", "DT", "NN", "VBZ", "IN", "PRP", "TO", "VB",
- "CD", "JJ", "JJ", "NNS", "IN", "DT", "NNS", "." };
+ private static String[] tags1 = {"NNP", "VBD", "DT", "NN", "VBZ", "IN", "PRP", "TO", "VB",
+ "CD", "JJ", "JJ", "NNS", "IN", "DT", "NNS", "."};
- private static String[] expect1 = { "B-NP", "B-VP", "B-NP", "I-NP", "B-VP", "B-SBAR",
+ private static String[] expect1 = {"B-NP", "B-VP", "B-NP", "I-NP", "B-VP", "B-SBAR",
"B-NP", "B-VP", "I-VP", "B-NP", "I-NP", "I-NP", "I-NP", "B-PP", "B-NP",
- "I-NP", "O" };
+ "I-NP", "O"};
- @Test(expected = IOException.class)
- public void downloadNonExistentModel() throws IOException {
+ @Test
+ void downloadNonExistentModel() {
- ChunkerME chunker = new ChunkerME("en");
+ Assertions.assertThrows(IOException.class, () -> {
+
+ ChunkerME chunker = new ChunkerME("en");
+
+ String[] preds = chunker.chunk(toks1, tags1);
+
+ Assertions.assertArrayEquals(expect1, preds);
+ });
- String[] preds = chunker.chunk(toks1, tags1);
- Assert.assertArrayEquals(expect1, preds);
}
- @Before
- public void startup() throws IOException {
+ @BeforeEach
+ void startup() throws IOException {
// train the chunker
ResourceAsStreamFactory in = new ResourceAsStreamFactory(getClass(),
@@ -94,66 +100,71 @@ public class ChunkerMETest {
}
@Test
- public void testChunkAsArray() throws Exception {
+ void testChunkAsArray() {
String[] preds = chunker.chunk(toks1, tags1);
- Assert.assertArrayEquals(expect1, preds);
+ Assertions.assertArrayEquals(expect1, preds);
}
@Test
- public void testChunkAsSpan() throws Exception {
+ void testChunkAsSpan() {
Span[] preds = chunker.chunkAsSpans(toks1, tags1);
System.out.println(Arrays.toString(preds));
- Assert.assertEquals(10, preds.length);
- Assert.assertEquals(new Span(0, 1, "NP"), preds[0]);
- Assert.assertEquals(new Span(1, 2, "VP"), preds[1]);
- Assert.assertEquals(new Span(2, 4, "NP"), preds[2]);
- Assert.assertEquals(new Span(4, 5, "VP"), preds[3]);
- Assert.assertEquals(new Span(5, 6, "SBAR"), preds[4]);
- Assert.assertEquals(new Span(6, 7, "NP"), preds[5]);
- Assert.assertEquals(new Span(7, 9, "VP"), preds[6]);
- Assert.assertEquals(new Span(9, 13, "NP"), preds[7]);
- Assert.assertEquals(new Span(13, 14, "PP"), preds[8]);
- Assert.assertEquals(new Span(14, 16, "NP"), preds[9]);
+ Assertions.assertEquals(10, preds.length);
+ Assertions.assertEquals(new Span(0, 1, "NP"), preds[0]);
+ Assertions.assertEquals(new Span(1, 2, "VP"), preds[1]);
+ Assertions.assertEquals(new Span(2, 4, "NP"), preds[2]);
+ Assertions.assertEquals(new Span(4, 5, "VP"), preds[3]);
+ Assertions.assertEquals(new Span(5, 6, "SBAR"), preds[4]);
+ Assertions.assertEquals(new Span(6, 7, "NP"), preds[5]);
+ Assertions.assertEquals(new Span(7, 9, "VP"), preds[6]);
+ Assertions.assertEquals(new Span(9, 13, "NP"), preds[7]);
+ Assertions.assertEquals(new Span(13, 14, "PP"), preds[8]);
+ Assertions.assertEquals(new Span(14, 16, "NP"), preds[9]);
}
@Test
- public void testTokenProbArray() throws Exception {
+ void testTokenProbArray() {
Sequence[] preds = chunker.topKSequences(toks1, tags1);
- Assert.assertTrue(preds.length > 0);
- Assert.assertEquals(expect1.length, preds[0].getProbs().length);
- Assert.assertEquals(Arrays.asList(expect1), preds[0].getOutcomes());
- Assert.assertNotSame(Arrays.asList(expect1), preds[1].getOutcomes());
+ Assertions.assertTrue(preds.length > 0);
+ Assertions.assertEquals(expect1.length, preds[0].getProbs().length);
+ Assertions.assertEquals(Arrays.asList(expect1), preds[0].getOutcomes());
+ Assertions.assertNotSame(Arrays.asList(expect1), preds[1].getOutcomes());
}
@Test
- public void testTokenProbMinScore() throws Exception {
+ void testTokenProbMinScore() {
Sequence[] preds = chunker.topKSequences(toks1, tags1, -5.55);
- Assert.assertEquals(4, preds.length);
- Assert.assertEquals(expect1.length, preds[0].getProbs().length);
- Assert.assertEquals(Arrays.asList(expect1), preds[0].getOutcomes());
- Assert.assertNotSame(Arrays.asList(expect1), preds[1].getOutcomes());
+ Assertions.assertEquals(4, preds.length);
+ Assertions.assertEquals(expect1.length, preds[0].getProbs().length);
+ Assertions.assertEquals(Arrays.asList(expect1), preds[0].getOutcomes());
+ Assertions.assertNotSame(Arrays.asList(expect1), preds[1].getOutcomes());
}
-
- @Test(expected = InsufficientTrainingDataException.class)
- public void testInsufficientData() throws IOException {
- ResourceAsStreamFactory in = new ResourceAsStreamFactory(getClass(),
- "/opennlp/tools/chunker/test-insufficient.txt");
+ @Test
+ void testInsufficientData() {
- ObjectStream<ChunkSample> sampleStream = new ChunkSampleStream(
- new PlainTextByLineStream(in, StandardCharsets.UTF_8));
+ Assertions.assertThrows(InsufficientTrainingDataException.class, () -> {
- TrainingParameters params = new TrainingParameters();
- params.put(TrainingParameters.ITERATIONS_PARAM, 70);
- params.put(TrainingParameters.CUTOFF_PARAM, 1);
+ ResourceAsStreamFactory in = new ResourceAsStreamFactory(getClass(),
+ "/opennlp/tools/chunker/test-insufficient.txt");
+
+ ObjectStream<ChunkSample> sampleStream = new ChunkSampleStream(
+ new PlainTextByLineStream(in, StandardCharsets.UTF_8));
+
+ TrainingParameters params = new TrainingParameters();
+ params.put(TrainingParameters.ITERATIONS_PARAM, 70);
+ params.put(TrainingParameters.CUTOFF_PARAM, 1);
+
+ ChunkerME.train("eng", sampleStream, params, new ChunkerFactory());
+
+ });
- ChunkerME.train("eng", sampleStream, params, new ChunkerFactory());
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerModelTest.java b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerModelTest.java
index 0d6b513f..04ab7b11 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerModelTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/chunker/ChunkerModelTest.java
@@ -17,8 +17,8 @@
package opennlp.tools.chunker;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
/**
* This is the test class for {@link ChunkerModel}.
@@ -26,34 +26,35 @@ import org.junit.Test;
public class ChunkerModelTest {
@Test
- public void testInvalidFactorySignature() throws Exception {
+ void testInvalidFactorySignature() throws Exception {
ChunkerModel model = null;
try {
model = new ChunkerModel(this.getClass().getResourceAsStream("chunker170custom.bin"));
} catch (IllegalArgumentException e) {
- Assert.assertTrue("Exception must state ChunkerFactory",
- e.getMessage().contains("ChunkerFactory"));
- Assert.assertTrue("Exception must mention DummyChunkerFactory",
- e.getMessage().contains("opennlp.tools.chunker.DummyChunkerFactory"));
+ Assertions.assertTrue(
+ e.getMessage().contains("ChunkerFactory"), "Exception must state ChunkerFactory");
+ Assertions.assertTrue(
+ e.getMessage().contains("opennlp.tools.chunker.DummyChunkerFactory"),
+ "Exception must mention DummyChunkerFactory");
}
- Assert.assertNull(model);
+ Assertions.assertNull(model);
}
@Test
- public void test170DefaultFactory() throws Exception {
+ void test170DefaultFactory() throws Exception {
// This is an OpenNLP 1.x model. It should load with OpenNLP 2.x.
- Assert.assertNotNull(
+ Assertions.assertNotNull(
new ChunkerModel(this.getClass().getResourceAsStream("chunker170default.bin")));
}
@Test
- public void test180CustomFactory() throws Exception {
+ void test180CustomFactory() throws Exception {
// This is an OpenNLP 1.x model. It should load with OpenNLP 2.x.
- Assert.assertNotNull(
+ Assertions.assertNotNull(
new ChunkerModel(this.getClass().getResourceAsStream("chunker180custom.bin")));
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/cmdline/ArgumentParserTest.java b/opennlp-tools/src/test/java/opennlp/tools/cmdline/ArgumentParserTest.java
index ee1e7e73..b0f2de77 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/cmdline/ArgumentParserTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/cmdline/ArgumentParserTest.java
@@ -21,8 +21,8 @@ import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.cmdline.ArgumentParser.OptionalParameter;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
@@ -33,27 +33,33 @@ public class ArgumentParserTest {
interface ZeroMethods {
}
- @Test(expected = IllegalArgumentException.class)
- public void testZeroMethods() {
- ArgumentParser.createUsage(ZeroMethods.class);
+ @Test
+ void testZeroMethods() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ ArgumentParser.createUsage(ZeroMethods.class);
+ });
}
interface InvalidMethodName {
String invalidMethodName();
}
- @Test(expected = IllegalArgumentException.class)
- public void testInvalidMethodName() {
- ArgumentParser.createUsage(InvalidMethodName.class);
+ @Test
+ void testInvalidMethodName() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ ArgumentParser.createUsage(InvalidMethodName.class);
+ });
}
interface InvalidReturnType {
Exception getTest();
}
- @Test(expected = IllegalArgumentException.class)
- public void testInvalidReturnType() {
- ArgumentParser.createUsage(InvalidReturnType.class);
+ @Test
+ void testInvalidReturnType() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ ArgumentParser.createUsage(InvalidReturnType.class);
+ });
}
interface SimpleArguments extends AllOptionalArguments {
@@ -78,48 +84,55 @@ public class ArgumentParserTest {
@Test
- public void testSimpleArguments() {
+ void testSimpleArguments() {
String argsString = "-encoding UTF-8 -alphaNumOpt false";
SimpleArguments args = ArgumentParser.parse(argsString.split(" "), SimpleArguments.class);
- Assert.assertEquals(StandardCharsets.UTF_8.name(), args.getEncoding());
- Assert.assertEquals(Integer.valueOf(100), args.getIterations());
- Assert.assertNull(args.getCutoff());
- Assert.assertEquals(false, args.getAlphaNumOpt());
+ Assertions.assertEquals(StandardCharsets.UTF_8.name(), args.getEncoding());
+ Assertions.assertEquals(Integer.valueOf(100), args.getIterations());
+ Assertions.assertNull(args.getCutoff());
+ Assertions.assertEquals(false, args.getAlphaNumOpt());
}
- @Test(expected = IllegalArgumentException.class)
- public void testSimpleArgumentsMissingEncoding() {
- String argsString = "-alphaNumOpt false";
+ @Test
+ void testSimpleArgumentsMissingEncoding() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ String argsString = "-alphaNumOpt false";
+
+ Assertions.assertFalse(ArgumentParser.validateArguments(argsString.split(" "), SimpleArguments.class));
+ ArgumentParser.parse(argsString.split(" "), SimpleArguments.class);
+ });
- Assert.assertFalse(ArgumentParser.validateArguments(argsString.split(" "), SimpleArguments.class));
- ArgumentParser.parse(argsString.split(" "), SimpleArguments.class);
}
@Test
- public void testAllOptionalArgumentsOneArgument() {
+ void testAllOptionalArgumentsOneArgument() {
String argsString = "-alphaNumOpt false";
- Assert.assertTrue(ArgumentParser.validateArguments(argsString.split(" "), AllOptionalArguments.class));
+ Assertions.assertTrue(ArgumentParser.validateArguments(argsString.split(" "),
+ AllOptionalArguments.class));
ArgumentParser.parse(argsString.split(" "), AllOptionalArguments.class);
}
@Test
- public void testAllOptionalArgumentsZeroArguments() {
+ void testAllOptionalArgumentsZeroArguments() {
String[] args = {};
- Assert.assertTrue(ArgumentParser.validateArguments(args, AllOptionalArguments.class));
+ Assertions.assertTrue(ArgumentParser.validateArguments(args, AllOptionalArguments.class));
ArgumentParser.parse(args, AllOptionalArguments.class);
}
- @Test(expected = IllegalArgumentException.class)
- public void testAllOptionalArgumentsExtraArgument() {
- String argsString = "-encoding UTF-8";
- Assert.assertFalse(ArgumentParser.validateArguments(argsString.split(" "), AllOptionalArguments.class));
- ArgumentParser.parse(argsString.split(" "), AllOptionalArguments.class);
+ @Test
+ void testAllOptionalArgumentsExtraArgument() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ String argsString = "-encoding UTF-8";
+ Assertions.assertFalse(ArgumentParser.validateArguments(argsString.split(" "),
+ AllOptionalArguments.class));
+ ArgumentParser.parse(argsString.split(" "), AllOptionalArguments.class);
+ });
}
@Test
- public void testSimpleArgumentsUsage() {
+ void testSimpleArgumentsUsage() {
String[] arguments = new String[] {"-encoding charset",
"[-iterations num]",
@@ -129,12 +142,12 @@ public class ArgumentParserTest {
int expectedLength = 2;
for (String arg : arguments) {
- Assert.assertTrue(usage.contains(arg));
+ Assertions.assertTrue(usage.contains(arg));
expectedLength += arg.length();
}
- Assert.assertTrue(usage.contains("a charset encoding"));
- Assert.assertTrue(expectedLength < usage.length());
+ Assertions.assertTrue(usage.contains("a charset encoding"));
+ Assertions.assertTrue(expectedLength < usage.length());
}
interface ExtendsEncodingParameter extends EncodingParameter {
@@ -143,17 +156,17 @@ public class ArgumentParserTest {
}
@Test
- public void testDefaultEncodingParameter() {
+ void testDefaultEncodingParameter() {
String[] args = "-something aValue".split(" ");
- Assert.assertTrue(ArgumentParser.validateArguments(args, ExtendsEncodingParameter.class));
+ Assertions.assertTrue(ArgumentParser.validateArguments(args, ExtendsEncodingParameter.class));
ExtendsEncodingParameter params = ArgumentParser.parse(args, ExtendsEncodingParameter.class);
- Assert.assertEquals(Charset.defaultCharset(), params.getEncoding());
+ Assertions.assertEquals(Charset.defaultCharset(), params.getEncoding());
}
@Test
- public void testSetEncodingParameter() {
+ void testSetEncodingParameter() {
Collection<Charset> availableCharset = Charset.availableCharsets().values();
String notTheDefaultCharset = StandardCharsets.UTF_8.name();
for (Charset charset : availableCharset) {
@@ -164,9 +177,9 @@ public class ArgumentParserTest {
}
String[] args = ("-something aValue -encoding " + notTheDefaultCharset).split(" ");
- Assert.assertTrue(ArgumentParser.validateArguments(args, ExtendsEncodingParameter.class));
+ Assertions.assertTrue(ArgumentParser.validateArguments(args, ExtendsEncodingParameter.class));
ExtendsEncodingParameter params = ArgumentParser.parse(args, ExtendsEncodingParameter.class);
- Assert.assertEquals(Charset.forName(notTheDefaultCharset), params.getEncoding());
+ Assertions.assertEquals(Charset.forName(notTheDefaultCharset), params.getEncoding());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/cmdline/CLITest.java b/opennlp-tools/src/test/java/opennlp/tools/cmdline/CLITest.java
index 5a358880..f09cd95d 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/cmdline/CLITest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/cmdline/CLITest.java
@@ -19,10 +19,10 @@ package opennlp.tools.cmdline;
import java.security.Permission;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
public class CLITest {
@@ -61,8 +61,8 @@ public class CLITest {
private final SecurityManager originalSecurityManager = System.getSecurityManager();
- @Before
- public void installNoExitSecurityManager() {
+ @BeforeEach
+ void installNoExitSecurityManager() {
System.setSecurityManager(new NoExitSecurityManager());
}
@@ -70,12 +70,12 @@ public class CLITest {
* Ensure the main method does not fail to print help message.
*/
@Test
- public void testMainHelpMessage() {
+ void testMainHelpMessage() {
try {
- CLI.main(new String[]{});
+ CLI.main(new String[] {});
} catch (ExitException e) {
- Assert.assertEquals(0, e.status());
+ Assertions.assertEquals(0, e.status());
}
}
@@ -83,11 +83,11 @@ public class CLITest {
* Ensure the main method prints error and returns 1.
*/
@Test
- public void testUnknownToolMessage() {
+ void testUnknownToolMessage() {
try {
- CLI.main(new String[]{"unknown name"});
+ CLI.main(new String[] {"unknown name"});
} catch (ExitException e) {
- Assert.assertEquals(1, e.status());
+ Assertions.assertEquals(1, e.status());
}
}
@@ -95,11 +95,11 @@ public class CLITest {
* Ensure the tool checks the parameter and returns 1.
*/
@Test
- public void testToolParameterMessage() {
+ void testToolParameterMessage() {
try {
- CLI.main(new String[]{"DoccatTrainer", "-param", "value"});
+ CLI.main(new String[] {"DoccatTrainer", "-param", "value"});
} catch (ExitException e) {
- Assert.assertEquals(1, e.status());
+ Assertions.assertEquals(1, e.status());
}
}
@@ -107,11 +107,11 @@ public class CLITest {
* Ensure the main method prints error and returns -1
*/
@Test
- public void testUnknownFileMessage() {
+ void testUnknownFileMessage() {
try {
- CLI.main(new String[]{"Doccat", "unknown.model"});
+ CLI.main(new String[] {"Doccat", "unknown.model"});
} catch (ExitException e) {
- Assert.assertEquals(-1, e.status());
+ Assertions.assertEquals(-1, e.status());
}
}
@@ -120,20 +120,20 @@ public class CLITest {
* Ensure all tools do not fail printing help message;
*/
@Test
- public void testHelpMessageOfTools() {
+ void testHelpMessageOfTools() {
for (String toolName : CLI.getToolNames()) {
System.err.println("-> ToolName" + toolName);
try {
- CLI.main(new String[]{toolName, "help"});
+ CLI.main(new String[] {toolName, "help"});
} catch (ExitException e) {
- Assert.assertEquals(0, e.status());
+ Assertions.assertEquals(0, e.status());
}
}
}
- @After
- public void restoreSecurityManager() {
+ @AfterEach
+ void restoreSecurityManager() {
System.setSecurityManager(originalSecurityManager);
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/cmdline/TerminateToolExceptionTest.java b/opennlp-tools/src/test/java/opennlp/tools/cmdline/TerminateToolExceptionTest.java
index cecff70b..22e53cc7 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/cmdline/TerminateToolExceptionTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/cmdline/TerminateToolExceptionTest.java
@@ -17,8 +17,8 @@
package opennlp.tools.cmdline;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
/**
* Tests for the {@link TerminateToolException} class.
@@ -26,8 +26,8 @@ import org.junit.Test;
public class TerminateToolExceptionTest {
@Test
- public void testCreation() {
+ void testCreation() {
TerminateToolException e = new TerminateToolException(-500);
- Assert.assertEquals(-500, e.getCode());
+ Assertions.assertEquals(-500, e.getCode());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/cmdline/TokenNameFinderToolTest.java b/opennlp-tools/src/test/java/opennlp/tools/cmdline/TokenNameFinderToolTest.java
index 830f3d2e..e5925d88 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/cmdline/TokenNameFinderToolTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/cmdline/TokenNameFinderToolTest.java
@@ -27,8 +27,8 @@ import java.io.InputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.cmdline.namefind.TokenNameFinderTool;
import opennlp.tools.namefind.NameFinderME;
@@ -44,45 +44,50 @@ import opennlp.tools.util.TrainingParameters;
public class TokenNameFinderToolTest {
@Test
- public void run() throws IOException {
+ void run() throws IOException {
File model1 = trainModel();
- String[] args = new String[]{model1.getAbsolutePath()};
-
+ String[] args = new String[] {model1.getAbsolutePath()};
+
final String in = "It is Stefanie Schmidt.\n\nNothing in this sentence.";
InputStream stream = new ByteArrayInputStream(in.getBytes(StandardCharsets.UTF_8));
-
+
System.setIn(stream);
-
+
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
System.setOut(ps);
TokenNameFinderTool tool = new TokenNameFinderTool();
tool.run(args);
-
+
final String content = new String(baos.toByteArray(), StandardCharsets.UTF_8);
- Assert.assertTrue(content.contains("It is <START:person> Stefanie Schmidt. <END>"));
+ Assertions.assertTrue(content.contains("It is <START:person> Stefanie Schmidt. <END>"));
model1.delete();
}
- @Test(expected = TerminateToolException.class)
- public void invalidModel() {
+ @Test
+ void invalidModel() {
+
+ Assertions.assertThrows(TerminateToolException.class, () -> {
- String[] args = new String[]{"invalidmodel.bin"};
+ String[] args = new String[] {"invalidmodel.bin"};
+
+ TokenNameFinderTool tool = new TokenNameFinderTool();
+ tool.run(args);
+
+ });
- TokenNameFinderTool tool = new TokenNameFinderTool();
- tool.run(args);
}
-
- @Test()
- public void usage() {
- String[] args = new String[]{};
-
+ @Test
+ void usage() {
+
+ String[] args = new String[] {};
+
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
System.setOut(ps);
@@ -91,10 +96,10 @@ public class TokenNameFinderToolTest {
tool.run(args);
final String content = new String(baos.toByteArray(), StandardCharsets.UTF_8);
- Assert.assertEquals(tool.getHelp(), content.trim());
-
+ Assertions.assertEquals(tool.getHelp(), content.trim());
+
}
-
+
private File trainModel() throws IOException {
ObjectStream<String> lineStream =
@@ -105,7 +110,7 @@ public class TokenNameFinderToolTest {
TrainingParameters params = new TrainingParameters();
params.put(TrainingParameters.ITERATIONS_PARAM, 70);
params.put(TrainingParameters.CUTOFF_PARAM, 1);
-
+
TokenNameFinderModel model;
TokenNameFinderFactory nameFinderFactory = new TokenNameFinderFactory();
@@ -114,15 +119,15 @@ public class TokenNameFinderToolTest {
model = NameFinderME.train("eng", null, sampleStream, params,
nameFinderFactory);
}
-
+
File modelFile = File.createTempFile("model", ".bin");
-
+
try (BufferedOutputStream modelOut =
new BufferedOutputStream(new FileOutputStream(modelFile))) {
model.serialize(modelOut);
}
-
+
return modelFile;
}
-
+
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/convert/FileToStringSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/convert/FileToStringSampleStreamTest.java
index e9f3892b..b274be94 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/convert/FileToStringSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/convert/FileToStringSampleStreamTest.java
@@ -20,48 +20,48 @@ package opennlp.tools.convert;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
+import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.io.FileUtils;
-import org.junit.Assert;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
import opennlp.tools.formats.DirectorySampleStream;
import opennlp.tools.formats.convert.FileToStringSampleStream;
public class FileToStringSampleStreamTest {
- @Rule
- public TemporaryFolder directory = new TemporaryFolder();
+ @TempDir
+ public Path directory;
@Test
public void readFileTest() throws IOException {
final String sentence1 = "This is a sentence.";
final String sentence2 = "This is another sentence.";
-
+
List<String> sentences = Arrays.asList(sentence1, sentence2);
-
+
DirectorySampleStream directorySampleStream =
- new DirectorySampleStream(directory.getRoot(), null, false);
-
- File tempFile1 = directory.newFile();
+ new DirectorySampleStream(directory.toFile(), null, false);
+
+ File tempFile1 = directory.resolve("tempFile1").toFile();
FileUtils.writeStringToFile(tempFile1, sentence1);
-
- File tempFile2 = directory.newFile();
+
+ File tempFile2 = directory.resolve("tempFile2").toFile();
FileUtils.writeStringToFile(tempFile2, sentence2);
-
+
try (FileToStringSampleStream stream =
- new FileToStringSampleStream(directorySampleStream, Charset.defaultCharset())) {
+ new FileToStringSampleStream(directorySampleStream, Charset.defaultCharset())) {
String read = stream.read();
- Assert.assertTrue(sentences.contains(read));
+ Assertions.assertTrue(sentences.contains(read));
read = stream.read();
- Assert.assertTrue(sentences.contains(read));
+ Assertions.assertTrue(sentences.contains(read));
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/dictionary/DictionaryAsSetCaseInsensitiveTest.java b/opennlp-tools/src/test/java/opennlp/tools/dictionary/DictionaryAsSetCaseInsensitiveTest.java
index 8b975b50..f6413530 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/dictionary/DictionaryAsSetCaseInsensitiveTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/dictionary/DictionaryAsSetCaseInsensitiveTest.java
@@ -22,8 +22,8 @@ import java.util.Iterator;
import java.util.List;
import java.util.Set;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.StringList;
@@ -41,7 +41,7 @@ public class DictionaryAsSetCaseInsensitiveTest {
* Tests a basic lookup.
*/
@Test
- public void testLookup() {
+ void testLookup() {
String a = "a";
String b = "b";
@@ -52,17 +52,17 @@ public class DictionaryAsSetCaseInsensitiveTest {
Set<String> set = dict.asStringSet();
- Assert.assertTrue(set.contains(a));
- Assert.assertFalse(set.contains(b));
+ Assertions.assertTrue(set.contains(a));
+ Assertions.assertFalse(set.contains(b));
- Assert.assertTrue(set.contains(a.toUpperCase()));
+ Assertions.assertTrue(set.contains(a.toUpperCase()));
}
/**
* Tests set.
*/
@Test
- public void testSet() {
+ void testSet() {
String a = "a";
String a1 = "a";
@@ -74,15 +74,15 @@ public class DictionaryAsSetCaseInsensitiveTest {
Set<String> set = dict.asStringSet();
- Assert.assertTrue(set.contains(a));
- Assert.assertEquals(1, set.size());
+ Assertions.assertTrue(set.contains(a));
+ Assertions.assertEquals(1, set.size());
}
/**
* Tests set.
*/
@Test
- public void testSetDiffCase() {
+ void testSetDiffCase() {
String a = "a";
String a1 = "A";
@@ -94,15 +94,15 @@ public class DictionaryAsSetCaseInsensitiveTest {
Set<String> set = dict.asStringSet();
- Assert.assertTrue(set.contains(a));
- Assert.assertEquals(1, set.size());
+ Assertions.assertTrue(set.contains(a));
+ Assertions.assertEquals(1, set.size());
}
/**
* Tests for the {@link Dictionary#equals(Object)} method.
*/
@Test
- public void testEquals() {
+ void testEquals() {
String entry1 = "1a";
String entry2 = "1b";
@@ -118,14 +118,14 @@ public class DictionaryAsSetCaseInsensitiveTest {
Set<String> setB = dictB.asStringSet();
- Assert.assertTrue(setA.equals(setB));
+ Assertions.assertTrue(setA.equals(setB));
}
/**
* Tests for the {@link Dictionary#equals(Object)} method.
*/
@Test
- public void testEqualsDifferentCase() {
+ void testEqualsDifferentCase() {
Dictionary dictA = getDict();
dictA.put(asSL("1a"));
@@ -139,14 +139,14 @@ public class DictionaryAsSetCaseInsensitiveTest {
Set<String> setB = dictB.asStringSet();
- Assert.assertTrue(setA.equals(setB));
+ Assertions.assertTrue(setA.equals(setB));
}
/**
* Tests the {@link Dictionary#hashCode()} method.
*/
@Test
- public void testHashCode() {
+ void testHashCode() {
String entry1 = "a1";
Dictionary dictA = getDict();
@@ -159,14 +159,14 @@ public class DictionaryAsSetCaseInsensitiveTest {
Set<String> setB = dictB.asStringSet();
- Assert.assertEquals(setA.hashCode(), setB.hashCode());
+ Assertions.assertEquals(setA.hashCode(), setB.hashCode());
}
/**
* Tests the {@link Dictionary#hashCode()} method.
*/
@Test
- public void testHashCodeDifferentCase() {
+ void testHashCodeDifferentCase() {
String entry1 = "a1";
Dictionary dictA = getDict();
@@ -180,14 +180,14 @@ public class DictionaryAsSetCaseInsensitiveTest {
Set<String> setB = dictB.asStringSet();
// TODO: should it be equal??
- Assert.assertNotSame(setA.hashCode(), setB.hashCode());
+ Assertions.assertNotSame(setA.hashCode(), setB.hashCode());
}
/**
* Tests the lookup of tokens of different case.
*/
@Test
- public void testDifferentCaseLookup() {
+ void testDifferentCaseLookup() {
String entry1 = "1a";
String entry2 = "1A";
@@ -199,14 +199,14 @@ public class DictionaryAsSetCaseInsensitiveTest {
Set<String> set = dict.asStringSet();
- Assert.assertTrue(set.contains(entry2));
+ Assertions.assertTrue(set.contains(entry2));
}
/**
* Tests the iterator implementation
*/
@Test
- public void testIterator() {
+ void testIterator() {
String entry1 = "1a";
String entry2 = "1b";
@@ -223,9 +223,9 @@ public class DictionaryAsSetCaseInsensitiveTest {
elements.add(it.next());
}
- Assert.assertEquals(2, elements.size());
- Assert.assertTrue(elements.contains(entry1));
- Assert.assertTrue(elements.contains(entry2));
+ Assertions.assertEquals(2, elements.size());
+ Assertions.assertTrue(elements.contains(entry1));
+ Assertions.assertTrue(elements.contains(entry2));
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/dictionary/DictionaryAsSetCaseSensitiveTest.java b/opennlp-tools/src/test/java/opennlp/tools/dictionary/DictionaryAsSetCaseSensitiveTest.java
index ce0949fd..0515da8a 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/dictionary/DictionaryAsSetCaseSensitiveTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/dictionary/DictionaryAsSetCaseSensitiveTest.java
@@ -22,8 +22,8 @@ import java.util.Iterator;
import java.util.List;
import java.util.Set;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.StringList;
@@ -41,7 +41,7 @@ public class DictionaryAsSetCaseSensitiveTest {
* Tests a basic lookup.
*/
@Test
- public void testLookup() {
+ void testLookup() {
String a = "a";
String b = "b";
@@ -52,17 +52,17 @@ public class DictionaryAsSetCaseSensitiveTest {
Set<String> set = dict.asStringSet();
- Assert.assertTrue(set.contains(a));
- Assert.assertFalse(set.contains(b));
+ Assertions.assertTrue(set.contains(a));
+ Assertions.assertFalse(set.contains(b));
- Assert.assertFalse(set.contains(a.toUpperCase()));
+ Assertions.assertFalse(set.contains(a.toUpperCase()));
}
/**
* Tests set.
*/
@Test
- public void testSet() {
+ void testSet() {
String a = "a";
String a1 = "a";
@@ -74,15 +74,15 @@ public class DictionaryAsSetCaseSensitiveTest {
Set<String> set = dict.asStringSet();
- Assert.assertTrue(set.contains(a));
- Assert.assertEquals(1, set.size());
+ Assertions.assertTrue(set.contains(a));
+ Assertions.assertEquals(1, set.size());
}
/**
* Tests set.
*/
@Test
- public void testSetDiffCase() {
+ void testSetDiffCase() {
String a = "a";
String a1 = "A";
@@ -94,15 +94,15 @@ public class DictionaryAsSetCaseSensitiveTest {
Set<String> set = dict.asStringSet();
- Assert.assertTrue(set.contains(a));
- Assert.assertEquals(2, set.size());
+ Assertions.assertTrue(set.contains(a));
+ Assertions.assertEquals(2, set.size());
}
/**
* Tests for the {@link Dictionary#equals(Object)} method.
*/
@Test
- public void testEquals() {
+ void testEquals() {
String entry1 = "1a";
String entry2 = "1b";
@@ -118,14 +118,14 @@ public class DictionaryAsSetCaseSensitiveTest {
Set<String> setB = dictB.asStringSet();
- Assert.assertTrue(setA.equals(setB));
+ Assertions.assertTrue(setA.equals(setB));
}
/**
* Tests for the {@link Dictionary#equals(Object)} method.
*/
@Test
- public void testEqualsDifferentCase() {
+ void testEqualsDifferentCase() {
Dictionary dictA = getDict();
dictA.put(asSL("1a"));
@@ -140,14 +140,14 @@ public class DictionaryAsSetCaseSensitiveTest {
Set<String> setB = dictB.asStringSet();
// should fail in case sensitive dict
- Assert.assertFalse(setA.equals(setB));
+ Assertions.assertFalse(setA.equals(setB));
}
/**
* Tests the {@link Dictionary#hashCode()} method.
*/
@Test
- public void testHashCode() {
+ void testHashCode() {
String entry1 = "a1";
Dictionary dictA = getDict();
@@ -160,14 +160,14 @@ public class DictionaryAsSetCaseSensitiveTest {
Set<String> setB = dictB.asStringSet();
- Assert.assertEquals(setA.hashCode(), setB.hashCode());
+ Assertions.assertEquals(setA.hashCode(), setB.hashCode());
}
/**
* Tests the {@link Dictionary#hashCode()} method.
*/
@Test
- public void testHashCodeDifferentCase() {
+ void testHashCodeDifferentCase() {
String entry1 = "a1";
Dictionary dictA = getDict();
@@ -181,14 +181,14 @@ public class DictionaryAsSetCaseSensitiveTest {
Set<String> setB = dictB.asStringSet();
// TODO: should it be equal??
- Assert.assertNotSame(setA.hashCode(), setB.hashCode());
+ Assertions.assertNotSame(setA.hashCode(), setB.hashCode());
}
/**
* Tests the lookup of tokens of different case.
*/
@Test
- public void testDifferentCaseLookup() {
+ void testDifferentCaseLookup() {
String entry1 = "1a";
String entry2 = "1A";
@@ -201,14 +201,14 @@ public class DictionaryAsSetCaseSensitiveTest {
Set<String> set = dict.asStringSet();
// should return false because 1a != 1A in a case sensitive lookup
- Assert.assertFalse(set.contains(entry2));
+ Assertions.assertFalse(set.contains(entry2));
}
/**
* Tests the iterator implementation
*/
@Test
- public void testIterator() {
+ void testIterator() {
String entry1 = "1a";
String entry2 = "1b";
@@ -225,11 +225,11 @@ public class DictionaryAsSetCaseSensitiveTest {
elements.add(it.next());
}
- Assert.assertEquals(4, elements.size());
- Assert.assertTrue(elements.contains(entry1));
- Assert.assertTrue(elements.contains(entry2));
- Assert.assertTrue(elements.contains(entry1.toUpperCase()));
- Assert.assertTrue(elements.contains(entry2.toUpperCase()));
+ Assertions.assertEquals(4, elements.size());
+ Assertions.assertTrue(elements.contains(entry1));
+ Assertions.assertTrue(elements.contains(entry2));
+ Assertions.assertTrue(elements.contains(entry1.toUpperCase()));
+ Assertions.assertTrue(elements.contains(entry2.toUpperCase()));
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/dictionary/DictionaryTest.java b/opennlp-tools/src/test/java/opennlp/tools/dictionary/DictionaryTest.java
index 54e537fd..57237f53 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/dictionary/DictionaryTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/dictionary/DictionaryTest.java
@@ -22,8 +22,8 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.StringReader;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.StringList;
@@ -51,7 +51,7 @@ public class DictionaryTest {
* Tests a basic lookup.
*/
@Test
- public void testLookup() {
+ void testLookup() {
StringList entry1 = new StringList("1a", "1b");
StringList entry1u = new StringList("1A", "1B");
@@ -61,16 +61,16 @@ public class DictionaryTest {
dict.put(entry1);
- Assert.assertTrue(dict.contains(entry1));
- Assert.assertTrue(dict.contains(entry1u));
- Assert.assertTrue(!dict.contains(entry2));
+ Assertions.assertTrue(dict.contains(entry1));
+ Assertions.assertTrue(dict.contains(entry1u));
+ Assertions.assertTrue(!dict.contains(entry2));
}
/**
* Test lookup with case sensitive dictionary
*/
@Test
- public void testLookupCaseSensitive() {
+ void testLookupCaseSensitive() {
StringList entry1 = new StringList("1a", "1b");
StringList entry1u = new StringList("1A", "1B");
StringList entry2 = new StringList("1A", "1C");
@@ -79,9 +79,9 @@ public class DictionaryTest {
dict.put(entry1);
- Assert.assertTrue(dict.contains(entry1));
- Assert.assertTrue(!dict.contains(entry1u));
- Assert.assertTrue(!dict.contains(entry2));
+ Assertions.assertTrue(dict.contains(entry1));
+ Assertions.assertTrue(!dict.contains(entry1u));
+ Assertions.assertTrue(!dict.contains(entry2));
}
/**
@@ -91,7 +91,7 @@ public class DictionaryTest {
* @throws InvalidFormatException
*/
@Test
- public void testSerialization() throws IOException {
+ void testSerialization() throws IOException {
Dictionary reference = getCaseInsensitive();
String a1 = "a1";
@@ -108,7 +108,7 @@ public class DictionaryTest {
Dictionary recreated = new Dictionary(
new ByteArrayInputStream(out.toByteArray()));
- Assert.assertTrue(reference.equals(recreated));
+ Assertions.assertTrue(reference.equals(recreated));
}
/**
@@ -118,25 +118,25 @@ public class DictionaryTest {
* @throws IOException
*/
@Test
- public void testParseOneEntryPerLine() throws IOException {
+ void testParseOneEntryPerLine() throws IOException {
String testDictionary = "1a 1b 1c 1d \n 2a 2b 2c \n 3a \n 4a 4b ";
Dictionary dictionay =
Dictionary.parseOneEntryPerLine(new StringReader(testDictionary));
- Assert.assertTrue(dictionay.size() == 4);
- Assert.assertTrue(dictionay.contains(new StringList("1a", "1b", "1c", "1d")));
- Assert.assertTrue(dictionay.contains(new StringList("2a", "2b", "2c")));
- Assert.assertTrue(dictionay.contains(new StringList(new String[]{"3a"})));
- Assert.assertTrue(dictionay.contains(new StringList("4a", "4b")));
+ Assertions.assertTrue(dictionay.size() == 4);
+ Assertions.assertTrue(dictionay.contains(new StringList("1a", "1b", "1c", "1d")));
+ Assertions.assertTrue(dictionay.contains(new StringList("2a", "2b", "2c")));
+ Assertions.assertTrue(dictionay.contains(new StringList(new String[] {"3a"})));
+ Assertions.assertTrue(dictionay.contains(new StringList("4a", "4b")));
}
/**
* Tests for the {@link Dictionary#equals(Object)} method.
*/
@Test
- public void testEquals() {
+ void testEquals() {
StringList entry1 = new StringList("1a", "1b");
StringList entry2 = new StringList("2a", "2b");
@@ -152,16 +152,16 @@ public class DictionaryTest {
dictC.put(entry1);
dictC.put(entry2);
- Assert.assertTrue(dictA.equals(dictB));
- Assert.assertTrue(dictC.equals(dictA));
- Assert.assertTrue(dictB.equals(dictC));
+ Assertions.assertTrue(dictA.equals(dictB));
+ Assertions.assertTrue(dictC.equals(dictA));
+ Assertions.assertTrue(dictB.equals(dictC));
}
/**
* Tests the {@link Dictionary#hashCode()} method.
*/
@Test
- public void testHashCode() {
+ void testHashCode() {
StringList entry1 = new StringList("1a", "1b");
StringList entry2 = new StringList("1A", "1B");
@@ -177,16 +177,16 @@ public class DictionaryTest {
Dictionary dictD = getCaseSensitive();
dictD.put(entry2);
- Assert.assertEquals(dictA.hashCode(), dictB.hashCode());
- Assert.assertEquals(dictB.hashCode(), dictC.hashCode());
- Assert.assertEquals(dictC.hashCode(), dictD.hashCode());
+ Assertions.assertEquals(dictA.hashCode(), dictB.hashCode());
+ Assertions.assertEquals(dictB.hashCode(), dictC.hashCode());
+ Assertions.assertEquals(dictC.hashCode(), dictD.hashCode());
}
/**
* Tests for the {@link Dictionary#toString()} method.
*/
@Test
- public void testToString() {
+ void testToString() {
StringList entry1 = new StringList("1a", "1b");
Dictionary dictA = getCaseInsensitive();
@@ -202,7 +202,7 @@ public class DictionaryTest {
* Tests the lookup of tokens of different case.
*/
@Test
- public void testDifferentCaseLookup() {
+ void testDifferentCaseLookup() {
StringList entry1 = new StringList("1a", "1b");
StringList entry2 = new StringList("1A", "1B");
@@ -211,14 +211,14 @@ public class DictionaryTest {
dict.put(entry1);
- Assert.assertTrue(dict.contains(entry2));
+ Assertions.assertTrue(dict.contains(entry2));
}
/**
* Tests the lookup of tokens of different case.
*/
@Test
- public void testDifferentCaseLookupCaseSensitive() {
+ void testDifferentCaseLookupCaseSensitive() {
StringList entry1 = new StringList("1a", "1b");
StringList entry2 = new StringList("1A", "1B");
@@ -227,7 +227,7 @@ public class DictionaryTest {
dict.put(entry1);
- Assert.assertTrue(!dict.contains(entry2));
+ Assertions.assertTrue(!dict.contains(entry2));
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/doccat/BagOfWordsFeatureGeneratorTest.java b/opennlp-tools/src/test/java/opennlp/tools/doccat/BagOfWordsFeatureGeneratorTest.java
index 2b128d94..c0037938 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/doccat/BagOfWordsFeatureGeneratorTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/doccat/BagOfWordsFeatureGeneratorTest.java
@@ -19,44 +19,43 @@ package opennlp.tools.doccat;
import java.util.Collections;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class BagOfWordsFeatureGeneratorTest {
@Test
- public void testNull() {
+ void testNull() {
BagOfWordsFeatureGenerator generator = new BagOfWordsFeatureGenerator();
try {
generator.extractFeatures(null, Collections.emptyMap());
- Assert.fail("NullPointerException must be thrown");
- }
- catch (NullPointerException expected) {
+ Assertions.fail("NullPointerException must be thrown");
+ } catch (NullPointerException expected) {
}
}
@Test
- public void testEmpty() {
+ void testEmpty() {
BagOfWordsFeatureGenerator generator = new BagOfWordsFeatureGenerator();
- Assert.assertEquals(0, generator.extractFeatures(new String[]{}, Collections.emptyMap()).size());
+ Assertions.assertEquals(0, generator.extractFeatures(new String[] {}, Collections.emptyMap()).size());
}
@Test
- public void testUseAllTokens() {
+ void testUseAllTokens() {
BagOfWordsFeatureGenerator generator = new BagOfWordsFeatureGenerator();
- Assert.assertArrayEquals(new String[]{"bow=it", "bow=is", "bow=12.345", "bow=feet", "bow=long"},
- generator.extractFeatures(new String[]{"it", "is", "12.345", "feet", "long"},
+ Assertions.assertArrayEquals(new String[] {"bow=it", "bow=is", "bow=12.345", "bow=feet", "bow=long"},
+ generator.extractFeatures(new String[] {"it", "is", "12.345", "feet", "long"},
Collections.emptyMap()).toArray());
}
@Test
- public void testOnlyLetterTokens() {
+ void testOnlyLetterTokens() {
BagOfWordsFeatureGenerator generator = new BagOfWordsFeatureGenerator(true);
- Assert.assertArrayEquals(new String[]{"bow=it", "bow=is", "bow=feet", "bow=long"},
- generator.extractFeatures(new String[]{"it", "is", "12.345", "feet", "long"},
- Collections.emptyMap()).toArray());
+ Assertions.assertArrayEquals(new String[] {"bow=it", "bow=is", "bow=feet", "bow=long"},
+ generator.extractFeatures(new String[] {"it", "is", "12.345", "feet", "long"},
+ Collections.emptyMap()).toArray());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/doccat/DoccatFactoryTest.java b/opennlp-tools/src/test/java/opennlp/tools/doccat/DoccatFactoryTest.java
index 2b247a17..2139b4ab 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/doccat/DoccatFactoryTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/doccat/DoccatFactoryTest.java
@@ -22,8 +22,8 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.util.InputStreamFactory;
@@ -56,10 +56,10 @@ public class DoccatFactoryTest {
}
@Test
- public void testDefault() throws IOException {
+ void testDefault() throws IOException {
DoccatModel model = train();
- Assert.assertNotNull(model);
+ Assertions.assertNotNull(model);
ByteArrayOutputStream out = new ByteArrayOutputStream();
model.serialize(out);
@@ -69,24 +69,24 @@ public class DoccatFactoryTest {
DoccatFactory factory = fromSerialized.getFactory();
- Assert.assertNotNull(factory);
+ Assertions.assertNotNull(factory);
- Assert.assertEquals(1, factory.getFeatureGenerators().length);
- Assert.assertEquals(BagOfWordsFeatureGenerator.class,
+ Assertions.assertEquals(1, factory.getFeatureGenerators().length);
+ Assertions.assertEquals(BagOfWordsFeatureGenerator.class,
factory.getFeatureGenerators()[0].getClass());
}
@Test
- public void testCustom() throws IOException {
- FeatureGenerator[] featureGenerators = { new BagOfWordsFeatureGenerator(),
- new NGramFeatureGenerator(), new NGramFeatureGenerator(2,3) };
+ void testCustom() throws IOException {
+ FeatureGenerator[] featureGenerators = {new BagOfWordsFeatureGenerator(),
+ new NGramFeatureGenerator(), new NGramFeatureGenerator(2, 3)};
DoccatFactory factory = new DoccatFactory(featureGenerators);
DoccatModel model = train(factory);
- Assert.assertNotNull(model);
+ Assertions.assertNotNull(model);
ByteArrayOutputStream out = new ByteArrayOutputStream();
model.serialize(out);
@@ -96,14 +96,14 @@ public class DoccatFactoryTest {
factory = fromSerialized.getFactory();
- Assert.assertNotNull(factory);
+ Assertions.assertNotNull(factory);
- Assert.assertEquals(3, factory.getFeatureGenerators().length);
- Assert.assertEquals(BagOfWordsFeatureGenerator.class,
+ Assertions.assertEquals(3, factory.getFeatureGenerators().length);
+ Assertions.assertEquals(BagOfWordsFeatureGenerator.class,
factory.getFeatureGenerators()[0].getClass());
- Assert.assertEquals(NGramFeatureGenerator.class,
+ Assertions.assertEquals(NGramFeatureGenerator.class,
factory.getFeatureGenerators()[1].getClass());
- Assert.assertEquals(NGramFeatureGenerator.class,factory.getFeatureGenerators()[2].getClass());
+ Assertions.assertEquals(NGramFeatureGenerator.class, factory.getFeatureGenerators()[2].getClass());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentCategorizerMETest.java b/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentCategorizerMETest.java
index 5e8ddaf5..b2941ad0 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentCategorizerMETest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentCategorizerMETest.java
@@ -21,8 +21,8 @@ import java.io.IOException;
import java.util.Set;
import java.util.SortedMap;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.InsufficientTrainingDataException;
import opennlp.tools.util.ObjectStream;
@@ -32,50 +32,55 @@ import opennlp.tools.util.TrainingParameters;
public class DocumentCategorizerMETest {
@Test
- public void testSimpleTraining() throws IOException {
+ void testSimpleTraining() throws IOException {
ObjectStream<DocumentSample> samples = ObjectStreamUtils.createObjectStream(
- new DocumentSample("1", new String[]{"a", "b", "c"}),
- new DocumentSample("1", new String[]{"a", "b", "c", "1", "2"}),
- new DocumentSample("1", new String[]{"a", "b", "c", "3", "4"}),
- new DocumentSample("0", new String[]{"x", "y", "z"}),
- new DocumentSample("0", new String[]{"x", "y", "z", "5", "6"}),
- new DocumentSample("0", new String[]{"x", "y", "z", "7", "8"}));
+ new DocumentSample("1", new String[] {"a", "b", "c"}),
+ new DocumentSample("1", new String[] {"a", "b", "c", "1", "2"}),
+ new DocumentSample("1", new String[] {"a", "b", "c", "3", "4"}),
+ new DocumentSample("0", new String[] {"x", "y", "z"}),
+ new DocumentSample("0", new String[] {"x", "y", "z", "5", "6"}),
+ new DocumentSample("0", new String[] {"x", "y", "z", "7", "8"}));
TrainingParameters params = new TrainingParameters();
params.put(TrainingParameters.ITERATIONS_PARAM, 100);
params.put(TrainingParameters.CUTOFF_PARAM, 0);
DoccatModel model = DocumentCategorizerME.train("x-unspecified", samples,
- params, new DoccatFactory());
+ params, new DoccatFactory());
DocumentCategorizer doccat = new DocumentCategorizerME(model);
- double[] aProbs = doccat.categorize(new String[]{"a"});
- Assert.assertEquals("1", doccat.getBestCategory(aProbs));
+ double[] aProbs = doccat.categorize(new String[] {"a"});
+ Assertions.assertEquals("1", doccat.getBestCategory(aProbs));
- double[] bProbs = doccat.categorize(new String[]{"x"});
- Assert.assertEquals("0", doccat.getBestCategory(bProbs));
+ double[] bProbs = doccat.categorize(new String[] {"x"});
+ Assertions.assertEquals("0", doccat.getBestCategory(bProbs));
//test to make sure sorted map's last key is cat 1 because it has the highest score.
- SortedMap<Double, Set<String>> sortedScoreMap = doccat.sortedScoreMap(new String[]{"a"});
+ SortedMap<Double, Set<String>> sortedScoreMap = doccat.sortedScoreMap(new String[] {"a"});
Set<String> cat = sortedScoreMap.get(sortedScoreMap.lastKey());
- Assert.assertEquals(1, cat.size());
+ Assertions.assertEquals(1, cat.size());
}
-
- @Test(expected = InsufficientTrainingDataException.class)
- public void insufficientTestData() throws IOException {
- ObjectStream<DocumentSample> samples = ObjectStreamUtils.createObjectStream(
- new DocumentSample("1", new String[]{"a", "b", "c"}));
+ @Test
+ void insufficientTestData() {
- TrainingParameters params = new TrainingParameters();
- params.put(TrainingParameters.ITERATIONS_PARAM, 100);
- params.put(TrainingParameters.CUTOFF_PARAM, 0);
+ Assertions.assertThrows(InsufficientTrainingDataException.class, () -> {
+
+ ObjectStream<DocumentSample> samples = ObjectStreamUtils.createObjectStream(
+ new DocumentSample("1", new String[] {"a", "b", "c"}));
+
+ TrainingParameters params = new TrainingParameters();
+ params.put(TrainingParameters.ITERATIONS_PARAM, 100);
+ params.put(TrainingParameters.CUTOFF_PARAM, 0);
+
+ DocumentCategorizerME.train("x-unspecified", samples,
+ params, new DoccatFactory());
+
+ });
- DocumentCategorizerME.train("x-unspecified", samples,
- params, new DoccatFactory());
}
-
+
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentCategorizerNBTest.java b/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentCategorizerNBTest.java
index 1c96a367..4c3fd562 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentCategorizerNBTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentCategorizerNBTest.java
@@ -21,8 +21,8 @@ import java.io.IOException;
import java.util.Set;
import java.util.SortedMap;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.ml.AbstractTrainer;
import opennlp.tools.ml.naivebayes.NaiveBayesTrainer;
@@ -33,15 +33,15 @@ import opennlp.tools.util.TrainingParameters;
public class DocumentCategorizerNBTest {
@Test
- public void testSimpleTraining() throws IOException {
+ void testSimpleTraining() throws IOException {
ObjectStream<DocumentSample> samples = ObjectStreamUtils.createObjectStream(
- new DocumentSample("1", new String[]{"a", "b", "c"}),
- new DocumentSample("1", new String[]{"a", "b", "c", "1", "2"}),
- new DocumentSample("1", new String[]{"a", "b", "c", "3", "4"}),
- new DocumentSample("0", new String[]{"x", "y", "z"}),
- new DocumentSample("0", new String[]{"x", "y", "z", "5", "6"}),
- new DocumentSample("0", new String[]{"x", "y", "z", "7", "8"}));
+ new DocumentSample("1", new String[] {"a", "b", "c"}),
+ new DocumentSample("1", new String[] {"a", "b", "c", "1", "2"}),
+ new DocumentSample("1", new String[] {"a", "b", "c", "3", "4"}),
+ new DocumentSample("0", new String[] {"x", "y", "z"}),
+ new DocumentSample("0", new String[] {"x", "y", "z", "5", "6"}),
+ new DocumentSample("0", new String[] {"x", "y", "z", "7", "8"}));
TrainingParameters params = new TrainingParameters();
params.put(TrainingParameters.ITERATIONS_PARAM, 100);
@@ -53,16 +53,16 @@ public class DocumentCategorizerNBTest {
DocumentCategorizer doccat = new DocumentCategorizerME(model);
- double[] aProbs = doccat.categorize(new String[]{"a"});
- Assert.assertEquals("1", doccat.getBestCategory(aProbs));
+ double[] aProbs = doccat.categorize(new String[] {"a"});
+ Assertions.assertEquals("1", doccat.getBestCategory(aProbs));
- double[] bProbs = doccat.categorize(new String[]{"x"});
- Assert.assertEquals("0", doccat.getBestCategory(bProbs));
+ double[] bProbs = doccat.categorize(new String[] {"x"});
+ Assertions.assertEquals("0", doccat.getBestCategory(bProbs));
//test to make sure sorted map's last key is cat 1 because it has the highest score.
- SortedMap<Double, Set<String>> sortedScoreMap = doccat.sortedScoreMap(new String[]{"a"});
+ SortedMap<Double, Set<String>> sortedScoreMap = doccat.sortedScoreMap(new String[] {"a"});
Set<String> cat = sortedScoreMap.get(sortedScoreMap.lastKey());
- Assert.assertEquals(1, cat.size());
+ Assertions.assertEquals(1, cat.size());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentSampleTest.java b/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentSampleTest.java
index 2e7bfb7d..04f52424 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentSampleTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentSampleTest.java
@@ -25,21 +25,21 @@ import java.io.ObjectInputStream;
import java.io.ObjectOutput;
import java.io.ObjectOutputStream;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class DocumentSampleTest {
@Test
- public void testEquals() {
- Assert.assertFalse(createGoldSample() == createGoldSample());
- Assert.assertTrue(createGoldSample().equals(createGoldSample()));
- Assert.assertFalse(createPredSample().equals(createGoldSample()));
- Assert.assertFalse(createPredSample().equals(new Object()));
+ void testEquals() {
+ Assertions.assertFalse(createGoldSample() == createGoldSample());
+ Assertions.assertTrue(createGoldSample().equals(createGoldSample()));
+ Assertions.assertFalse(createPredSample().equals(createGoldSample()));
+ Assertions.assertFalse(createPredSample().equals(new Object()));
}
@Test
- public void testDocumentSampleSerDe() throws IOException {
+ void testDocumentSampleSerDe() throws IOException {
DocumentSample documentSample = createGoldSample();
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
ObjectOutput out = new ObjectOutputStream(byteArrayOutputStream);
@@ -57,17 +57,17 @@ public class DocumentSampleTest {
// do nothing
}
- Assert.assertNotNull(deSerializedDocumentSample);
- Assert.assertEquals(documentSample.getCategory(), deSerializedDocumentSample.getCategory());
- Assert.assertArrayEquals(documentSample.getText(), deSerializedDocumentSample.getText());
+ Assertions.assertNotNull(deSerializedDocumentSample);
+ Assertions.assertEquals(documentSample.getCategory(), deSerializedDocumentSample.getCategory());
+ Assertions.assertArrayEquals(documentSample.getText(), deSerializedDocumentSample.getText());
}
public static DocumentSample createGoldSample() {
- return new DocumentSample("aCategory", new String[]{"a", "small", "text"});
+ return new DocumentSample("aCategory", new String[] {"a", "small", "text"});
}
public static DocumentSample createPredSample() {
- return new DocumentSample("anotherCategory", new String[]{"a", "small", "text"});
+ return new DocumentSample("anotherCategory", new String[] {"a", "small", "text"});
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/doccat/NGramFeatureGeneratorTest.java b/opennlp-tools/src/test/java/opennlp/tools/doccat/NGramFeatureGeneratorTest.java
index 0aef3ea4..a108eb13 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/doccat/NGramFeatureGeneratorTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/doccat/NGramFeatureGeneratorTest.java
@@ -19,111 +19,108 @@ package opennlp.tools.doccat;
import java.util.Collections;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.InvalidFormatException;
public class NGramFeatureGeneratorTest {
- static final String[] TOKENS = new String[]{"a", "b", "c", "d", "e", "f", "g"};
+ static final String[] TOKENS = new String[] {"a", "b", "c", "d", "e", "f", "g"};
@Test
- public void testNull() throws Exception {
+ void testNull() throws Exception {
NGramFeatureGenerator generator = new NGramFeatureGenerator();
try {
generator.extractFeatures(null, Collections.emptyMap());
- Assert.fail("NullPointerException must be thrown");
- }
- catch (NullPointerException expected) {
+ Assertions.fail("NullPointerException must be thrown");
+ } catch (NullPointerException expected) {
}
}
@Test
- public void testEmpty() throws Exception {
+ void testEmpty() throws Exception {
NGramFeatureGenerator generator = new NGramFeatureGenerator();
- Assert.assertEquals(0, generator.extractFeatures(new String[]{}, Collections.emptyMap()).size());
+ Assertions.assertEquals(0, generator.extractFeatures(new String[] {}, Collections.emptyMap()).size());
}
@Test
- public void testInvalidGramSize1() {
+ void testInvalidGramSize1() {
try {
new NGramFeatureGenerator(0, 1);
- Assert.fail("InvalidFormatException must be thrown");
- }
- catch (InvalidFormatException expected) {
+ Assertions.fail("InvalidFormatException must be thrown");
+ } catch (InvalidFormatException expected) {
}
}
@Test
- public void testInvalidGramSize2() {
+ void testInvalidGramSize2() {
try {
new NGramFeatureGenerator(2, 1);
- Assert.fail("InvalidFormatException must be thrown");
- }
- catch (InvalidFormatException expected) {
+ Assertions.fail("InvalidFormatException must be thrown");
+ } catch (InvalidFormatException expected) {
}
}
@Test
- public void testUnigram() throws Exception {
+ void testUnigram() throws Exception {
NGramFeatureGenerator generator = new NGramFeatureGenerator(1, 1);
- Assert.assertArrayEquals(
- new String[]{"ng=:a", "ng=:b", "ng=:c", "ng=:d", "ng=:e", "ng=:f", "ng=:g"},
+ Assertions.assertArrayEquals(
+ new String[] {"ng=:a", "ng=:b", "ng=:c", "ng=:d", "ng=:e", "ng=:f", "ng=:g"},
generator.extractFeatures(TOKENS, Collections.emptyMap()).toArray());
}
@Test
- public void testBigram() throws Exception {
+ void testBigram() throws Exception {
NGramFeatureGenerator generator = new NGramFeatureGenerator(2, 2);
- Assert.assertArrayEquals(
- new String[]{"ng=:a:b", "ng=:b:c", "ng=:c:d", "ng=:d:e", "ng=:e:f", "ng=:f:g"},
+ Assertions.assertArrayEquals(
+ new String[] {"ng=:a:b", "ng=:b:c", "ng=:c:d", "ng=:d:e", "ng=:e:f", "ng=:f:g"},
generator.extractFeatures(TOKENS, Collections.emptyMap()).toArray());
}
@Test
- public void testTrigram() throws Exception {
+ void testTrigram() throws Exception {
NGramFeatureGenerator generator = new NGramFeatureGenerator(3, 3);
- Assert.assertArrayEquals(
- new String[]{"ng=:a:b:c", "ng=:b:c:d", "ng=:c:d:e", "ng=:d:e:f", "ng=:e:f:g"},
+ Assertions.assertArrayEquals(
+ new String[] {"ng=:a:b:c", "ng=:b:c:d", "ng=:c:d:e", "ng=:d:e:f", "ng=:e:f:g"},
generator.extractFeatures(TOKENS, Collections.emptyMap()).toArray());
}
@Test
- public void test12gram() throws Exception {
+ void test12gram() throws Exception {
NGramFeatureGenerator generator = new NGramFeatureGenerator(1, 2);
- Assert.assertArrayEquals(
- new String[]{
- "ng=:a", "ng=:a:b",
- "ng=:b", "ng=:b:c",
- "ng=:c", "ng=:c:d",
- "ng=:d", "ng=:d:e",
- "ng=:e", "ng=:e:f",
- "ng=:f", "ng=:f:g",
- "ng=:g"
- },
+ Assertions.assertArrayEquals(
+ new String[] {
+ "ng=:a", "ng=:a:b",
+ "ng=:b", "ng=:b:c",
+ "ng=:c", "ng=:c:d",
+ "ng=:d", "ng=:d:e",
+ "ng=:e", "ng=:e:f",
+ "ng=:f", "ng=:f:g",
+ "ng=:g"
+ },
generator.extractFeatures(TOKENS, Collections.emptyMap()).toArray());
}
@Test
- public void test13gram() throws Exception {
+ void test13gram() throws Exception {
NGramFeatureGenerator generator = new NGramFeatureGenerator(1, 3);
- Assert.assertArrayEquals(
- new String[]{
- "ng=:a", "ng=:a:b", "ng=:a:b:c",
- "ng=:b", "ng=:b:c", "ng=:b:c:d",
- "ng=:c", "ng=:c:d", "ng=:c:d:e",
- "ng=:d", "ng=:d:e", "ng=:d:e:f",
- "ng=:e", "ng=:e:f", "ng=:e:f:g",
- "ng=:f", "ng=:f:g",
- "ng=:g"
- },
+ Assertions.assertArrayEquals(
+ new String[] {
+ "ng=:a", "ng=:a:b", "ng=:a:b:c",
+ "ng=:b", "ng=:b:c", "ng=:b:c:d",
+ "ng=:c", "ng=:c:d", "ng=:c:d:e",
+ "ng=:d", "ng=:d:e", "ng=:d:e:f",
+ "ng=:e", "ng=:e:f", "ng=:e:f:g",
+ "ng=:f", "ng=:f:g",
+ "ng=:g"
+ },
generator.extractFeatures(TOKENS, Collections.emptyMap()).toArray());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/AbstractEvalTest.java b/opennlp-tools/src/test/java/opennlp/tools/eval/AbstractEvalTest.java
index 5f8865b4..e65e17b5 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/AbstractEvalTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/AbstractEvalTest.java
@@ -29,7 +29,7 @@ import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
-import org.junit.Assert;
+import org.junit.jupiter.api.Assertions;
import opennlp.tools.ml.maxent.quasinewton.QNTrainer;
import opennlp.tools.ml.naivebayes.NaiveBayesTrainer;
@@ -55,10 +55,10 @@ public abstract class AbstractEvalTest {
samples.close();
- Assert.assertEquals(checksum, new BigInteger(1, digest.digest()));
+ Assertions.assertEquals(checksum, new BigInteger(1, digest.digest()));
}
-
+
public static void verifyFileChecksum(Path file, BigInteger checksum) throws Exception {
MessageDigest digest = MessageDigest.getInstance(HASH_ALGORITHM);
@@ -71,14 +71,14 @@ public abstract class AbstractEvalTest {
}
}
- Assert.assertEquals(checksum, new BigInteger(1, digest.digest()));
+ Assertions.assertEquals(checksum, new BigInteger(1, digest.digest()));
}
-
+
public static void verifyDirectoryChecksum(Path path, String extension, BigInteger checksum)
throws Exception {
MessageDigest digest = MessageDigest.getInstance(HASH_ALGORITHM);
-
+
final List<Path> paths = Files.walk(path)
.filter(Files::isRegularFile)
.filter(p -> p.toString().endsWith(extension))
@@ -87,7 +87,7 @@ public abstract class AbstractEvalTest {
// Ensure the paths are in a consistent order when
// verifying the file checksums.
Collections.sort(paths);
-
+
for (Path p : paths) {
try (InputStream in = Files.newInputStream(p)) {
byte[] buf = new byte[65536];
@@ -98,8 +98,8 @@ public abstract class AbstractEvalTest {
}
}
- Assert.assertEquals(checksum, new BigInteger(1, digest.digest()));
- }
+ Assertions.assertEquals(checksum, new BigInteger(1, digest.digest()));
+ }
public static File getOpennlpDataDir() throws FileNotFoundException {
final String dataDirectory = System.getProperty("OPENNLP_DATA_DIR");
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/ArvoresDeitadasEval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/ArvoresDeitadasEval.java
index a080e4ee..19280a52 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/ArvoresDeitadasEval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/ArvoresDeitadasEval.java
@@ -22,9 +22,9 @@ import java.io.IOException;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import opennlp.tools.chunker.ChunkerCrossValidator;
import opennlp.tools.chunker.ChunkerFactory;
@@ -75,20 +75,20 @@ public class ArvoresDeitadasEval extends AbstractEvalTest {
return new PlainTextByLineStream(new MarkableFileInputStreamFactory(
new File(getOpennlpDataDir(), corpus)), StandardCharsets.ISO_8859_1);
}
-
- @BeforeClass
- public static void verifyTrainingData() throws Exception {
+
+ @BeforeAll
+ static void verifyTrainingData() throws Exception {
verifyTrainingData(new ADSentenceSampleStream(getLineSample(BOSQUE), false),
new BigInteger("140568367548727787313497336739085858596"));
verifyTrainingData(new ADSentenceSampleStream(getLineSample(FLORESTA_VIRGEM), false),
new BigInteger("2614161133949079191933514776652602918"));
-
+
}
private void sentenceCrossEval(TrainingParameters params,
- double expectedScore) throws IOException {
+ double expectedScore) throws IOException {
ADSentenceSampleStream samples = new ADSentenceSampleStream(
getLineSample(FLORESTA_VIRGEM), false);
@@ -100,11 +100,11 @@ public class ArvoresDeitadasEval extends AbstractEvalTest {
cv.evaluate(samples, 10);
System.out.println(cv.getFMeasure());
- Assert.assertEquals(expectedScore, cv.getFMeasure().getFMeasure(), 0.0001d);
+ Assertions.assertEquals(expectedScore, cv.getFMeasure().getFMeasure(), 0.0001d);
}
private void tokenizerCrossEval(TrainingParameters params,
- double expectedScore) throws IOException {
+ double expectedScore) throws IOException {
ObjectStream<NameSample> nameSamples = new ADNameSampleStream(
getLineSample(FLORESTA_VIRGEM), true);
@@ -125,12 +125,11 @@ public class ArvoresDeitadasEval extends AbstractEvalTest {
validator.evaluate(samples, 10);
System.out.println(validator.getFMeasure());
- Assert.assertEquals(expectedScore, validator.getFMeasure().getFMeasure(),
- 0.0001d);
+ Assertions.assertEquals(expectedScore, validator.getFMeasure().getFMeasure(), 0.0001d);
}
private void chunkerCrossEval(TrainingParameters params,
- double expectedScore) throws IOException {
+ double expectedScore) throws IOException {
ADChunkSampleStream samples = new ADChunkSampleStream(getLineSample(BOSQUE));
@@ -138,83 +137,83 @@ public class ArvoresDeitadasEval extends AbstractEvalTest {
new ChunkerFactory());
cv.evaluate(samples, 10);
- Assert.assertEquals(expectedScore, cv.getFMeasure().getFMeasure(), 0.0001d);
+ Assertions.assertEquals(expectedScore, cv.getFMeasure().getFMeasure(), 0.0001d);
}
@Test
- public void evalPortugueseSentenceDetectorPerceptron() throws IOException {
+ void evalPortugueseSentenceDetectorPerceptron() throws IOException {
sentenceCrossEval(createPerceptronParams(), 0.9892778840089301d);
}
@Test
- public void evalPortugueseSentenceDetectorGis() throws IOException {
+ void evalPortugueseSentenceDetectorGis() throws IOException {
sentenceCrossEval(ModelUtil.createDefaultTrainingParameters(), 0.987270070655111d);
}
@Test
- public void evalPortugueseSentenceDetectorMaxentQn() throws IOException {
+ void evalPortugueseSentenceDetectorMaxentQn() throws IOException {
sentenceCrossEval(createMaxentQnParams(), 0.9924715809679968d);
}
@Test
- public void evalPortugueseSentenceDetectorNaiveBayes() throws IOException {
+ void evalPortugueseSentenceDetectorNaiveBayes() throws IOException {
sentenceCrossEval(createNaiveBayesParams(), 0.9672196206048099d);
}
@Test
- public void evalPortugueseTokenizerPerceptron() throws IOException {
+ void evalPortugueseTokenizerPerceptron() throws IOException {
tokenizerCrossEval(createPerceptronParams(), 0.9994887308380267d);
}
@Test
- public void evalPortugueseTokenizerGis() throws IOException {
+ void evalPortugueseTokenizerGis() throws IOException {
tokenizerCrossEval(ModelUtil.createDefaultTrainingParameters(), 0.9992539405481062d);
}
@Test
- public void evalPortugueseTokenizerMaxentQn() throws IOException {
+ void evalPortugueseTokenizerMaxentQn() throws IOException {
tokenizerCrossEval(createMaxentQnParams(), 0.9996017148748251d);
}
@Test
- public void evalPortugueseTokenizerNaiveBayes() throws IOException {
+ void evalPortugueseTokenizerNaiveBayes() throws IOException {
tokenizerCrossEval(createNaiveBayesParams(), 0.9962358244502717d);
}
@Test
- public void evalPortugueseTokenizerMaxentQnMultipleThreads() throws IOException {
+ void evalPortugueseTokenizerMaxentQnMultipleThreads() throws IOException {
TrainingParameters params = createMaxentQnParams();
params.put("Threads", 4);
tokenizerCrossEval(params, 0.9996017148748251d);
}
@Test
- public void evalPortugueseChunkerPerceptron() throws IOException {
+ void evalPortugueseChunkerPerceptron() throws IOException {
chunkerCrossEval(createPerceptronParams(),
0.9638122825015589d);
}
@Test
- public void evalPortugueseChunkerGis() throws IOException {
+ void evalPortugueseChunkerGis() throws IOException {
chunkerCrossEval(ModelUtil.createDefaultTrainingParameters(),
0.9573860781121228d);
}
@Test
- public void evalPortugueseChunkerGisMultipleThreads() throws IOException {
+ void evalPortugueseChunkerGisMultipleThreads() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
params.put("Threads", 4);
chunkerCrossEval(params, 0.9573860781121228d);
}
@Test
- public void evalPortugueseChunkerQn() throws IOException {
+ void evalPortugueseChunkerQn() throws IOException {
chunkerCrossEval(createMaxentQnParams(),
0.9648211936491359d);
}
@Test
- public void evalPortugueseChunkerQnMultipleThreads() throws IOException {
+ void evalPortugueseChunkerQnMultipleThreads() throws IOException {
TrainingParameters params = createMaxentQnParams();
params.put("Threads", 4);
@@ -223,7 +222,7 @@ public class ArvoresDeitadasEval extends AbstractEvalTest {
}
@Test
- public void evalPortugueseChunkerNaiveBayes() throws IOException {
+ void evalPortugueseChunkerNaiveBayes() throws IOException {
chunkerCrossEval(createNaiveBayesParams(), 0.9041507736043933d);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/Conll00ChunkerEval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/Conll00ChunkerEval.java
index 80a0a74e..3af4f57a 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/Conll00ChunkerEval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/Conll00ChunkerEval.java
@@ -22,10 +22,9 @@ import java.io.IOException;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import opennlp.tools.HighMemoryUsage;
import opennlp.tools.chunker.ChunkSample;
@@ -49,9 +48,9 @@ import opennlp.tools.util.model.ModelUtil;
*/
public class Conll00ChunkerEval extends AbstractEvalTest {
- private static File TEST_DATA_FILE;
+ private static File TEST_DATA_FILE;
private static File TRAIN_DATA_FILE;
-
+
private static ChunkerModel train(File trainFile, TrainingParameters params)
throws IOException {
@@ -70,37 +69,36 @@ public class Conll00ChunkerEval extends AbstractEvalTest {
ChunkerEvaluator evaluator = new ChunkerEvaluator(new ChunkerME(model));
evaluator.evaluate(samples);
- Assert.assertEquals(expectedFMeasure,
- evaluator.getFMeasure().getFMeasure(), 0.0001);
+ Assertions.assertEquals(expectedFMeasure, evaluator.getFMeasure().getFMeasure(), 0.0001);
}
-
- @BeforeClass
- public static void verifyTrainingData() throws Exception {
-
+
+ @BeforeAll
+ static void verifyTrainingData() throws Exception {
+
TEST_DATA_FILE = new File(getOpennlpDataDir(), "conll00/test.txt");
TRAIN_DATA_FILE = new File(getOpennlpDataDir(), "conll00/train.txt");
verifyTrainingData(new ChunkSampleStream(
new PlainTextByLineStream(new MarkableFileInputStreamFactory(TEST_DATA_FILE),
- StandardCharsets.UTF_8)),
+ StandardCharsets.UTF_8)),
new BigInteger("84610235226433393380477662908529306002"));
verifyTrainingData(new ChunkSampleStream(
new PlainTextByLineStream(new MarkableFileInputStreamFactory(TEST_DATA_FILE),
- StandardCharsets.UTF_8)),
- new BigInteger("84610235226433393380477662908529306002"));
+ StandardCharsets.UTF_8)),
+ new BigInteger("84610235226433393380477662908529306002"));
}
@Test
- public void evalEnglishPerceptron() throws IOException {
+ void evalEnglishPerceptron() throws IOException {
ChunkerModel maxentModel = train(TRAIN_DATA_FILE, createPerceptronParams());
eval(maxentModel, TEST_DATA_FILE, 0.9295018353434714d);
}
@Test
- public void evalEnglishMaxentGis() throws IOException {
+ void evalEnglishMaxentGis() throws IOException {
ChunkerModel maxentModel = train(TRAIN_DATA_FILE, ModelUtil.createDefaultTrainingParameters());
eval(maxentModel, TEST_DATA_FILE, 0.9239687473746113d);
@@ -108,8 +106,8 @@ public class Conll00ChunkerEval extends AbstractEvalTest {
// Note: Don't try to run this on your MacBook
@Test
- @Category(HighMemoryUsage.class)
- public void evalEnglishMaxentQn() throws IOException {
+ @HighMemoryUsage
+ void evalEnglishMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
params.put("Threads", 4);
ChunkerModel maxentModel = train(TRAIN_DATA_FILE, params);
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/Conll02NameFinderEval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/Conll02NameFinderEval.java
index 90193d9c..bccf729c 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/Conll02NameFinderEval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/Conll02NameFinderEval.java
@@ -22,9 +22,9 @@ import java.io.IOException;
import java.math.BigInteger;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.Conll02NameSampleStream;
import opennlp.tools.formats.Conll02NameSampleStream.LANGUAGE;
@@ -59,20 +59,20 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
private static File spanishTrainingFile;
private static File spanishTestAFile;
private static File spanishTestBFile;
-
+
private TokenNameFinderModel train(File trainFile, LANGUAGE lang,
- TrainingParameters params, int types) throws IOException {
+ TrainingParameters params, int types) throws IOException {
ObjectStream<NameSample> samples = new Conll02NameSampleStream(
- lang,new MarkableFileInputStreamFactory(trainFile), types);
+ lang, new MarkableFileInputStreamFactory(trainFile), types);
- return NameFinderME.train(lang.toString().toLowerCase(), null, samples,
+ return NameFinderME.train(lang.toString().toLowerCase(), null, samples,
params, new TokenNameFinderFactory());
}
private void eval(TokenNameFinderModel model, File testData, LANGUAGE lang,
- int types, double expectedFMeasure) throws IOException {
+ int types, double expectedFMeasure) throws IOException {
ObjectStream<NameSample> samples = new Conll02NameSampleStream(
lang, new MarkableFileInputStreamFactory(testData), types);
@@ -80,11 +80,11 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
TokenNameFinderEvaluator evaluator = new TokenNameFinderEvaluator(new NameFinderME(model));
evaluator.evaluate(samples);
- Assert.assertEquals(expectedFMeasure, evaluator.getFMeasure().getFMeasure(), 0.0001);
+ Assertions.assertEquals(expectedFMeasure, evaluator.getFMeasure().getFMeasure(), 0.0001);
}
-
- @BeforeClass
- public static void verifyTrainingData() throws Exception {
+
+ @BeforeAll
+ static void verifyTrainingData() throws Exception {
dutchTrainingFile = new File(getOpennlpDataDir(), "conll02/ner/data/ned.train");
dutchTestAFile = new File(getOpennlpDataDir(), "conll02/ner/data/ned.testa");
@@ -92,37 +92,37 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
spanishTrainingFile = new File(getOpennlpDataDir(), "conll02/ner/data/esp.train");
spanishTestAFile = new File(getOpennlpDataDir(), "conll02/ner/data/esp.testa");
spanishTestBFile = new File(getOpennlpDataDir(), "conll02/ner/data/esp.testb");
-
+
verifyTrainingData(new Conll02NameSampleStream(
- LANGUAGE.NLD, new MarkableFileInputStreamFactory(dutchTrainingFile),
- Conll02NameSampleStream.GENERATE_PERSON_ENTITIES),
- new BigInteger("109687424525847313767541246922170457976"));
+ LANGUAGE.NLD, new MarkableFileInputStreamFactory(dutchTrainingFile),
+ Conll02NameSampleStream.GENERATE_PERSON_ENTITIES),
+ new BigInteger("109687424525847313767541246922170457976"));
verifyTrainingData(new Conll02NameSampleStream(
- LANGUAGE.NLD, new MarkableFileInputStreamFactory(dutchTestAFile),
- Conll02NameSampleStream.GENERATE_PERSON_ENTITIES),
- new BigInteger("12942966701628852910737840182656846323"));
+ LANGUAGE.NLD, new MarkableFileInputStreamFactory(dutchTestAFile),
+ Conll02NameSampleStream.GENERATE_PERSON_ENTITIES),
+ new BigInteger("12942966701628852910737840182656846323"));
verifyTrainingData(new Conll02NameSampleStream(
- LANGUAGE.NLD, new MarkableFileInputStreamFactory(dutchTestBFile),
- Conll02NameSampleStream.GENERATE_PERSON_ENTITIES),
- new BigInteger("223206987942490952427646331013509976957"));
-
+ LANGUAGE.NLD, new MarkableFileInputStreamFactory(dutchTestBFile),
+ Conll02NameSampleStream.GENERATE_PERSON_ENTITIES),
+ new BigInteger("223206987942490952427646331013509976957"));
+
verifyTrainingData(new Conll02NameSampleStream(
- LANGUAGE.SPA, new MarkableFileInputStreamFactory(spanishTrainingFile),
- Conll02NameSampleStream.GENERATE_PERSON_ENTITIES),
- new BigInteger("226089384066775461905386060946810714487"));
+ LANGUAGE.SPA, new MarkableFileInputStreamFactory(spanishTrainingFile),
+ Conll02NameSampleStream.GENERATE_PERSON_ENTITIES),
+ new BigInteger("226089384066775461905386060946810714487"));
verifyTrainingData(new Conll02NameSampleStream(
- LANGUAGE.SPA, new MarkableFileInputStreamFactory(spanishTestAFile),
- Conll02NameSampleStream.GENERATE_PERSON_ENTITIES),
- new BigInteger("313879596837181728494732341737647284762"));
+ LANGUAGE.SPA, new MarkableFileInputStreamFactory(spanishTestAFile),
+ Conll02NameSampleStream.GENERATE_PERSON_ENTITIES),
+ new BigInteger("313879596837181728494732341737647284762"));
verifyTrainingData(new Conll02NameSampleStream(
- LANGUAGE.SPA, new MarkableFileInputStreamFactory(spanishTestBFile),
- Conll02NameSampleStream.GENERATE_PERSON_ENTITIES),
- new BigInteger("24037715705115461166858183817622459974"));
+ LANGUAGE.SPA, new MarkableFileInputStreamFactory(spanishTestBFile),
+ Conll02NameSampleStream.GENERATE_PERSON_ENTITIES),
+ new BigInteger("24037715705115461166858183817622459974"));
}
@Test
- public void evalDutchPersonPerceptron() throws IOException {
+ void evalDutchPersonPerceptron() throws IOException {
TrainingParameters params = createPerceptronParams();
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
@@ -136,7 +136,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalDutchPersonMaxentGis() throws IOException {
+ void evalDutchPersonMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
@@ -150,7 +150,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalDutchPersonMaxentQn() throws IOException {
+ void evalDutchPersonMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
@@ -164,7 +164,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalDutchOrganizationPerceptron() throws IOException {
+ void evalDutchOrganizationPerceptron() throws IOException {
TrainingParameters params = createPerceptronParams();
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
@@ -178,7 +178,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalDutchOrganizationMaxentGis() throws IOException {
+ void evalDutchOrganizationMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
@@ -192,7 +192,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalDutchOrganizationMaxentQn() throws IOException {
+ void evalDutchOrganizationMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
@@ -206,7 +206,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalDutchLocationPerceptron() throws IOException {
+ void evalDutchLocationPerceptron() throws IOException {
TrainingParameters params = createPerceptronParams();
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
@@ -220,7 +220,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalDutchLocationMaxentGis() throws IOException {
+ void evalDutchLocationMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
@@ -234,7 +234,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalDutchLocationMaxentQn() throws IOException {
+ void evalDutchLocationMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
@@ -248,7 +248,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalDutchMiscPerceptron() throws IOException {
+ void evalDutchMiscPerceptron() throws IOException {
TrainingParameters params = createPerceptronParams();
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
@@ -262,7 +262,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalDutchMiscMaxentGis() throws IOException {
+ void evalDutchMiscMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
@@ -276,7 +276,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalDutchMiscMaxentQn() throws IOException {
+ void evalDutchMiscMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
@@ -290,7 +290,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalDutchCombinedPerceptron() throws IOException {
+ void evalDutchCombinedPerceptron() throws IOException {
TrainingParameters params = createPerceptronParams();
int combinedType = Conll02NameSampleStream.GENERATE_PERSON_ENTITIES
@@ -301,13 +301,13 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
combinedType);
- eval(maxentModel, dutchTestAFile, LANGUAGE.NLD, combinedType, 0.727808326787117d);
+ eval(maxentModel, dutchTestAFile, LANGUAGE.NLD, combinedType, 0.727808326787117d);
eval(maxentModel, dutchTestBFile, LANGUAGE.NLD, combinedType, 0.7388253638253639d);
}
@Test
- public void evalDutchCombinedMaxentGis() throws IOException {
+ void evalDutchCombinedMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
int combinedType = Conll02NameSampleStream.GENERATE_PERSON_ENTITIES
@@ -318,13 +318,13 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
combinedType);
- eval(maxentModel, dutchTestAFile, LANGUAGE.NLD, combinedType, 0.6673209028459275d);
+ eval(maxentModel, dutchTestAFile, LANGUAGE.NLD, combinedType, 0.6673209028459275d);
eval(maxentModel, dutchTestBFile, LANGUAGE.NLD, combinedType, 0.6984085910208306d);
}
@Test
- public void evalDutchCombinedMaxentQn() throws IOException {
+ void evalDutchCombinedMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
int combinedType = Conll02NameSampleStream.GENERATE_PERSON_ENTITIES
@@ -335,13 +335,13 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
TokenNameFinderModel maxentModel = train(dutchTrainingFile, LANGUAGE.NLD, params,
combinedType);
- eval(maxentModel, dutchTestAFile, LANGUAGE.NLD, combinedType, 0.6999800915787379d);
+ eval(maxentModel, dutchTestAFile, LANGUAGE.NLD, combinedType, 0.6999800915787379d);
eval(maxentModel, dutchTestBFile, LANGUAGE.NLD, combinedType, 0.7101430258496261d);
}
@Test
- public void evalSpanishPersonPerceptron() throws IOException {
+ void evalSpanishPersonPerceptron() throws IOException {
TrainingParameters params = createPerceptronParams();
TokenNameFinderModel maxentModel = train(spanishTrainingFile, LANGUAGE.SPA, params,
@@ -355,7 +355,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishPersonMaxentGis() throws IOException {
+ void evalSpanishPersonMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
TokenNameFinderModel maxentModel = train(spanishTrainingFile, LANGUAGE.SPA, params,
@@ -370,7 +370,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
@Test
- public void evalSpanishPersonMaxentQn() throws IOException {
+ void evalSpanishPersonMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
TokenNameFinderModel maxentModel = train(spanishTrainingFile, LANGUAGE.SPA, params,
@@ -384,7 +384,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishOrganizationPerceptron() throws IOException {
+ void evalSpanishOrganizationPerceptron() throws IOException {
TrainingParameters params = createPerceptronParams();
TokenNameFinderModel maxentModel = train(spanishTrainingFile, LANGUAGE.SPA, params,
@@ -398,7 +398,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishOrganizationMaxentGis() throws IOException {
+ void evalSpanishOrganizationMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
TokenNameFinderModel maxentModel = train(spanishTrainingFile, LANGUAGE.SPA, params,
@@ -412,7 +412,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishOrganizationMaxentQn() throws IOException {
+ void evalSpanishOrganizationMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
TokenNameFinderModel maxentModel = train(spanishTrainingFile, LANGUAGE.SPA, params,
@@ -426,7 +426,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishLocationPerceptron() throws IOException {
+ void evalSpanishLocationPerceptron() throws IOException {
TrainingParameters params = createPerceptronParams();
TokenNameFinderModel maxentModel = train(spanishTrainingFile, LANGUAGE.SPA, params,
@@ -440,7 +440,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishLocationMaxentGis() throws IOException {
+ void evalSpanishLocationMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
TokenNameFinderModel maxentModel = train(spanishTrainingFile, LANGUAGE.SPA, params,
@@ -454,7 +454,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishLocationMaxentQn() throws IOException {
+ void evalSpanishLocationMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
TokenNameFinderModel maxentModel = train(spanishTrainingFile, LANGUAGE.SPA, params,
@@ -468,7 +468,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishMiscPerceptron() throws IOException {
+ void evalSpanishMiscPerceptron() throws IOException {
TrainingParameters params = createPerceptronParams();
TokenNameFinderModel maxentModel = train(spanishTrainingFile, LANGUAGE.SPA, params,
@@ -482,7 +482,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishMiscMaxentGis() throws IOException {
+ void evalSpanishMiscMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
TokenNameFinderModel maxentModel = train(spanishTrainingFile, LANGUAGE.SPA, params,
@@ -496,7 +496,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishMiscMaxentQn() throws IOException {
+ void evalSpanishMiscMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
TokenNameFinderModel maxentModel = train(spanishTrainingFile, LANGUAGE.SPA, params,
@@ -510,7 +510,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishCombinedPerceptron() throws IOException {
+ void evalSpanishCombinedPerceptron() throws IOException {
TrainingParameters params = createPerceptronParams();
int combinedType = Conll02NameSampleStream.GENERATE_PERSON_ENTITIES
@@ -527,7 +527,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishCombinedMaxentGis() throws IOException {
+ void evalSpanishCombinedMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
int combinedType = Conll02NameSampleStream.GENERATE_PERSON_ENTITIES
@@ -544,7 +544,7 @@ public class Conll02NameFinderEval extends AbstractEvalTest {
}
@Test
- public void evalSpanishCombinedMaxentQn() throws IOException {
+ void evalSpanishCombinedMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
int combinedType = Conll02NameSampleStream.GENERATE_PERSON_ENTITIES
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/ConllXPosTaggerEval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/ConllXPosTaggerEval.java
index 64aa5a0b..ac71faf8 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/ConllXPosTaggerEval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/ConllXPosTaggerEval.java
@@ -22,10 +22,9 @@ import java.io.IOException;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import opennlp.tools.HighMemoryUsage;
import opennlp.tools.formats.ConllXPOSSampleStream;
@@ -60,7 +59,7 @@ import opennlp.tools.util.model.ModelUtil;
public class ConllXPosTaggerEval extends AbstractEvalTest {
private POSModel train(File trainFile, String lang,
- TrainingParameters params) throws IOException {
+ TrainingParameters params) throws IOException {
ObjectStream<POSSample> samples =
new ConllXPOSSampleStream(new MarkableFileInputStreamFactory(trainFile), StandardCharsets.UTF_8);
@@ -69,7 +68,7 @@ public class ConllXPosTaggerEval extends AbstractEvalTest {
}
private void eval(POSModel model, File testData,
- double expectedAccuracy) throws IOException {
+ double expectedAccuracy) throws IOException {
ObjectStream<POSSample> samples = new ConllXPOSSampleStream(
new MarkableFileInputStreamFactory(testData), StandardCharsets.UTF_8);
@@ -77,51 +76,55 @@ public class ConllXPosTaggerEval extends AbstractEvalTest {
POSEvaluator evaluator = new POSEvaluator(new POSTaggerME(model));
evaluator.evaluate(samples);
- Assert.assertEquals(expectedAccuracy, evaluator.getWordAccuracy(), 0.0001);
+ Assertions.assertEquals(expectedAccuracy, evaluator.getWordAccuracy(), 0.0001);
}
- @BeforeClass
- public static void verifyTrainingData() throws Exception {
-
+ @BeforeAll
+ static void verifyTrainingData() throws Exception {
+
verifyTrainingData(new ConllXPOSSampleStream(
- new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
- "conllx/data/danish/ddt/train/danish_ddt_train.conll")), StandardCharsets.UTF_8),
+ new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
+ "conllx/data/danish/ddt/train/danish_ddt_train.conll")), StandardCharsets.UTF_8),
new BigInteger("30795670444498617202001550516753630016"));
-
+
verifyTrainingData(new ConllXPOSSampleStream(
- new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
- "conllx/data/danish/ddt/test/danish_ddt_test.conll")), StandardCharsets.UTF_8),
- new BigInteger("314104267846430512372780024568104131337"));
-
+ new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
+ "conllx/data/danish/ddt/test/danish_ddt_test.conll")), StandardCharsets.UTF_8),
+ new BigInteger("314104267846430512372780024568104131337"));
+
verifyTrainingData(new ConllXPOSSampleStream(
- new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
- "conllx/data/dutch/alpino/train/dutch_alpino_train.conll")), StandardCharsets.UTF_8),
- new BigInteger("109328245573060521952850454797286933887"));
+ new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
+ "conllx/data/dutch/alpino/train/dutch_alpino_train.conll")), StandardCharsets.UTF_8),
+ new BigInteger("109328245573060521952850454797286933887"));
verifyTrainingData(new ConllXPOSSampleStream(
- new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
- "conllx/data/dutch/alpino/test/dutch_alpino_test.conll")), StandardCharsets.UTF_8),
- new BigInteger("132343141132816640849897155456916243039"));
+ new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
+ "conllx/data/dutch/alpino/test/dutch_alpino_test.conll")),
+ StandardCharsets.UTF_8),
+ new BigInteger("132343141132816640849897155456916243039"));
verifyTrainingData(new ConllXPOSSampleStream(
- new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
- "conllx/data/portuguese/bosque/treebank/portuguese_bosque_train.conll")), StandardCharsets.UTF_8),
- new BigInteger("9504382474772307801979515927230835901"));
+ new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
+ "conllx/data/portuguese/bosque/treebank/portuguese_bosque_train.conll")),
+ StandardCharsets.UTF_8),
+ new BigInteger("9504382474772307801979515927230835901"));
verifyTrainingData(new ConllXPOSSampleStream(
- new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
- "conllx/data/swedish/talbanken05/train/swedish_talbanken05_train.conll")), StandardCharsets.UTF_8),
- new BigInteger("175256039869578311901318972681191182910"));
+ new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
+ "conllx/data/swedish/talbanken05/train/swedish_talbanken05_train.conll")),
+ StandardCharsets.UTF_8),
+ new BigInteger("175256039869578311901318972681191182910"));
verifyTrainingData(new ConllXPOSSampleStream(
- new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
- "conllx/data/swedish/talbanken05/test/swedish_talbanken05_test.conll")), StandardCharsets.UTF_8),
- new BigInteger("128378790384268106811747599235147991544"));
-
+ new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
+ "conllx/data/swedish/talbanken05/test/swedish_talbanken05_test.conll")),
+ StandardCharsets.UTF_8),
+ new BigInteger("128378790384268106811747599235147991544"));
+
}
@Test
- public void evalDanishMaxentGis() throws IOException {
+ void evalDanishMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
POSModel maxentModel = train(new File(getOpennlpDataDir(),
@@ -132,7 +135,7 @@ public class ConllXPosTaggerEval extends AbstractEvalTest {
}
@Test
- public void evalDanishMaxentQn() throws IOException {
+ void evalDanishMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
POSModel maxentModel = train(new File(getOpennlpDataDir(),
@@ -143,7 +146,7 @@ public class ConllXPosTaggerEval extends AbstractEvalTest {
}
@Test
- public void evalDutchMaxentGis() throws IOException {
+ void evalDutchMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
POSModel maxentModel = train(new File(getOpennlpDataDir(),
@@ -154,8 +157,8 @@ public class ConllXPosTaggerEval extends AbstractEvalTest {
}
@Test
- @Category(HighMemoryUsage.class)
- public void evalDutchMaxentQn() throws IOException {
+ @HighMemoryUsage
+ void evalDutchMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
POSModel maxentModel = train(new File(getOpennlpDataDir(),
@@ -166,7 +169,7 @@ public class ConllXPosTaggerEval extends AbstractEvalTest {
}
@Test
- public void evalPortugueseMaxentGis() throws IOException {
+ void evalPortugueseMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
POSModel maxentModel = train(new File(getOpennlpDataDir(),
@@ -177,7 +180,7 @@ public class ConllXPosTaggerEval extends AbstractEvalTest {
}
@Test
- public void evalPortugueseMaxentQn() throws IOException {
+ void evalPortugueseMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
POSModel maxentModel = train(new File(getOpennlpDataDir(),
@@ -188,7 +191,7 @@ public class ConllXPosTaggerEval extends AbstractEvalTest {
}
@Test
- public void evalSwedishMaxentGis() throws IOException {
+ void evalSwedishMaxentGis() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
POSModel maxentModel = train(new File(getOpennlpDataDir(),
@@ -199,7 +202,7 @@ public class ConllXPosTaggerEval extends AbstractEvalTest {
}
@Test
- public void evalSwedishMaxentQn() throws IOException {
+ void evalSwedishMaxentQn() throws IOException {
TrainingParameters params = createMaxentQnParams();
POSModel maxentModel = train(new File(getOpennlpDataDir(),
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/OntoNotes4NameFinderEval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/OntoNotes4NameFinderEval.java
index 1ce225af..dddb6546 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/OntoNotes4NameFinderEval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/OntoNotes4NameFinderEval.java
@@ -30,9 +30,9 @@ import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Map;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import opennlp.tools.cmdline.namefind.TokenNameFinderTrainerTool;
import opennlp.tools.formats.DirectorySampleStream;
@@ -80,39 +80,39 @@ public class OntoNotes4NameFinderEval extends AbstractEvalTest {
cv.evaluate(filteredSamples, 5);
- Assert.assertEquals(expectedScore, cv.getFMeasure().getFMeasure(), 0.001d);
+ Assertions.assertEquals(expectedScore, cv.getFMeasure().getFMeasure(), 0.001d);
}
}
- @BeforeClass
- public static void verifyTrainingData() throws Exception {
+ @BeforeAll
+ static void verifyTrainingData() throws Exception {
verifyDirectoryChecksum(new File(getOpennlpDataDir(), "ontonotes4/data/files/data/english").toPath(),
".name", new BigInteger("74675117716526375898817028829433420680"));
}
@Test
- public void evalEnglishPersonNameFinder() throws IOException {
+ void evalEnglishPersonNameFinder() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
params.put("Threads", "4");
crossEval(params, "person", 0.822014580552418d);
}
@Test
- public void evalEnglishDateNameFinder() throws IOException {
+ void evalEnglishDateNameFinder() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
params.put("Threads", "4");
crossEval(params, "date", 0.8043873255040994d);
}
@Test
- public void evalAllTypesNameFinder() throws IOException {
+ void evalAllTypesNameFinder() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
params.put("Threads", "4");
crossEval(params, null, 0.8014054850253551d);
}
@Test
- public void evalAllTypesWithPOSNameFinder() throws IOException, URISyntaxException {
+ void evalAllTypesWithPOSNameFinder() throws IOException, URISyntaxException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
params.put("Threads", "4");
@@ -136,7 +136,7 @@ public class OntoNotes4NameFinderEval extends AbstractEvalTest {
StandardCopyOption.REPLACE_EXISTING);
Map<String, Object> resources = TokenNameFinderTrainerTool.loadResources(resourcesPath.toFile(),
- Paths.get(this.getClass().getResource("ner-en_pos-features.xml").toURI()).toFile());
+ Paths.get(this.getClass().getResource("ner-en_pos-features.xml").toURI()).toFile());
try (ObjectStream<NameSample> samples = createNameSampleStream()) {
@@ -149,7 +149,7 @@ public class OntoNotes4NameFinderEval extends AbstractEvalTest {
cv.evaluate(filteredSamples, 5);
- Assert.assertEquals(0.8070226153653437d, cv.getFMeasure().getFMeasure(), 0.001d);
+ Assertions.assertEquals(0.8070226153653437d, cv.getFMeasure().getFMeasure(), 0.001d);
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/OntoNotes4ParserEval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/OntoNotes4ParserEval.java
index 5229b36d..9d877464 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/OntoNotes4ParserEval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/OntoNotes4ParserEval.java
@@ -24,9 +24,9 @@ import java.io.InputStreamReader;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.DirectorySampleStream;
import opennlp.tools.formats.convert.FileToStringSampleStream;
@@ -65,17 +65,17 @@ public class OntoNotes4ParserEval extends AbstractEvalTest {
ParserCrossValidator cv = new ParserCrossValidator("eng", params, rules, ParserType.CHUNKING);
cv.evaluate(samples, 5);
- Assert.assertEquals(expectedScore, cv.getFMeasure().getFMeasure(), 0.0001d);
+ Assertions.assertEquals(expectedScore, cv.getFMeasure().getFMeasure(), 0.0001d);
}
}
- @BeforeClass
- public static void verifyTrainingData() throws Exception {
+ @BeforeAll
+ static void verifyTrainingData() throws Exception {
verifyTrainingData(createParseSampleStream(), new BigInteger("83833369887442127665956850482411800415"));
}
@Test
- public void evalEnglishMaxent() throws IOException {
+ void evalEnglishMaxent() throws IOException {
HeadRules headRules;
try (InputStream headRulesIn =
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/OntoNotes4PosTaggerEval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/OntoNotes4PosTaggerEval.java
index a373192b..d3c4c8a5 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/OntoNotes4PosTaggerEval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/OntoNotes4PosTaggerEval.java
@@ -23,9 +23,9 @@ import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.DirectorySampleStream;
import opennlp.tools.formats.convert.FileToStringSampleStream;
@@ -63,17 +63,17 @@ public class OntoNotes4PosTaggerEval extends AbstractEvalTest {
POSTaggerCrossValidator cv = new POSTaggerCrossValidator("eng", params, new POSTaggerFactory());
cv.evaluate(samples, 5);
- Assert.assertEquals(expectedScore, cv.getWordAccuracy(), 0.0001d);
+ Assertions.assertEquals(expectedScore, cv.getWordAccuracy(), 0.0001d);
}
}
- @BeforeClass
- public static void verifyTrainingData() throws Exception {
+ @BeforeAll
+ static void verifyTrainingData() throws Exception {
verifyTrainingData(createPOSSampleStream(), new BigInteger("300430765214895870888056958221353356972"));
}
-
+
@Test
- public void evalEnglishMaxentTagger() throws IOException {
+ void evalEnglishMaxentTagger() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
params.put("Threads", "4");
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/SourceForgeModelEval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/SourceForgeModelEval.java
index fd116d2f..df0d6f76 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/SourceForgeModelEval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/SourceForgeModelEval.java
@@ -28,9 +28,9 @@ import java.util.Collections;
import java.util.List;
import java.util.Objects;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import opennlp.tools.chunker.Chunker;
import opennlp.tools.chunker.ChunkerME;
@@ -125,7 +125,7 @@ public class SourceForgeModelEval extends AbstractEvalTest {
private final Tokenizer tokenizer;
private LeipzigTestSampleStream(int sentencePerDocument, Tokenizer tokenizer, InputStreamFactory in)
- throws IOException {
+ throws IOException {
super(new PlainTextByLineStream(in, StandardCharsets.UTF_8));
this.sentencePerDocument = sentencePerDocument;
this.tokenizer = tokenizer;
@@ -159,19 +159,19 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
}
- @BeforeClass
- public static void verifyTrainingData() throws Exception {
+ @BeforeAll
+ static void verifyTrainingData() throws Exception {
verifyTrainingData(new LeipzigTestSampleStream(25, SimpleTokenizer.INSTANCE,
new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
- "leipzig/eng_news_2010_300K-sentences.txt"))),
+ "leipzig/eng_news_2010_300K-sentences.txt"))),
new BigInteger("172812413483919324675263268750583851712"));
}
@Test
- public void evalSentenceModel() throws Exception {
+ void evalSentenceModel() throws Exception {
SentenceModel model = new SentenceModel(
- new File(getOpennlpDataDir(), "models-sf/en-sent.bin"));
+ new File(getOpennlpDataDir(), "models-sf/en-sent.bin"));
MessageDigest digest = MessageDigest.getInstance(HASH_ALGORITHM);
@@ -180,9 +180,9 @@ public class SourceForgeModelEval extends AbstractEvalTest {
StringBuilder text = new StringBuilder();
try (ObjectStream<LeipzigTestSample> lineBatches = new LeipzigTestSampleStream(25,
- SimpleTokenizer.INSTANCE,
- new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
- "leipzig/eng_news_2010_300K-sentences.txt")))) {
+ SimpleTokenizer.INSTANCE,
+ new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
+ "leipzig/eng_news_2010_300K-sentences.txt")))) {
LeipzigTestSample lineBatch;
while ((lineBatch = lineBatches.read()) != null) {
@@ -196,28 +196,28 @@ public class SourceForgeModelEval extends AbstractEvalTest {
digest.update(sentence.getBytes(StandardCharsets.UTF_8));
}
- Assert.assertEquals(new BigInteger("228544068397077998410949364710969159291"),
- new BigInteger(1, digest.digest()));
+ Assertions.assertEquals(new BigInteger("228544068397077998410949364710969159291"),
+ new BigInteger(1, digest.digest()));
}
@Test
- public void evalTokenModel() throws Exception {
+ void evalTokenModel() throws Exception {
// the input stream is currently tokenized, we should detokenize it again,
// (or extend to pass in tokenizer, then whitespace tokenizer can be passed)
// and then tokenize it here
TokenizerModel model = new TokenizerModel(
- new File(getOpennlpDataDir(), "models-sf/en-token.bin"));
+ new File(getOpennlpDataDir(), "models-sf/en-token.bin"));
MessageDigest digest = MessageDigest.getInstance(HASH_ALGORITHM);
Tokenizer tokenizer = new TokenizerME(model);
try (ObjectStream<LeipzigTestSample> lines = new LeipzigTestSampleStream(1,
- WhitespaceTokenizer.INSTANCE,
- new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
- "leipzig/eng_news_2010_300K-sentences.txt")))) {
+ WhitespaceTokenizer.INSTANCE,
+ new MarkableFileInputStreamFactory(new File(getOpennlpDataDir(),
+ "leipzig/eng_news_2010_300K-sentences.txt")))) {
LeipzigTestSample line;
while ((line = lines.read()) != null) {
@@ -228,8 +228,8 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
}
- Assert.assertEquals(new BigInteger("180602607571756839321060482558626151930"),
- new BigInteger(1, digest.digest()));
+ Assertions.assertEquals(new BigInteger("180602607571756839321060482558626151930"),
+ new BigInteger(1, digest.digest()));
}
private ObjectStream<LeipzigTestSample> createLineWiseStream() throws IOException {
@@ -259,11 +259,11 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
}
- Assert.assertEquals(expectedHash, new BigInteger(1, digest.digest()));
+ Assertions.assertEquals(expectedHash, new BigInteger(1, digest.digest()));
}
@Test
- public void evalNerDateModel() throws Exception {
+ void evalNerDateModel() throws Exception {
TokenNameFinderModel personModel = new TokenNameFinderModel(
new File(getOpennlpDataDir(), "models-sf/en-ner-date.bin"));
@@ -271,7 +271,7 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
@Test
- public void evalNerLocationModel() throws Exception {
+ void evalNerLocationModel() throws Exception {
TokenNameFinderModel personModel = new TokenNameFinderModel(
new File(getOpennlpDataDir(), "models-sf/en-ner-location.bin"));
@@ -279,7 +279,7 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
@Test
- public void evalNerMoneyModel() throws Exception {
+ void evalNerMoneyModel() throws Exception {
TokenNameFinderModel personModel = new TokenNameFinderModel(
new File(getOpennlpDataDir(), "models-sf/en-ner-money.bin"));
@@ -287,7 +287,7 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
@Test
- public void evalNerOrganizationModel() throws Exception {
+ void evalNerOrganizationModel() throws Exception {
TokenNameFinderModel personModel = new TokenNameFinderModel(
new File(getOpennlpDataDir(), "models-sf/en-ner-organization.bin"));
@@ -295,7 +295,7 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
@Test
- public void evalNerPercentageModel() throws Exception {
+ void evalNerPercentageModel() throws Exception {
TokenNameFinderModel personModel = new TokenNameFinderModel(
new File(getOpennlpDataDir(), "models-sf/en-ner-percentage.bin"));
@@ -303,7 +303,7 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
@Test
- public void evalNerPersonModel() throws Exception {
+ void evalNerPersonModel() throws Exception {
TokenNameFinderModel personModel = new TokenNameFinderModel(
new File(getOpennlpDataDir(), "models-sf/en-ner-person.bin"));
@@ -311,7 +311,7 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
@Test
- public void evalNerTimeModel() throws Exception {
+ void evalNerTimeModel() throws Exception {
TokenNameFinderModel personModel = new TokenNameFinderModel(
new File(getOpennlpDataDir(), "models-sf/en-ner-time.bin"));
@@ -319,7 +319,7 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
@Test
- public void evalChunkerModel() throws Exception {
+ void evalChunkerModel() throws Exception {
MessageDigest digest = MessageDigest.getInstance(HASH_ALGORITHM);
@@ -342,7 +342,7 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
}
- Assert.assertEquals(new BigInteger("226003515785585284478071030961407561943"),
+ Assertions.assertEquals(new BigInteger("226003515785585284478071030961407561943"),
new BigInteger(1, digest.digest()));
}
@@ -366,11 +366,11 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
}
- Assert.assertEquals(expectedHash, new BigInteger(1, digest.digest()));
+ Assertions.assertEquals(expectedHash, new BigInteger(1, digest.digest()));
}
@Test
- public void evalMaxentModel() throws Exception {
+ void evalMaxentModel() throws Exception {
POSModel maxentModel = new POSModel(
new File(getOpennlpDataDir(), "models-sf/en-pos-maxent.bin"));
@@ -378,7 +378,7 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
@Test
- public void evalPerceptronModel() throws Exception {
+ void evalPerceptronModel() throws Exception {
POSModel perceptronModel = new POSModel(
new File(getOpennlpDataDir(), "models-sf/en-pos-perceptron.bin"));
@@ -386,7 +386,7 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
@Test
- public void evalParserModel() throws Exception {
+ void evalParserModel() throws Exception {
ParserModel model = new ParserModel(
new File(getOpennlpDataDir(), "models-sf/en-parser-chunking.bin"));
@@ -410,7 +410,7 @@ public class SourceForgeModelEval extends AbstractEvalTest {
}
}
- Assert.assertEquals(new BigInteger("68039262350771988792233880373220954061"),
+ Assertions.assertEquals(new BigInteger("68039262350771988792233880373220954061"),
new BigInteger(1, digest.digest()));
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/UniversalDependency20Eval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/UniversalDependency20Eval.java
index 3ab1a7d3..dd3dd00e 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/UniversalDependency20Eval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/UniversalDependency20Eval.java
@@ -21,9 +21,9 @@ import java.io.File;
import java.io.IOException;
import java.math.BigInteger;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.conllu.ConlluLemmaSampleStream;
import opennlp.tools.formats.conllu.ConlluStream;
@@ -40,14 +40,14 @@ import opennlp.tools.util.model.ModelUtil;
public class UniversalDependency20Eval extends AbstractEvalTest {
- private static File SPA_ANCORA_TRAIN;
+ private static File SPA_ANCORA_TRAIN;
private static File SPA_ANCORA_DEV;
- @BeforeClass
- public static void verifyTrainingData() throws Exception {
+ @BeforeAll
+ static void verifyTrainingData() throws Exception {
- SPA_ANCORA_TRAIN = new File(getOpennlpDataDir(),"ud20/UD_Spanish-AnCora/es_ancora-ud-train.conllu");
- SPA_ANCORA_DEV = new File(getOpennlpDataDir(),"ud20/UD_Spanish-AnCora/es_ancora-ud-dev.conllu");
+ SPA_ANCORA_TRAIN = new File(getOpennlpDataDir(), "ud20/UD_Spanish-AnCora/es_ancora-ud-train.conllu");
+ SPA_ANCORA_DEV = new File(getOpennlpDataDir(), "ud20/UD_Spanish-AnCora/es_ancora-ud-dev.conllu");
verifyFileChecksum(SPA_ANCORA_TRAIN.toPath(),
new BigInteger("224942804200733453179524127037951530195"));
@@ -56,7 +56,7 @@ public class UniversalDependency20Eval extends AbstractEvalTest {
}
private double trainAndEval(String lang, File trainFile, TrainingParameters params,
- File evalFile) throws IOException {
+ File evalFile) throws IOException {
ConlluTagset tagset = ConlluTagset.X;
ObjectStream<LemmaSample> trainSamples = new ConlluLemmaSampleStream(new ConlluStream(
@@ -72,13 +72,13 @@ public class UniversalDependency20Eval extends AbstractEvalTest {
}
@Test
- public void trainAndEvalSpanishAncora() throws IOException {
+ void trainAndEvalSpanishAncora() throws IOException {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
params.put("Threads", "4");
double wordAccuracy = trainAndEval("spa", SPA_ANCORA_TRAIN,
params, SPA_ANCORA_DEV);
- Assert.assertEquals(0.9057341692068787d, wordAccuracy, ACCURACY_DELTA);
+ Assertions.assertEquals(0.9057341692068787d, wordAccuracy, ACCURACY_DELTA);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/Conll02NameSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/Conll02NameSampleStreamTest.java
index bc0d4fda..8d86ed36 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/Conll02NameSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/Conll02NameSampleStreamTest.java
@@ -19,8 +19,8 @@ package opennlp.tools.formats;
import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.Conll02NameSampleStream.LANGUAGE;
import opennlp.tools.namefind.NameSample;
@@ -42,52 +42,52 @@ public class Conll02NameSampleStreamTest {
}
@Test
- public void testParsingSpanishSample() throws IOException {
+ void testParsingSpanishSample() throws IOException {
ObjectStream<NameSample> sampleStream = openData(LANGUAGE.SPA, "conll2002-es.sample");
NameSample personName = sampleStream.read();
- Assert.assertNotNull(personName);
+ Assertions.assertNotNull(personName);
- Assert.assertEquals(5, personName.getSentence().length);
- Assert.assertEquals(1, personName.getNames().length);
- Assert.assertEquals(true, personName.isClearAdaptiveDataSet());
+ Assertions.assertEquals(5, personName.getSentence().length);
+ Assertions.assertEquals(1, personName.getNames().length);
+ Assertions.assertEquals(true, personName.isClearAdaptiveDataSet());
Span nameSpan = personName.getNames()[0];
- Assert.assertEquals(0, nameSpan.getStart());
- Assert.assertEquals(4, nameSpan.getEnd());
- Assert.assertEquals(true, personName.isClearAdaptiveDataSet());
+ Assertions.assertEquals(0, nameSpan.getStart());
+ Assertions.assertEquals(4, nameSpan.getEnd());
+ Assertions.assertEquals(true, personName.isClearAdaptiveDataSet());
- Assert.assertEquals(0, sampleStream.read().getNames().length);
+ Assertions.assertEquals(0, sampleStream.read().getNames().length);
- Assert.assertNull(sampleStream.read());
+ Assertions.assertNull(sampleStream.read());
}
@Test
- public void testParsingDutchSample() throws IOException {
+ void testParsingDutchSample() throws IOException {
ObjectStream<NameSample> sampleStream = openData(LANGUAGE.NLD, "conll2002-nl.sample");
NameSample personName = sampleStream.read();
- Assert.assertEquals(0, personName.getNames().length);
- Assert.assertTrue(personName.isClearAdaptiveDataSet());
+ Assertions.assertEquals(0, personName.getNames().length);
+ Assertions.assertTrue(personName.isClearAdaptiveDataSet());
personName = sampleStream.read();
- Assert.assertFalse(personName.isClearAdaptiveDataSet());
+ Assertions.assertFalse(personName.isClearAdaptiveDataSet());
- Assert.assertNull(sampleStream.read());
+ Assertions.assertNull(sampleStream.read());
}
@Test
- public void testReset() throws IOException {
+ void testReset() throws IOException {
ObjectStream<NameSample> sampleStream = openData(LANGUAGE.NLD, "conll2002-nl.sample");
NameSample sample = sampleStream.read();
sampleStream.reset();
- Assert.assertEquals(sample, sampleStream.read());
+ Assertions.assertEquals(sample, sampleStream.read());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/Conll03NameSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/Conll03NameSampleStreamTest.java
index 42ce7153..5456dfe8 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/Conll03NameSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/Conll03NameSampleStreamTest.java
@@ -19,8 +19,8 @@ package opennlp.tools.formats;
import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.Conll03NameSampleStream.LANGUAGE;
import opennlp.tools.namefind.NameSample;
@@ -45,65 +45,69 @@ public class Conll03NameSampleStreamTest {
}
@Test
- public void testParsingEnglishSample() throws IOException {
+ void testParsingEnglishSample() throws IOException {
ObjectStream<NameSample> sampleStream = openData(LANGUAGE.EN, ENGLISH_SAMPLE);
NameSample personName = sampleStream.read();
- Assert.assertNotNull(personName);
+ Assertions.assertNotNull(personName);
- Assert.assertEquals(9, personName.getSentence().length);
- Assert.assertEquals(0, personName.getNames().length);
- Assert.assertEquals(true, personName.isClearAdaptiveDataSet());
+ Assertions.assertEquals(9, personName.getSentence().length);
+ Assertions.assertEquals(0, personName.getNames().length);
+ Assertions.assertEquals(true, personName.isClearAdaptiveDataSet());
personName = sampleStream.read();
- Assert.assertNotNull(personName);
+ Assertions.assertNotNull(personName);
- Assert.assertEquals(2, personName.getSentence().length);
- Assert.assertEquals(1, personName.getNames().length);
- Assert.assertEquals(false, personName.isClearAdaptiveDataSet());
+ Assertions.assertEquals(2, personName.getSentence().length);
+ Assertions.assertEquals(1, personName.getNames().length);
+ Assertions.assertEquals(false, personName.isClearAdaptiveDataSet());
Span nameSpan = personName.getNames()[0];
- Assert.assertEquals(0, nameSpan.getStart());
- Assert.assertEquals(2, nameSpan.getEnd());
+ Assertions.assertEquals(0, nameSpan.getStart());
+ Assertions.assertEquals(2, nameSpan.getEnd());
- Assert.assertNull(sampleStream.read());
+ Assertions.assertNull(sampleStream.read());
}
- @Test(expected = IOException.class)
- public void testParsingEnglishSampleWithGermanAsLanguage() throws IOException {
- ObjectStream<NameSample> sampleStream = openData(LANGUAGE.DE, ENGLISH_SAMPLE);
- sampleStream.read();
+ @Test
+ void testParsingEnglishSampleWithGermanAsLanguage() {
+ Assertions.assertThrows(IOException.class, () -> {
+ ObjectStream<NameSample> sampleStream = openData(LANGUAGE.DE, ENGLISH_SAMPLE);
+ sampleStream.read();
+ });
}
- @Test(expected = IOException.class)
- public void testParsingGermanSampleWithEnglishAsLanguage() throws IOException {
- ObjectStream<NameSample> sampleStream = openData(LANGUAGE.EN, GERMAN_SAMPLE);
- sampleStream.read();
+ @Test
+ void testParsingGermanSampleWithEnglishAsLanguage() {
+ Assertions.assertThrows(IOException.class, () -> {
+ ObjectStream<NameSample> sampleStream = openData(LANGUAGE.EN, GERMAN_SAMPLE);
+ sampleStream.read();
+ });
}
@Test
- public void testParsingGermanSample() throws IOException {
+ void testParsingGermanSample() throws IOException {
ObjectStream<NameSample> sampleStream = openData(LANGUAGE.DE, GERMAN_SAMPLE);
NameSample personName = sampleStream.read();
- Assert.assertNotNull(personName);
+ Assertions.assertNotNull(personName);
- Assert.assertEquals(5, personName.getSentence().length);
- Assert.assertEquals(0, personName.getNames().length);
- Assert.assertEquals(true, personName.isClearAdaptiveDataSet());
+ Assertions.assertEquals(5, personName.getSentence().length);
+ Assertions.assertEquals(0, personName.getNames().length);
+ Assertions.assertEquals(true, personName.isClearAdaptiveDataSet());
}
@Test
- public void testReset() throws IOException {
+ void testReset() throws IOException {
ObjectStream<NameSample> sampleStream = openData(LANGUAGE.DE, GERMAN_SAMPLE);
NameSample sample = sampleStream.read();
sampleStream.reset();
- Assert.assertEquals(sample, sampleStream.read());
+ Assertions.assertEquals(sample, sampleStream.read());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/ConllXPOSSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/ConllXPOSSampleStreamTest.java
index ca20f7eb..2bb3b67e 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/ConllXPOSSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/ConllXPOSSampleStreamTest.java
@@ -20,19 +20,17 @@ package opennlp.tools.formats;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.postag.POSSample;
import opennlp.tools.util.InputStreamFactory;
import opennlp.tools.util.ObjectStream;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-
public class ConllXPOSSampleStreamTest {
@Test
- public void testParsingSample() throws IOException {
+ void testParsingSample() throws IOException {
InputStreamFactory in = new ResourceAsStreamFactory(ConllXPOSSampleStreamTest.class,
"/opennlp/tools/formats/conllx.sample");
@@ -43,120 +41,120 @@ public class ConllXPOSSampleStreamTest {
String[] aSentence = a.getSentence();
String[] aTags = a.getTags();
- assertEquals(22, aSentence.length);
- assertEquals(22, aTags.length);
+ Assertions.assertEquals(22, aSentence.length);
+ Assertions.assertEquals(22, aTags.length);
- assertEquals("To", aSentence[0]);
- assertEquals("AC", aTags[0]);
+ Assertions.assertEquals("To", aSentence[0]);
+ Assertions.assertEquals("AC", aTags[0]);
- assertEquals("kendte", aSentence[1]);
- assertEquals("AN", aTags[1]);
+ Assertions.assertEquals("kendte", aSentence[1]);
+ Assertions.assertEquals("AN", aTags[1]);
- assertEquals("russiske", aSentence[2]);
- assertEquals("AN", aTags[2]);
+ Assertions.assertEquals("russiske", aSentence[2]);
+ Assertions.assertEquals("AN", aTags[2]);
- assertEquals("historikere", aSentence[3]);
- assertEquals("NC", aTags[3]);
+ Assertions.assertEquals("historikere", aSentence[3]);
+ Assertions.assertEquals("NC", aTags[3]);
- assertEquals("Andronik", aSentence[4]);
- assertEquals("NP", aTags[4]);
+ Assertions.assertEquals("Andronik", aSentence[4]);
+ Assertions.assertEquals("NP", aTags[4]);
- assertEquals("Andronik", aSentence[5]);
- assertEquals("NP", aTags[5]);
+ Assertions.assertEquals("Andronik", aSentence[5]);
+ Assertions.assertEquals("NP", aTags[5]);
- assertEquals("og", aSentence[6]);
- assertEquals("CC", aTags[6]);
+ Assertions.assertEquals("og", aSentence[6]);
+ Assertions.assertEquals("CC", aTags[6]);
- assertEquals("Igor", aSentence[7]);
- assertEquals("NP", aTags[7]);
+ Assertions.assertEquals("Igor", aSentence[7]);
+ Assertions.assertEquals("NP", aTags[7]);
- assertEquals("Klamkin", aSentence[8]);
- assertEquals("NP", aTags[8]);
+ Assertions.assertEquals("Klamkin", aSentence[8]);
+ Assertions.assertEquals("NP", aTags[8]);
- assertEquals("tror", aSentence[9]);
- assertEquals("VA", aTags[9]);
+ Assertions.assertEquals("tror", aSentence[9]);
+ Assertions.assertEquals("VA", aTags[9]);
- assertEquals("ikke", aSentence[10]);
- assertEquals("RG", aTags[10]);
+ Assertions.assertEquals("ikke", aSentence[10]);
+ Assertions.assertEquals("RG", aTags[10]);
- assertEquals(",", aSentence[11]);
- assertEquals("XP", aTags[11]);
+ Assertions.assertEquals(",", aSentence[11]);
+ Assertions.assertEquals("XP", aTags[11]);
- assertEquals("at", aSentence[12]);
- assertEquals("CS", aTags[12]);
+ Assertions.assertEquals("at", aSentence[12]);
+ Assertions.assertEquals("CS", aTags[12]);
- assertEquals("Rusland", aSentence[13]);
- assertEquals("NP", aTags[13]);
+ Assertions.assertEquals("Rusland", aSentence[13]);
+ Assertions.assertEquals("NP", aTags[13]);
- assertEquals("kan", aSentence[14]);
- assertEquals("VA", aTags[14]);
+ Assertions.assertEquals("kan", aSentence[14]);
+ Assertions.assertEquals("VA", aTags[14]);
- assertEquals("udvikles", aSentence[15]);
- assertEquals("VA", aTags[15]);
+ Assertions.assertEquals("udvikles", aSentence[15]);
+ Assertions.assertEquals("VA", aTags[15]);
- assertEquals("uden", aSentence[16]);
- assertEquals("SP", aTags[16]);
+ Assertions.assertEquals("uden", aSentence[16]);
+ Assertions.assertEquals("SP", aTags[16]);
- assertEquals("en", aSentence[17]);
- assertEquals("PI", aTags[17]);
+ Assertions.assertEquals("en", aSentence[17]);
+ Assertions.assertEquals("PI", aTags[17]);
- assertEquals("\"", aSentence[18]);
- assertEquals("XP", aTags[18]);
+ Assertions.assertEquals("\"", aSentence[18]);
+ Assertions.assertEquals("XP", aTags[18]);
- assertEquals("jernnæve", aSentence[19]);
- assertEquals("NC", aTags[19]);
+ Assertions.assertEquals("jernnæve", aSentence[19]);
+ Assertions.assertEquals("NC", aTags[19]);
- assertEquals("\"", aSentence[20]);
- assertEquals("XP", aTags[20]);
+ Assertions.assertEquals("\"", aSentence[20]);
+ Assertions.assertEquals("XP", aTags[20]);
- assertEquals(".", aSentence[21]);
- assertEquals("XP", aTags[21]);
+ Assertions.assertEquals(".", aSentence[21]);
+ Assertions.assertEquals("XP", aTags[21]);
POSSample b = sampleStream.read();
String[] bSentence = b.getSentence();
String[] bTags = b.getTags();
- assertEquals(12, bSentence.length);
- assertEquals(12, bTags.length);
+ Assertions.assertEquals(12, bSentence.length);
+ Assertions.assertEquals(12, bTags.length);
- assertEquals("De", bSentence[0]);
- assertEquals("PP", bTags[0]);
+ Assertions.assertEquals("De", bSentence[0]);
+ Assertions.assertEquals("PP", bTags[0]);
- assertEquals("hævder", bSentence[1]);
- assertEquals("VA", bTags[1]);
+ Assertions.assertEquals("hævder", bSentence[1]);
+ Assertions.assertEquals("VA", bTags[1]);
- assertEquals(",", bSentence[2]);
- assertEquals("XP", bTags[2]);
+ Assertions.assertEquals(",", bSentence[2]);
+ Assertions.assertEquals("XP", bTags[2]);
- assertEquals("at", bSentence[3]);
- assertEquals("CS", bTags[3]);
+ Assertions.assertEquals("at", bSentence[3]);
+ Assertions.assertEquals("CS", bTags[3]);
- assertEquals("Ruslands", bSentence[4]);
- assertEquals("NP", bTags[4]);
+ Assertions.assertEquals("Ruslands", bSentence[4]);
+ Assertions.assertEquals("NP", bTags[4]);
- assertEquals("vej", bSentence[5]);
- assertEquals("NC", bTags[5]);
+ Assertions.assertEquals("vej", bSentence[5]);
+ Assertions.assertEquals("NC", bTags[5]);
- assertEquals("til", bSentence[6]);
- assertEquals("SP", bTags[6]);
+ Assertions.assertEquals("til", bSentence[6]);
+ Assertions.assertEquals("SP", bTags[6]);
- assertEquals("demokrati", bSentence[7]);
- assertEquals("NC", bTags[7]);
+ Assertions.assertEquals("demokrati", bSentence[7]);
+ Assertions.assertEquals("NC", bTags[7]);
- assertEquals("går", bSentence[8]);
- assertEquals("VA", bTags[8]);
+ Assertions.assertEquals("går", bSentence[8]);
+ Assertions.assertEquals("VA", bTags[8]);
- assertEquals("gennem", bSentence[9]);
- assertEquals("SP", bTags[9]);
+ Assertions.assertEquals("gennem", bSentence[9]);
+ Assertions.assertEquals("SP", bTags[9]);
- assertEquals("diktatur", bSentence[10]);
- assertEquals("NC", bTags[10]);
+ Assertions.assertEquals("diktatur", bSentence[10]);
+ Assertions.assertEquals("NC", bTags[10]);
- assertEquals(".", bSentence[11]);
- assertEquals("XP", bTags[11]);
+ Assertions.assertEquals(".", bSentence[11]);
+ Assertions.assertEquals("XP", bTags[11]);
- assertNull(sampleStream.read());
+ Assertions.assertNull(sampleStream.read());
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/DirectorySampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/DirectorySampleStreamTest.java
index d17188e4..ba06ae1e 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/DirectorySampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/DirectorySampleStreamTest.java
@@ -20,18 +20,20 @@ package opennlp.tools.formats;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
+import java.util.UUID;
-import org.junit.Assert;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
public class DirectorySampleStreamTest {
-
- @Rule
- public TemporaryFolder tempDirectory = new TemporaryFolder();
+
+ @TempDir
+ Path tempDirectory;
@Test
public void directoryTest() throws IOException {
@@ -40,22 +42,22 @@ public class DirectorySampleStreamTest {
List<File> files = new ArrayList<>();
- File temp1 = tempDirectory.newFile();
+ File temp1 = createTempFile();
files.add(temp1);
- File temp2 = tempDirectory.newFile();
+ File temp2 = createTempFile();
files.add(temp2);
- DirectorySampleStream stream = new DirectorySampleStream(tempDirectory.getRoot(), filter, false);
+ DirectorySampleStream stream = new DirectorySampleStream(tempDirectory.toFile(), filter, false);
File file = stream.read();
- Assert.assertTrue(files.contains(file));
+ Assertions.assertTrue(files.contains(file));
file = stream.read();
- Assert.assertTrue(files.contains(file));
+ Assertions.assertTrue(files.contains(file));
file = stream.read();
- Assert.assertNull(file);
+ Assertions.assertNull(file);
stream.close();
@@ -66,22 +68,22 @@ public class DirectorySampleStreamTest {
List<File> files = new ArrayList<>();
- File temp1 = tempDirectory.newFile();
+ File temp1 = createTempFile();
files.add(temp1);
- File temp2 = tempDirectory.newFile();
+ File temp2 = createTempFile();
files.add(temp2);
- DirectorySampleStream stream = new DirectorySampleStream(tempDirectory.getRoot(), null, false);
+ DirectorySampleStream stream = new DirectorySampleStream(tempDirectory.toFile(), null, false);
File file = stream.read();
- Assert.assertTrue(files.contains(file));
+ Assertions.assertTrue(files.contains(file));
file = stream.read();
- Assert.assertTrue(files.contains(file));
+ Assertions.assertTrue(files.contains(file));
file = stream.read();
- Assert.assertNull(file);
+ Assertions.assertNull(file);
stream.close();
@@ -94,23 +96,23 @@ public class DirectorySampleStreamTest {
List<File> files = new ArrayList<>();
- File temp1 = tempDirectory.newFile();
+ File temp1 = createTempFile();
files.add(temp1);
- File tempSubDirectory = tempDirectory.newFolder("sub1");
+ File tempSubDirectory = createTempFolder("sub1");
File temp2 = File.createTempFile("sub1", ".tmp", tempSubDirectory);
files.add(temp2);
- DirectorySampleStream stream = new DirectorySampleStream(tempDirectory.getRoot(), filter, true);
+ DirectorySampleStream stream = new DirectorySampleStream(tempDirectory.toFile(), filter, true);
File file = stream.read();
- Assert.assertTrue(files.contains(file));
+ Assertions.assertTrue(files.contains(file));
file = stream.read();
- Assert.assertTrue(files.contains(file));
+ Assertions.assertTrue(files.contains(file));
file = stream.read();
- Assert.assertNull(file);
+ Assertions.assertNull(file);
stream.close();
@@ -123,27 +125,27 @@ public class DirectorySampleStreamTest {
List<File> files = new ArrayList<>();
- File temp1 = tempDirectory.newFile();
+ File temp1 = createTempFile();
files.add(temp1);
- File temp2 = tempDirectory.newFile();
+ File temp2 = createTempFile();
files.add(temp2);
- DirectorySampleStream stream = new DirectorySampleStream(tempDirectory.getRoot(), filter, false);
+ DirectorySampleStream stream = new DirectorySampleStream(tempDirectory.toFile(), filter, false);
File file = stream.read();
- Assert.assertTrue(files.contains(file));
+ Assertions.assertTrue(files.contains(file));
stream.reset();
file = stream.read();
- Assert.assertTrue(files.contains(file));
+ Assertions.assertTrue(files.contains(file));
file = stream.read();
- Assert.assertTrue(files.contains(file));
+ Assertions.assertTrue(files.contains(file));
file = stream.read();
- Assert.assertNull(file);
+ Assertions.assertNull(file);
stream.close();
@@ -154,25 +156,53 @@ public class DirectorySampleStreamTest {
FileFilter filter = new TempFileNameFilter();
- DirectorySampleStream stream = new DirectorySampleStream(tempDirectory.getRoot(), filter, false);
+ DirectorySampleStream stream = new DirectorySampleStream(tempDirectory.toFile(), filter, false);
- Assert.assertNull(stream.read());
+ Assertions.assertNull(stream.read());
stream.close();
}
-
- @Test(expected = IllegalArgumentException.class)
- public void invalidDirectoryTest() throws IOException {
- FileFilter filter = new TempFileNameFilter();
-
- DirectorySampleStream stream = new DirectorySampleStream(tempDirectory.newFile(), filter, false);
-
- Assert.assertNull(stream.read());
-
- stream.close();
-
+ @Test
+ public void invalidDirectoryTest() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ FileFilter filter = new TempFileNameFilter();
+
+ DirectorySampleStream stream = new DirectorySampleStream(createTempFile(), filter, false);
+
+ Assertions.assertNull(stream.read());
+
+ stream.close();
+ });
+ }
+
+ private File createTempFolder(String name) {
+
+ Path subDir = tempDirectory.resolve(name);
+
+ try {
+ Files.createDirectory(subDir);
+ } catch (IOException e) {
+ throw new IllegalStateException(
+ "Could not create sub directory " + subDir.toFile().getAbsolutePath(), e);
+ }
+ return subDir.toFile();
+
+ }
+
+ private File createTempFile() {
+
+ Path tempFile = tempDirectory.resolve(UUID.randomUUID() + ".tmp");
+
+ try {
+ Files.createFile(tempFile);
+ } catch (IOException e) {
+ throw new IllegalStateException(
+ "Could not create file " + tempFile.toFile().getAbsolutePath(), e);
+ }
+ return tempFile.toFile();
+
}
class TempFileNameFilter implements FileFilter {
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/EvalitaNameSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/EvalitaNameSampleStreamTest.java
index bbb720bc..ab52031c 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/EvalitaNameSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/EvalitaNameSampleStreamTest.java
@@ -19,8 +19,8 @@ package opennlp.tools.formats;
import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.EvalitaNameSampleStream.LANGUAGE;
import opennlp.tools.namefind.NameSample;
@@ -42,33 +42,33 @@ public class EvalitaNameSampleStreamTest {
}
@Test
- public void testParsingItalianSample() throws IOException {
+ void testParsingItalianSample() throws IOException {
ObjectStream<NameSample> sampleStream = openData(LANGUAGE.IT, "evalita-ner-it.sample");
NameSample personName = sampleStream.read();
- Assert.assertNotNull(personName);
+ Assertions.assertNotNull(personName);
- Assert.assertEquals(11, personName.getSentence().length);
- Assert.assertEquals(1, personName.getNames().length);
- Assert.assertEquals(true, personName.isClearAdaptiveDataSet());
+ Assertions.assertEquals(11, personName.getSentence().length);
+ Assertions.assertEquals(1, personName.getNames().length);
+ Assertions.assertEquals(true, personName.isClearAdaptiveDataSet());
Span nameSpan = personName.getNames()[0];
- Assert.assertEquals(8, nameSpan.getStart());
- Assert.assertEquals(10, nameSpan.getEnd());
- Assert.assertEquals(true, personName.isClearAdaptiveDataSet());
+ Assertions.assertEquals(8, nameSpan.getStart());
+ Assertions.assertEquals(10, nameSpan.getEnd());
+ Assertions.assertEquals(true, personName.isClearAdaptiveDataSet());
- Assert.assertEquals(0, sampleStream.read().getNames().length);
+ Assertions.assertEquals(0, sampleStream.read().getNames().length);
- Assert.assertNull(sampleStream.read());
+ Assertions.assertNull(sampleStream.read());
}
@Test
- public void testReset() throws IOException {
+ void testReset() throws IOException {
ObjectStream<NameSample> sampleStream = openData(LANGUAGE.IT, "evalita-ner-it.sample");
NameSample sample = sampleStream.read();
sampleStream.reset();
- Assert.assertEquals(sample, sampleStream.read());
+ Assertions.assertEquals(sample, sampleStream.read());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/NameFinderCensus90NameStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/NameFinderCensus90NameStreamTest.java
index 6df41ed6..b5b3be05 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/NameFinderCensus90NameStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/NameFinderCensus90NameStreamTest.java
@@ -20,16 +20,13 @@ package opennlp.tools.formats;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.InputStreamFactory;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.StringList;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-
public class NameFinderCensus90NameStreamTest {
private static ObjectStream<StringList> openData(String name)
@@ -42,65 +39,65 @@ public class NameFinderCensus90NameStreamTest {
}
@Test
- public void testParsingEnglishSample() throws IOException {
+ void testParsingEnglishSample() throws IOException {
ObjectStream<StringList> sampleStream = openData("census90.sample");
StringList personName = sampleStream.read();
// verify the first 5 taken from the Surname data
- assertNotNull(personName);
- assertEquals("Smith", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("Smith", personName.getToken(0));
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("Johnson", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("Johnson", personName.getToken(0));
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("Williams", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("Williams", personName.getToken(0));
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("Jones", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("Jones", personName.getToken(0));
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("Brown", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("Brown", personName.getToken(0));
// verify the next 5 taken from the female names
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("Mary", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("Mary", personName.getToken(0));
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("Patricia", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("Patricia", personName.getToken(0));
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("Linda", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("Linda", personName.getToken(0));
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("Barbara", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("Barbara", personName.getToken(0));
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("Elizabeth", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("Elizabeth", personName.getToken(0));
// verify the last 5 taken from the male names
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("James", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("James", personName.getToken(0));
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("John", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("John", personName.getToken(0));
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("Robert", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("Robert", personName.getToken(0));
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("Michael", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("Michael", personName.getToken(0));
personName = sampleStream.read();
- assertNotNull(personName);
- assertEquals("William", personName.getToken(0));
+ Assertions.assertNotNull(personName);
+ Assertions.assertEquals("William", personName.getToken(0));
// verify the end of the file.
personName = sampleStream.read();
- assertNull(personName);
+ Assertions.assertNull(personName);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/ResourceAsStreamFactory.java b/opennlp-tools/src/test/java/opennlp/tools/formats/ResourceAsStreamFactory.java
index 20392403..7e02fe79 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/ResourceAsStreamFactory.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/ResourceAsStreamFactory.java
@@ -17,7 +17,6 @@
package opennlp.tools.formats;
-import java.io.IOException;
import java.io.InputStream;
import java.util.Objects;
@@ -34,7 +33,7 @@ public class ResourceAsStreamFactory implements InputStreamFactory {
}
@Override
- public InputStream createInputStream() throws IOException {
+ public InputStream createInputStream() {
return clazz.getResourceAsStream(name);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADChunkSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADChunkSampleStreamTest.java
index 6c1886f0..7435ec83 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADChunkSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADChunkSampleStreamTest.java
@@ -22,9 +22,9 @@ import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import opennlp.tools.chunker.ChunkSample;
import opennlp.tools.formats.ResourceAsStreamFactory;
@@ -36,41 +36,41 @@ public class ADChunkSampleStreamTest {
private List<ChunkSample> samples = new ArrayList<>();
@Test
- public void testSimpleCount() {
- Assert.assertEquals(ADParagraphStreamTest.NUM_SENTENCES, samples.size());
+ void testSimpleCount() {
+ Assertions.assertEquals(ADParagraphStreamTest.NUM_SENTENCES, samples.size());
}
@Test
- public void testChunks() {
+ void testChunks() {
- Assert.assertEquals("Inicia", samples.get(0).getSentence()[0]);
- Assert.assertEquals("v-fin", samples.get(0).getTags()[0]);
- Assert.assertEquals("B-VP", samples.get(0).getPreds()[0]);
+ Assertions.assertEquals("Inicia", samples.get(0).getSentence()[0]);
+ Assertions.assertEquals("v-fin", samples.get(0).getTags()[0]);
+ Assertions.assertEquals("B-VP", samples.get(0).getPreds()[0]);
- Assert.assertEquals("em", samples.get(0).getSentence()[1]);
- Assert.assertEquals("prp", samples.get(0).getTags()[1]);
- Assert.assertEquals("B-PP", samples.get(0).getPreds()[1]);
+ Assertions.assertEquals("em", samples.get(0).getSentence()[1]);
+ Assertions.assertEquals("prp", samples.get(0).getTags()[1]);
+ Assertions.assertEquals("B-PP", samples.get(0).getPreds()[1]);
- Assert.assertEquals("o", samples.get(0).getSentence()[2]);
- Assert.assertEquals("art", samples.get(0).getTags()[2]);
- Assert.assertEquals("B-NP", samples.get(0).getPreds()[2]);
+ Assertions.assertEquals("o", samples.get(0).getSentence()[2]);
+ Assertions.assertEquals("art", samples.get(0).getTags()[2]);
+ Assertions.assertEquals("B-NP", samples.get(0).getPreds()[2]);
- Assert.assertEquals("próximo", samples.get(0).getSentence()[3]);
- Assert.assertEquals("adj", samples.get(0).getTags()[3]);
- Assert.assertEquals("I-NP", samples.get(0).getPreds()[3]);
+ Assertions.assertEquals("próximo", samples.get(0).getSentence()[3]);
+ Assertions.assertEquals("adj", samples.get(0).getTags()[3]);
+ Assertions.assertEquals("I-NP", samples.get(0).getPreds()[3]);
- Assert.assertEquals("Casas", samples.get(3).getSentence()[0]);
- Assert.assertEquals("n", samples.get(3).getTags()[0]);
- Assert.assertEquals("B-NP", samples.get(3).getPreds()[0]);
+ Assertions.assertEquals("Casas", samples.get(3).getSentence()[0]);
+ Assertions.assertEquals("n", samples.get(3).getTags()[0]);
+ Assertions.assertEquals("B-NP", samples.get(3).getPreds()[0]);
}
- @Before
- public void setup() throws IOException {
+ @BeforeEach
+ void setup() throws IOException {
InputStreamFactory in = new ResourceAsStreamFactory(
ADParagraphStreamTest.class, "/opennlp/tools/formats/ad.sample");
try (ADChunkSampleStream stream = new ADChunkSampleStream(new PlainTextByLineStream(in,
- StandardCharsets.UTF_8))) {
+ StandardCharsets.UTF_8))) {
ChunkSample sample;
while ((sample = stream.read()) != null) {
samples.add(sample);
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADNameSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADNameSampleStreamTest.java
index 7c2fa044..69db07ee 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADNameSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADNameSampleStreamTest.java
@@ -22,9 +22,9 @@ import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.namefind.NameSample;
@@ -37,86 +37,86 @@ public class ADNameSampleStreamTest {
private List<NameSample> samples = new ArrayList<>();
@Test
- public void testSimpleCount() throws IOException {
- Assert.assertEquals(ADParagraphStreamTest.NUM_SENTENCES, samples.size());
+ void testSimpleCount() {
+ Assertions.assertEquals(ADParagraphStreamTest.NUM_SENTENCES, samples.size());
}
@Test
- public void testCheckMergedContractions() throws IOException {
-
- Assert.assertEquals("no", samples.get(0).getSentence()[1]);
- Assert.assertEquals("no", samples.get(0).getSentence()[11]);
- Assert.assertEquals("Com", samples.get(1).getSentence()[0]);
- Assert.assertEquals("relação", samples.get(1).getSentence()[1]);
- Assert.assertEquals("à", samples.get(1).getSentence()[2]);
- Assert.assertEquals("mais", samples.get(2).getSentence()[4]);
- Assert.assertEquals("de", samples.get(2).getSentence()[5]);
- Assert.assertEquals("da", samples.get(2).getSentence()[8]);
- Assert.assertEquals("num", samples.get(3).getSentence()[26]);
+ void testCheckMergedContractions() {
+
+ Assertions.assertEquals("no", samples.get(0).getSentence()[1]);
+ Assertions.assertEquals("no", samples.get(0).getSentence()[11]);
+ Assertions.assertEquals("Com", samples.get(1).getSentence()[0]);
+ Assertions.assertEquals("relação", samples.get(1).getSentence()[1]);
+ Assertions.assertEquals("à", samples.get(1).getSentence()[2]);
+ Assertions.assertEquals("mais", samples.get(2).getSentence()[4]);
+ Assertions.assertEquals("de", samples.get(2).getSentence()[5]);
+ Assertions.assertEquals("da", samples.get(2).getSentence()[8]);
+ Assertions.assertEquals("num", samples.get(3).getSentence()[26]);
}
@Test
- public void testSize() throws IOException {
- Assert.assertEquals(25, samples.get(0).getSentence().length);
- Assert.assertEquals(12, samples.get(1).getSentence().length);
- Assert.assertEquals(59, samples.get(2).getSentence().length);
- Assert.assertEquals(33, samples.get(3).getSentence().length);
+ void testSize() {
+ Assertions.assertEquals(25, samples.get(0).getSentence().length);
+ Assertions.assertEquals(12, samples.get(1).getSentence().length);
+ Assertions.assertEquals(59, samples.get(2).getSentence().length);
+ Assertions.assertEquals(33, samples.get(3).getSentence().length);
}
@Test
- public void testNames() throws IOException {
-
- Assert.assertEquals(new Span(4, 7, "time"), samples.get(0).getNames()[0]);
- Assert.assertEquals(new Span(8, 10, "place"), samples.get(0).getNames()[1]);
- Assert.assertEquals(new Span(12, 14, "place"), samples.get(0).getNames()[2]);
- Assert.assertEquals(new Span(15, 17, "person"), samples.get(0).getNames()[3]);
- Assert.assertEquals(new Span(18, 19, "numeric"), samples.get(0).getNames()[4]);
- Assert.assertEquals(new Span(20, 22, "place"), samples.get(0).getNames()[5]);
- Assert.assertEquals(new Span(23, 24, "place"), samples.get(0).getNames()[6]);
-
- Assert.assertEquals(new Span(22, 24, "person"), samples.get(2).getNames()[0]);// 22..24
- Assert.assertEquals(new Span(25, 27, "person"), samples.get(2).getNames()[1]);// 25..27
- Assert.assertEquals(new Span(28, 30, "person"), samples.get(2).getNames()[2]);// 28..30
- Assert.assertEquals(new Span(31, 34, "person"), samples.get(2).getNames()[3]);// 31..34
- Assert.assertEquals(new Span(35, 37, "person"), samples.get(2).getNames()[4]);// 35..37
- Assert.assertEquals(new Span(38, 40, "person"), samples.get(2).getNames()[5]);// 38..40
- Assert.assertEquals(new Span(41, 43, "person"), samples.get(2).getNames()[6]);// 41..43
- Assert.assertEquals(new Span(44, 46, "person"), samples.get(2).getNames()[7]);// 44..46
- Assert.assertEquals(new Span(47, 49, "person"), samples.get(2).getNames()[8]);// 47..49
- Assert.assertEquals(new Span(50, 52, "person"), samples.get(2).getNames()[9]);// 50..52
- Assert.assertEquals(new Span(53, 55, "person"), samples.get(2).getNames()[10]);// 53..55
-
- Assert.assertEquals(new Span(0, 1, "place"), samples.get(3).getNames()[0]);// 0..1
- Assert.assertEquals(new Span(6, 7, "event"), samples.get(3).getNames()[1]);// 6..7
- Assert.assertEquals(new Span(15, 16, "organization"), samples.get(3).getNames()[2]);// 15..16
- Assert.assertEquals(new Span(18, 19, "event"), samples.get(3).getNames()[3]);// 18..19
- Assert.assertEquals(new Span(27, 28, "event"), samples.get(3).getNames()[4]);// 27..28
- Assert.assertEquals(new Span(29, 30, "event"), samples.get(3).getNames()[5]);// 29..30
-
- Assert.assertEquals(new Span(1, 6, "time"), samples.get(4).getNames()[0]);// 0..1
- Assert.assertEquals(new Span(0, 3, "person"), samples.get(5).getNames()[0]);// 0..1
+ void testNames() {
+
+ Assertions.assertEquals(new Span(4, 7, "time"), samples.get(0).getNames()[0]);
+ Assertions.assertEquals(new Span(8, 10, "place"), samples.get(0).getNames()[1]);
+ Assertions.assertEquals(new Span(12, 14, "place"), samples.get(0).getNames()[2]);
+ Assertions.assertEquals(new Span(15, 17, "person"), samples.get(0).getNames()[3]);
+ Assertions.assertEquals(new Span(18, 19, "numeric"), samples.get(0).getNames()[4]);
+ Assertions.assertEquals(new Span(20, 22, "place"), samples.get(0).getNames()[5]);
+ Assertions.assertEquals(new Span(23, 24, "place"), samples.get(0).getNames()[6]);
+
+ Assertions.assertEquals(new Span(22, 24, "person"), samples.get(2).getNames()[0]);// 22..24
+ Assertions.assertEquals(new Span(25, 27, "person"), samples.get(2).getNames()[1]);// 25..27
+ Assertions.assertEquals(new Span(28, 30, "person"), samples.get(2).getNames()[2]);// 28..30
+ Assertions.assertEquals(new Span(31, 34, "person"), samples.get(2).getNames()[3]);// 31..34
+ Assertions.assertEquals(new Span(35, 37, "person"), samples.get(2).getNames()[4]);// 35..37
+ Assertions.assertEquals(new Span(38, 40, "person"), samples.get(2).getNames()[5]);// 38..40
+ Assertions.assertEquals(new Span(41, 43, "person"), samples.get(2).getNames()[6]);// 41..43
+ Assertions.assertEquals(new Span(44, 46, "person"), samples.get(2).getNames()[7]);// 44..46
+ Assertions.assertEquals(new Span(47, 49, "person"), samples.get(2).getNames()[8]);// 47..49
+ Assertions.assertEquals(new Span(50, 52, "person"), samples.get(2).getNames()[9]);// 50..52
+ Assertions.assertEquals(new Span(53, 55, "person"), samples.get(2).getNames()[10]);// 53..55
+
+ Assertions.assertEquals(new Span(0, 1, "place"), samples.get(3).getNames()[0]);// 0..1
+ Assertions.assertEquals(new Span(6, 7, "event"), samples.get(3).getNames()[1]);// 6..7
+ Assertions.assertEquals(new Span(15, 16, "organization"), samples.get(3).getNames()[2]);// 15..16
+ Assertions.assertEquals(new Span(18, 19, "event"), samples.get(3).getNames()[3]);// 18..19
+ Assertions.assertEquals(new Span(27, 28, "event"), samples.get(3).getNames()[4]);// 27..28
+ Assertions.assertEquals(new Span(29, 30, "event"), samples.get(3).getNames()[5]);// 29..30
+
+ Assertions.assertEquals(new Span(1, 6, "time"), samples.get(4).getNames()[0]);// 0..1
+ Assertions.assertEquals(new Span(0, 3, "person"), samples.get(5).getNames()[0]);// 0..1
}
@Test
- public void testSmallSentence() throws IOException {
- Assert.assertEquals(2, samples.get(6).getSentence().length);
+ void testSmallSentence() {
+ Assertions.assertEquals(2, samples.get(6).getSentence().length);
}
@Test
- public void testMissingRightContraction() throws IOException {
- Assert.assertEquals(new Span(0, 1, "person"), samples.get(7).getNames()[0]);
- Assert.assertEquals(new Span(3, 4, "person"), samples.get(7).getNames()[1]);
- Assert.assertEquals(new Span(5, 6, "person"), samples.get(7).getNames()[2]);
+ void testMissingRightContraction() {
+ Assertions.assertEquals(new Span(0, 1, "person"), samples.get(7).getNames()[0]);
+ Assertions.assertEquals(new Span(3, 4, "person"), samples.get(7).getNames()[1]);
+ Assertions.assertEquals(new Span(5, 6, "person"), samples.get(7).getNames()[2]);
}
- @Before
- public void setup() throws IOException {
+ @BeforeEach
+ void setup() throws IOException {
InputStreamFactory in = new ResourceAsStreamFactory(ADParagraphStreamTest.class,
"/opennlp/tools/formats/ad.sample");
try (ADNameSampleStream stream =
- new ADNameSampleStream(new PlainTextByLineStream(in, StandardCharsets.UTF_8), true)) {
+ new ADNameSampleStream(new PlainTextByLineStream(in, StandardCharsets.UTF_8), true)) {
NameSample sample;
while ((sample = stream.read()) != null) {
samples.add(sample);
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADPOSSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADPOSSampleStreamTest.java
index 275c7749..f81f21e2 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADPOSSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADPOSSampleStreamTest.java
@@ -20,8 +20,8 @@ package opennlp.tools.formats.ad;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.postag.POSSample;
@@ -30,82 +30,82 @@ import opennlp.tools.util.PlainTextByLineStream;
public class ADPOSSampleStreamTest {
@Test
- public void testSimple() throws IOException {
+ void testSimple() throws IOException {
// add one sentence with expandME = includeFeats = false
try (ADPOSSampleStream stream = new ADPOSSampleStream(
new PlainTextByLineStream(new ResourceAsStreamFactory(
ADParagraphStreamTest.class, "/opennlp/tools/formats/ad.sample"),
- StandardCharsets.UTF_8), false, false)) {
+ StandardCharsets.UTF_8), false, false)) {
POSSample sample = stream.read();
- Assert.assertEquals(23, sample.getSentence().length);
+ Assertions.assertEquals(23, sample.getSentence().length);
- Assert.assertEquals("Inicia", sample.getSentence()[0]);
- Assert.assertEquals("v-fin", sample.getTags()[0]);
+ Assertions.assertEquals("Inicia", sample.getSentence()[0]);
+ Assertions.assertEquals("v-fin", sample.getTags()[0]);
- Assert.assertEquals("em", sample.getSentence()[1]);
- Assert.assertEquals("prp", sample.getTags()[1]);
+ Assertions.assertEquals("em", sample.getSentence()[1]);
+ Assertions.assertEquals("prp", sample.getTags()[1]);
- Assert.assertEquals("o", sample.getSentence()[2]);
- Assert.assertEquals("art", sample.getTags()[2]);
+ Assertions.assertEquals("o", sample.getSentence()[2]);
+ Assertions.assertEquals("art", sample.getTags()[2]);
- Assert.assertEquals("Porto_Poesia", sample.getSentence()[9]);
- Assert.assertEquals("prop", sample.getTags()[9]);
+ Assertions.assertEquals("Porto_Poesia", sample.getSentence()[9]);
+ Assertions.assertEquals("prop", sample.getTags()[9]);
}
}
@Test
- public void testExpandME() throws IOException {
+ void testExpandME() throws IOException {
// add one sentence with expandME = true
try (ADPOSSampleStream stream = new ADPOSSampleStream(
new PlainTextByLineStream(new ResourceAsStreamFactory(
ADParagraphStreamTest.class, "/opennlp/tools/formats/ad.sample"),
- StandardCharsets.UTF_8), true, false)) {
+ StandardCharsets.UTF_8), true, false)) {
POSSample sample = stream.read();
- Assert.assertEquals(27, sample.getSentence().length);
+ Assertions.assertEquals(27, sample.getSentence().length);
- Assert.assertEquals("Inicia", sample.getSentence()[0]);
- Assert.assertEquals("v-fin", sample.getTags()[0]);
+ Assertions.assertEquals("Inicia", sample.getSentence()[0]);
+ Assertions.assertEquals("v-fin", sample.getTags()[0]);
- Assert.assertEquals("em", sample.getSentence()[1]);
- Assert.assertEquals("prp", sample.getTags()[1]);
+ Assertions.assertEquals("em", sample.getSentence()[1]);
+ Assertions.assertEquals("prp", sample.getTags()[1]);
- Assert.assertEquals("o", sample.getSentence()[2]);
- Assert.assertEquals("art", sample.getTags()[2]);
+ Assertions.assertEquals("o", sample.getSentence()[2]);
+ Assertions.assertEquals("art", sample.getTags()[2]);
- Assert.assertEquals("Porto", sample.getSentence()[9]);
- Assert.assertEquals("B-prop", sample.getTags()[9]);
+ Assertions.assertEquals("Porto", sample.getSentence()[9]);
+ Assertions.assertEquals("B-prop", sample.getTags()[9]);
- Assert.assertEquals("Poesia", sample.getSentence()[10]);
- Assert.assertEquals("I-prop", sample.getTags()[10]);
+ Assertions.assertEquals("Poesia", sample.getSentence()[10]);
+ Assertions.assertEquals("I-prop", sample.getTags()[10]);
}
}
@Test
- public void testIncludeFeats() throws IOException {
+ void testIncludeFeats() throws IOException {
// add one sentence with includeFeats = true
try (ADPOSSampleStream stream = new ADPOSSampleStream(
new PlainTextByLineStream(new ResourceAsStreamFactory(
ADParagraphStreamTest.class, "/opennlp/tools/formats/ad.sample"),
- StandardCharsets.UTF_8), false, true)) {
+ StandardCharsets.UTF_8), false, true)) {
POSSample sample = stream.read();
- Assert.assertEquals(23, sample.getSentence().length);
+ Assertions.assertEquals(23, sample.getSentence().length);
- Assert.assertEquals("Inicia", sample.getSentence()[0]);
- Assert.assertEquals("v-fin=PR=3S=IND=VFIN", sample.getTags()[0]);
+ Assertions.assertEquals("Inicia", sample.getSentence()[0]);
+ Assertions.assertEquals("v-fin=PR=3S=IND=VFIN", sample.getTags()[0]);
- Assert.assertEquals("em", sample.getSentence()[1]);
- Assert.assertEquals("prp", sample.getTags()[1]);
+ Assertions.assertEquals("em", sample.getSentence()[1]);
+ Assertions.assertEquals("prp", sample.getTags()[1]);
- Assert.assertEquals("o", sample.getSentence()[2]);
- Assert.assertEquals("art=DET=M=S", sample.getTags()[2]);
+ Assertions.assertEquals("o", sample.getSentence()[2]);
+ Assertions.assertEquals("art=DET=M=S", sample.getTags()[2]);
- Assert.assertEquals("Porto_Poesia", sample.getSentence()[9]);
- Assert.assertEquals("prop=M=S", sample.getTags()[9]);
+ Assertions.assertEquals("Porto_Poesia", sample.getSentence()[9]);
+ Assertions.assertEquals("prop=M=S", sample.getTags()[9]);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADParagraphStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADParagraphStreamTest.java
index e7544085..6e7554d1 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADParagraphStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADParagraphStreamTest.java
@@ -20,8 +20,8 @@ package opennlp.tools.formats.ad;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.util.InputStreamFactory;
@@ -32,7 +32,7 @@ public class ADParagraphStreamTest {
public static final int NUM_SENTENCES = 8;
@Test
- public void testSimpleReading() throws IOException {
+ void testSimpleReading() throws IOException {
int count = 0;
ADSentenceStream stream = openData();
@@ -45,11 +45,11 @@ public class ADParagraphStreamTest {
// paragraph.getRoot();
}
- Assert.assertEquals(ADParagraphStreamTest.NUM_SENTENCES, count);
+ Assertions.assertEquals(ADParagraphStreamTest.NUM_SENTENCES, count);
}
@Test
- public void testLeadingWithContraction() throws IOException {
+ void testLeadingWithContraction() throws IOException {
int count = 0;
ADSentenceStream stream = openData();
@@ -61,7 +61,7 @@ public class ADParagraphStreamTest {
paragraph = stream.read();
}
- Assert.assertEquals(ADParagraphStreamTest.NUM_SENTENCES, count);
+ Assertions.assertEquals(ADParagraphStreamTest.NUM_SENTENCES, count);
}
private static ADSentenceStream openData() throws IOException {
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADSentenceSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADSentenceSampleStreamTest.java
index 1268dd1c..ce043f2c 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADSentenceSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADSentenceSampleStreamTest.java
@@ -22,9 +22,9 @@ import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.sentdetect.SentenceSample;
@@ -37,26 +37,26 @@ public class ADSentenceSampleStreamTest {
private List<SentenceSample> samples = new ArrayList<>();
@Test
- public void testSimpleCount() throws IOException {
- Assert.assertEquals(5, samples.size());
+ void testSimpleCount() {
+ Assertions.assertEquals(5, samples.size());
}
@Test
- public void testSentences() throws IOException {
+ void testSentences() {
- Assert.assertNotNull(samples.get(0).getDocument());
- Assert.assertEquals(3, samples.get(0).getSentences().length);
- Assert.assertEquals(new Span(0, 119), samples.get(0).getSentences()[0]);
- Assert.assertEquals(new Span(120, 180), samples.get(0).getSentences()[1]);
+ Assertions.assertNotNull(samples.get(0).getDocument());
+ Assertions.assertEquals(3, samples.get(0).getSentences().length);
+ Assertions.assertEquals(new Span(0, 119), samples.get(0).getSentences()[0]);
+ Assertions.assertEquals(new Span(120, 180), samples.get(0).getSentences()[1]);
}
- @Before
- public void setup() throws IOException {
+ @BeforeEach
+ void setup() throws IOException {
InputStreamFactory in = new ResourceAsStreamFactory(ADSentenceSampleStreamTest.class,
"/opennlp/tools/formats/ad.sample");
try (ADSentenceSampleStream stream = new ADSentenceSampleStream(
- new PlainTextByLineStream(in, StandardCharsets.UTF_8), true)) {
+ new PlainTextByLineStream(in, StandardCharsets.UTF_8), true)) {
SentenceSample sample;
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADTokenSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADTokenSampleStreamTest.java
index 06aa7b24..e37dc26d 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADTokenSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/ad/ADTokenSampleStreamTest.java
@@ -24,9 +24,9 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import opennlp.tools.tokenize.TokenSample;
import opennlp.tools.util.ObjectStream;
@@ -36,26 +36,26 @@ public class ADTokenSampleStreamTest {
private List<TokenSample> samples = new ArrayList<>();
@Test
- public void testSimpleCount() throws IOException {
- Assert.assertEquals(ADParagraphStreamTest.NUM_SENTENCES, samples.size());
+ void testSimpleCount() {
+ Assertions.assertEquals(ADParagraphStreamTest.NUM_SENTENCES, samples.size());
}
@Test
- public void testSentences() throws IOException {
- Assert.assertTrue(samples.get(5).getText().contains("ofereceu-me"));
+ void testSentences() {
+ Assertions.assertTrue(samples.get(5).getText().contains("ofereceu-me"));
}
- @Before
- public void setup() throws IOException, URISyntaxException {
+ @BeforeEach
+ void setup() throws IOException, URISyntaxException {
ADTokenSampleStreamFactory factory = new ADTokenSampleStreamFactory(
ADTokenSampleStreamFactory.Parameters.class);
File dict = new File(Objects.requireNonNull(getClass().getClassLoader()
- .getResource("opennlp/tools/tokenize/latin-detokenizer.xml")).toURI());
+ .getResource("opennlp/tools/tokenize/latin-detokenizer.xml")).toURI());
File data = new File(Objects.requireNonNull(getClass().getClassLoader()
- .getResource("opennlp/tools/formats/ad.sample")).toURI());
- String[] args = { "-data", data.getCanonicalPath(), "-encoding", "UTF-8",
- "-lang", "por", "-detokenizer", dict.getCanonicalPath() };
+ .getResource("opennlp/tools/formats/ad.sample")).toURI());
+ String[] args = {"-data", data.getCanonicalPath(), "-encoding", "UTF-8",
+ "-lang", "por", "-detokenizer", dict.getCanonicalPath()};
ObjectStream<TokenSample> tokenSampleStream = factory.create(args);
TokenSample sample = tokenSampleStream.read();
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratAnnotationStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratAnnotationStreamTest.java
index 02e625fa..80b9b3c8 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratAnnotationStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratAnnotationStreamTest.java
@@ -21,7 +21,7 @@ import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.ObjectStream;
@@ -42,7 +42,7 @@ public class BratAnnotationStreamTest {
}
@Test
- public void testParsingEntities() throws Exception {
+ void testParsingEntities() throws Exception {
Map<String, String> typeToClassMap = new HashMap<>();
addEntityTypes(typeToClassMap);
@@ -60,7 +60,7 @@ public class BratAnnotationStreamTest {
}
@Test
- public void testParsingRelations() throws Exception {
+ void testParsingRelations() throws Exception {
Map<String, String> typeToClassMap = new HashMap<>();
addEntityTypes(typeToClassMap);
typeToClassMap.put("Related", AnnotationConfiguration.RELATION_TYPE);
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratDocumentParserTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratDocumentParserTest.java
index 88908a61..59266b00 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratDocumentParserTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratDocumentParserTest.java
@@ -23,8 +23,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.namefind.NameSample;
import opennlp.tools.sentdetect.NewlineSentenceDetector;
@@ -33,7 +33,7 @@ import opennlp.tools.tokenize.WhitespaceTokenizer;
public class BratDocumentParserTest {
@Test
- public void testParse() throws IOException {
+ void testParse() throws IOException {
Map<String, String> typeToClassMap = new HashMap<>();
BratAnnotationStreamTest.addEntityTypes(typeToClassMap);
@@ -52,27 +52,27 @@ public class BratDocumentParserTest {
List<NameSample> names = parser.parse(doc);
- Assert.assertEquals(3, names.size());
+ Assertions.assertEquals(3, names.size());
NameSample sample1 = names.get(0);
- Assert.assertEquals(1, sample1.getNames().length);
- Assert.assertEquals(0, sample1.getNames()[0].getStart());
- Assert.assertEquals(2, sample1.getNames()[0].getEnd());
+ Assertions.assertEquals(1, sample1.getNames().length);
+ Assertions.assertEquals(0, sample1.getNames()[0].getStart());
+ Assertions.assertEquals(2, sample1.getNames()[0].getEnd());
NameSample sample2 = names.get(1);
- Assert.assertEquals(1, sample2.getNames().length);
- Assert.assertEquals(0, sample2.getNames()[0].getStart());
- Assert.assertEquals(1, sample2.getNames()[0].getEnd());
+ Assertions.assertEquals(1, sample2.getNames().length);
+ Assertions.assertEquals(0, sample2.getNames()[0].getStart());
+ Assertions.assertEquals(1, sample2.getNames()[0].getEnd());
NameSample sample3 = names.get(2);
- Assert.assertEquals(3, sample3.getNames().length);
- Assert.assertEquals(0, sample3.getNames()[0].getStart());
- Assert.assertEquals(1, sample3.getNames()[0].getEnd());
- Assert.assertEquals(1, sample3.getNames()[1].getStart());
- Assert.assertEquals(2, sample3.getNames()[1].getEnd());
- Assert.assertEquals(2, sample3.getNames()[2].getStart());
- Assert.assertEquals(3, sample3.getNames()[2].getEnd());
+ Assertions.assertEquals(3, sample3.getNames().length);
+ Assertions.assertEquals(0, sample3.getNames()[0].getStart());
+ Assertions.assertEquals(1, sample3.getNames()[0].getEnd());
+ Assertions.assertEquals(1, sample3.getNames()[1].getStart());
+ Assertions.assertEquals(2, sample3.getNames()[1].getEnd());
+ Assertions.assertEquals(2, sample3.getNames()[2].getStart());
+ Assertions.assertEquals(3, sample3.getNames()[2].getEnd());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratDocumentTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratDocumentTest.java
index c808f2eb..9d079074 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratDocumentTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratDocumentTest.java
@@ -22,13 +22,13 @@ import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class BratDocumentTest {
@Test
- public void testDocumentWithEntitiesParsing() throws IOException {
+ void testDocumentWithEntitiesParsing() throws IOException {
Map<String, String> typeToClassMap = new HashMap<>();
BratAnnotationStreamTest.addEntityTypes(typeToClassMap);
@@ -42,32 +42,32 @@ public class BratDocumentTest {
BratDocument doc = BratDocument.parseDocument(config, "voa-with-entities", txtIn, annIn);
- Assert.assertEquals("voa-with-entities", doc.getId());
- Assert.assertTrue(doc.getText().startsWith(" U . S . President "));
- Assert.assertTrue(doc.getText().endsWith("multinational process . \n"));
+ Assertions.assertEquals("voa-with-entities", doc.getId());
+ Assertions.assertTrue(doc.getText().startsWith(" U . S . President "));
+ Assertions.assertTrue(doc.getText().endsWith("multinational process . \n"));
+
+ Assertions.assertEquals(18, doc.getAnnotations().size());
- Assert.assertEquals(18, doc.getAnnotations().size());
-
BratAnnotation annotation = doc.getAnnotation("T2");
checkNote(annotation, "Barack Obama", "President Obama was the 44th U.S. president");
annotation = doc.getAnnotation("T3");
- checkNote(annotation,"South Korea","The capital of South Korea is Seoul");
+ checkNote(annotation, "South Korea", "The capital of South Korea is Seoul");
}
-
+
private void checkNote(BratAnnotation annotation, String expectedCoveredText, String expectedNote) {
- Assert.assertTrue(annotation instanceof SpanAnnotation);
+ Assertions.assertTrue(annotation instanceof SpanAnnotation);
SpanAnnotation spanAnn = (SpanAnnotation) annotation;
- Assert.assertEquals(expectedCoveredText, spanAnn.getCoveredText());
- Assert.assertEquals(expectedNote, spanAnn.getNote());
+ Assertions.assertEquals(expectedCoveredText, spanAnn.getCoveredText());
+ Assertions.assertEquals(expectedNote, spanAnn.getNote());
}
/**
* Parse spans that have multiple fragments and ensure they are matched to the correct tokens.
- *
+ * <p>
* Test to ensure OPENNLP-1193 works.
*/
@Test
- public void testSpanWithMultiFragments() throws IOException {
+ void testSpanWithMultiFragments() throws IOException {
Map<String, String> typeToClassMap = new HashMap<>();
BratAnnotationStreamTest.addEntityTypes(typeToClassMap);
AnnotationConfiguration config = new AnnotationConfiguration(typeToClassMap);
@@ -81,17 +81,17 @@ public class BratDocumentTest {
BratDocument doc = BratDocument.parseDocument(config, "opennlp-1193", txtIn, annIn);
SpanAnnotation t1 = (SpanAnnotation) doc.getAnnotation("T1");
- Assert.assertEquals(t1.getSpans()[0].getStart(), 0);
- Assert.assertEquals(t1.getSpans()[0].getEnd(), 7);
- Assert.assertEquals(t1.getSpans()[1].getStart(), 8);
- Assert.assertEquals(t1.getSpans()[1].getEnd(), 15);
- Assert.assertEquals(t1.getSpans()[2].getStart(), 17);
- Assert.assertEquals(t1.getSpans()[2].getEnd(), 24);
+ Assertions.assertEquals(t1.getSpans()[0].getStart(), 0);
+ Assertions.assertEquals(t1.getSpans()[0].getEnd(), 7);
+ Assertions.assertEquals(t1.getSpans()[1].getStart(), 8);
+ Assertions.assertEquals(t1.getSpans()[1].getEnd(), 15);
+ Assertions.assertEquals(t1.getSpans()[2].getStart(), 17);
+ Assertions.assertEquals(t1.getSpans()[2].getEnd(), 24);
SpanAnnotation t2 = (SpanAnnotation) doc.getAnnotation("T2");
- Assert.assertEquals(t2.getSpans()[0].getStart(), 26);
- Assert.assertEquals(t2.getSpans()[0].getEnd(), 33);
- Assert.assertEquals(t2.getSpans()[1].getStart(), 40);
- Assert.assertEquals(t2.getSpans()[1].getEnd(), 47);
+ Assertions.assertEquals(t2.getSpans()[0].getStart(), 26);
+ Assertions.assertEquals(t2.getSpans()[0].getEnd(), 33);
+ Assertions.assertEquals(t2.getSpans()[1].getStart(), 40);
+ Assertions.assertEquals(t2.getSpans()[1].getEnd(), 47);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratNameSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratNameSampleStreamTest.java
index 0f136821..fac516ae 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratNameSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/brat/BratNameSampleStreamTest.java
@@ -25,8 +25,8 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Set;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.namefind.NameSample;
import opennlp.tools.sentdetect.NewlineSentenceDetector;
@@ -52,7 +52,7 @@ public class BratNameSampleStreamTest {
}
@Test
- public void readNoOverlap() throws IOException {
+ void readNoOverlap() throws IOException {
BratNameSampleStream stream = createNameSampleWith("-entities.",
null);
int count = 0;
@@ -62,28 +62,33 @@ public class BratNameSampleStreamTest {
sample = stream.read();
}
- Assert.assertEquals(8, count);
+ Assertions.assertEquals(8, count);
}
- @Test(expected = RuntimeException.class)
- public void readOverlapFail() throws IOException {
- BratNameSampleStream stream = createNameSampleWith("overlapping",
- null);
+ @Test
+ void readOverlapFail() {
+ Assertions.assertThrows(RuntimeException.class, () -> {
+ BratNameSampleStream stream = createNameSampleWith("overlapping",
+ null);
+
+ NameSample sample = stream.read();
+ while (sample != null) {
+ sample = stream.read();
+ }
+ });
- NameSample sample = stream.read();
- while (sample != null) {
- sample = stream.read();
- }
}
- @Test(expected = IllegalArgumentException.class)
- public void emptySample() throws IOException {
- createNameSampleWith("overlapping",
- Collections.emptySet());
+ @Test
+ void emptySample() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ createNameSampleWith("overlapping",
+ Collections.emptySet());
+ });
}
@Test
- public void readOverlapFilter() throws IOException {
+ void readOverlapFilter() throws IOException {
BratNameSampleStream stream = createNameSampleWith("overlapping",
Collections.singleton("Person"));
int count = 0;
@@ -93,6 +98,6 @@ public class BratNameSampleStreamTest {
sample = stream.read();
}
- Assert.assertEquals(8, count);
+ Assertions.assertEquals(8, count);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluLemmaSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluLemmaSampleStreamTest.java
index 5d58cf1b..1ffc76c5 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluLemmaSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluLemmaSampleStreamTest.java
@@ -20,8 +20,8 @@ package opennlp.tools.formats.conllu;
import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.lemmatizer.LemmaSample;
@@ -32,7 +32,7 @@ public class ConlluLemmaSampleStreamTest {
@Test
- public void testParseSpanishS300() throws IOException {
+ void testParseSpanishS300() throws IOException {
InputStreamFactory streamFactory =
new ResourceAsStreamFactory(ConlluStreamTest.class, "es-ud-sample.conllu");
@@ -41,9 +41,9 @@ public class ConlluLemmaSampleStreamTest {
LemmaSample predicted = stream.read();
System.out.println(predicted);
- Assert.assertEquals("digám+tú+él", predicted.getLemmas()[0]);
- Assert.assertEquals("la", predicted.getTokens()[3]);
- Assert.assertEquals("el", predicted.getLemmas()[3]);
+ Assertions.assertEquals("digám+tú+él", predicted.getLemmas()[0]);
+ Assertions.assertEquals("la", predicted.getTokens()[3]);
+ Assertions.assertEquals("el", predicted.getLemmas()[3]);
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluPOSSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluPOSSampleStreamTest.java
index f6bef728..ffff417b 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluPOSSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluPOSSampleStreamTest.java
@@ -20,8 +20,8 @@ package opennlp.tools.formats.conllu;
import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.postag.POSSample;
@@ -30,7 +30,7 @@ import opennlp.tools.util.ObjectStream;
public class ConlluPOSSampleStreamTest {
@Test
- public void testParseContraction() throws IOException {
+ void testParseContraction() throws IOException {
InputStreamFactory streamFactory =
new ResourceAsStreamFactory(ConlluStreamTest.class, "pt_br-ud-sample.conllu");
@@ -48,13 +48,13 @@ public class ConlluPOSSampleStreamTest {
"antigo_ADJ Ciago_PROPN ._PUNCT");
POSSample predicted = stream.read();
- Assert.assertEquals(expected, predicted);
+ Assertions.assertEquals(expected, predicted);
}
}
@Test
- public void testParseSpanishS300() throws IOException {
+ void testParseSpanishS300() throws IOException {
InputStreamFactory streamFactory =
new ResourceAsStreamFactory(ConlluStreamTest.class, "es-ud-sample.conllu");
@@ -71,7 +71,7 @@ public class ConlluPOSSampleStreamTest {
"plantea_VERB ni_CCONJ siquiera_ADV \"_PUNCT esperar_VERB un_DET mejor_ADJ " +
"gobierno_NOUN \"_PUNCT ._PUNCT");
POSSample predicted = stream.read();
- Assert.assertEquals(expected1, predicted);
+ Assertions.assertEquals(expected1, predicted);
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluSentenceSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluSentenceSampleStreamTest.java
index d45d38fa..67dff7e9 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluSentenceSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluSentenceSampleStreamTest.java
@@ -19,8 +19,8 @@ package opennlp.tools.formats.conllu;
import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.sentdetect.SentenceSample;
@@ -31,7 +31,7 @@ import opennlp.tools.util.Span;
public class ConlluSentenceSampleStreamTest {
@Test
- public void testParseTwoSentences() throws IOException {
+ void testParseTwoSentences() throws IOException {
InputStreamFactory streamFactory =
new ResourceAsStreamFactory(ConlluStreamTest.class, "de-ud-train-sample.conllu");
@@ -40,30 +40,30 @@ public class ConlluSentenceSampleStreamTest {
SentenceSample sample1 = stream.read();
- Assert.assertEquals("Fachlich kompetent, sehr gute Beratung und ein freundliches Team.",
+ Assertions.assertEquals("Fachlich kompetent, sehr gute Beratung und ein freundliches Team.",
sample1.getDocument());
- Assert.assertEquals(new Span(0, 65), sample1.getSentences()[0]);
+ Assertions.assertEquals(new Span(0, 65), sample1.getSentences()[0]);
SentenceSample sample2 = stream.read();
- Assert.assertEquals("Beiden Zahnärzten verdanke ich einen neuen Biss und dadurch " +
+ Assertions.assertEquals("Beiden Zahnärzten verdanke ich einen neuen Biss und dadurch " +
"endlich keine Rückenschmerzen mehr.", sample2.getDocument());
- Assert.assertEquals(new Span(0, 95), sample2.getSentences()[0]);
+ Assertions.assertEquals(new Span(0, 95), sample2.getSentences()[0]);
- Assert.assertNull("Stream must be exhausted", stream.read());
+ Assertions.assertNull(stream.read(), "Stream must be exhausted");
}
try (ObjectStream<SentenceSample> stream =
new ConlluSentenceSampleStream(new ConlluStream(streamFactory), 3)) {
SentenceSample sample = stream.read();
- Assert.assertEquals("Fachlich kompetent, sehr gute Beratung und ein freundliches Team."
- + " Beiden Zahnärzten verdanke ich einen neuen Biss und dadurch endlich keine "
- + "Rückenschmerzen mehr.",
+ Assertions.assertEquals("Fachlich kompetent, sehr gute Beratung und ein freundliches Team."
+ + " Beiden Zahnärzten verdanke ich einen neuen Biss und dadurch endlich keine "
+ + "Rückenschmerzen mehr.",
sample.getDocument());
- Assert.assertNull("Stream must be exhausted", stream.read());
+ Assertions.assertNull(stream.read(), "Stream must be exhausted");
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluStreamTest.java
index ceb9a76d..f42bce88 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluStreamTest.java
@@ -24,8 +24,8 @@ import java.util.Locale;
import java.util.Map;
import java.util.Optional;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.util.InputStreamFactory;
@@ -34,7 +34,7 @@ import opennlp.tools.util.ObjectStream;
public class ConlluStreamTest {
@Test
- public void testParseTwoSentences() throws IOException {
+ void testParseTwoSentences() throws IOException {
InputStreamFactory streamFactory =
new ResourceAsStreamFactory(ConlluStreamTest.class, "de-ud-train-sample.conllu");
@@ -42,76 +42,76 @@ public class ConlluStreamTest {
try (ObjectStream<ConlluSentence> stream = new ConlluStream(streamFactory)) {
ConlluSentence sent1 = stream.read();
- Assert.assertEquals("train-s21", sent1.getSentenceIdComment());
- Assert.assertEquals("Fachlich kompetent, sehr gute Beratung und ein freundliches Team.",
+ Assertions.assertEquals("train-s21", sent1.getSentenceIdComment());
+ Assertions.assertEquals("Fachlich kompetent, sehr gute Beratung und ein freundliches Team.",
sent1.getTextComment());
- Assert.assertEquals(11, sent1.getWordLines().size());
+ Assertions.assertEquals(11, sent1.getWordLines().size());
ConlluSentence sent2 = stream.read();
- Assert.assertEquals("train-s22", sent2.getSentenceIdComment());
- Assert.assertEquals(
+ Assertions.assertEquals("train-s22", sent2.getSentenceIdComment());
+ Assertions.assertEquals(
"Beiden Zahnärzten verdanke ich einen neuen Biss und dadurch endlich keine Rückenschmerzen mehr.",
sent2.getTextComment());
- Assert.assertEquals(14, sent2.getWordLines().size());
+ Assertions.assertEquals(14, sent2.getWordLines().size());
- Assert.assertNull("Stream must be exhausted", stream.read());
+ Assertions.assertNull(stream.read(), "Stream must be exhausted");
}
}
@Test
- public void testOptionalComments() throws IOException {
+ void testOptionalComments() throws IOException {
InputStreamFactory streamFactory =
- new ResourceAsStreamFactory(ConlluStreamTest.class, "full-sample.conllu");
+ new ResourceAsStreamFactory(ConlluStreamTest.class, "full-sample.conllu");
try (ObjectStream<ConlluSentence> stream = new ConlluStream(streamFactory)) {
ConlluSentence sent1 = stream.read();
- Assert.assertEquals("1", sent1.getSentenceIdComment());
- Assert.assertEquals("They buy and sell books.",
- sent1.getTextComment());
- Assert.assertTrue(sent1.isNewDocument());
- Assert.assertTrue(sent1.isNewParagraph());
- Assert.assertEquals(6, sent1.getWordLines().size());
+ Assertions.assertEquals("1", sent1.getSentenceIdComment());
+ Assertions.assertEquals("They buy and sell books.",
+ sent1.getTextComment());
+ Assertions.assertTrue(sent1.isNewDocument());
+ Assertions.assertTrue(sent1.isNewParagraph());
+ Assertions.assertEquals(6, sent1.getWordLines().size());
ConlluSentence sent2 = stream.read();
- Assert.assertEquals("2", sent2.getSentenceIdComment());
- Assert.assertEquals(
- "I have no clue.",
- sent2.getTextComment());
- Assert.assertTrue(sent2.isNewDocument());
- Assert.assertEquals(5, sent2.getWordLines().size());
+ Assertions.assertEquals("2", sent2.getSentenceIdComment());
+ Assertions.assertEquals(
+ "I have no clue.",
+ sent2.getTextComment());
+ Assertions.assertTrue(sent2.isNewDocument());
+ Assertions.assertEquals(5, sent2.getWordLines().size());
ConlluSentence sent3 = stream.read();
- Assert.assertEquals("panc0.s4", sent3.getSentenceIdComment());
- Assert.assertEquals(Optional.of("tat yathānuśrūyate."), sent3.getTranslit());
- Assert.assertEquals("तत् यथानुश्रूयते।", sent3.getTextComment());
- Assert.assertEquals(3, sent3.getWordLines().size());
- Assert.assertTrue(sent3.isNewParagraph());
+ Assertions.assertEquals("panc0.s4", sent3.getSentenceIdComment());
+ Assertions.assertEquals(Optional.of("tat yathānuśrūyate."), sent3.getTranslit());
+ Assertions.assertEquals("तत् यथानुश्रूयते।", sent3.getTextComment());
+ Assertions.assertEquals(3, sent3.getWordLines().size());
+ Assertions.assertTrue(sent3.isNewParagraph());
Map<Object, Object> textLang3 = new HashMap<>();
textLang3.put(new Locale("fr"), "Voilà ce qui nous est parvenu par la tradition orale.");
textLang3.put(new Locale("en"), "This is what is heard.");
- Assert.assertEquals(Optional.of(textLang3)
- , sent3.getTextLang());
+ Assertions.assertEquals(Optional.of(textLang3)
+ , sent3.getTextLang());
ConlluSentence sent4 = stream.read();
- Assert.assertEquals("mf920901-001-p1s1A", sent4.getSentenceIdComment());
- Assert.assertEquals(
- "Slovenská ústava: pro i proti",
- sent4.getTextComment());
- Assert.assertEquals(6, sent4.getWordLines().size());
- Assert.assertTrue(sent4.isNewDocument());
- Assert.assertTrue(sent4.isNewParagraph());
- Assert.assertEquals(Optional.of("mf920901-001"), sent4.getDocumentId());
- Assert.assertEquals(Optional.of("mf920901-001-p1"), sent4.getParagraphId());
- Assert.assertEquals(Optional.of(Collections.singletonMap(new Locale("en"),
- "Slovak constitution: pros and cons"))
- , sent4.getTextLang());
-
- Assert.assertNull("Stream must be exhausted", stream.read());
+ Assertions.assertEquals("mf920901-001-p1s1A", sent4.getSentenceIdComment());
+ Assertions.assertEquals(
+ "Slovenská ústava: pro i proti",
+ sent4.getTextComment());
+ Assertions.assertEquals(6, sent4.getWordLines().size());
+ Assertions.assertTrue(sent4.isNewDocument());
+ Assertions.assertTrue(sent4.isNewParagraph());
+ Assertions.assertEquals(Optional.of("mf920901-001"), sent4.getDocumentId());
+ Assertions.assertEquals(Optional.of("mf920901-001-p1"), sent4.getParagraphId());
+ Assertions.assertEquals(Optional.of(Collections.singletonMap(new Locale("en"),
+ "Slovak constitution: pros and cons"))
+ , sent4.getTextLang());
+
+ Assertions.assertNull(stream.read(), "Stream must be exhausted");
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluTokenSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluTokenSampleStreamTest.java
index be32a3be..d6d96e32 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluTokenSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluTokenSampleStreamTest.java
@@ -19,8 +19,8 @@ package opennlp.tools.formats.conllu;
import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.tokenize.TokenSample;
@@ -30,7 +30,7 @@ import opennlp.tools.util.ObjectStream;
public class ConlluTokenSampleStreamTest {
@Test
- public void testParseTwoSentences() throws IOException {
+ void testParseTwoSentences() throws IOException {
InputStreamFactory streamFactory =
new ResourceAsStreamFactory(ConlluStreamTest.class, "de-ud-train-sample.conllu");
@@ -40,19 +40,19 @@ public class ConlluTokenSampleStreamTest {
"Fachlich kompetent" + TokenSample.DEFAULT_SEPARATOR_CHARS
+ ", sehr gute Beratung und ein freundliches Team" + TokenSample.DEFAULT_SEPARATOR_CHARS
+ ".", TokenSample.DEFAULT_SEPARATOR_CHARS);
- Assert.assertEquals(expected1, stream.read());
+ Assertions.assertEquals(expected1, stream.read());
TokenSample expected2 = TokenSample.parse("Beiden Zahnärzten verdanke ich einen " +
"neuen Biss und dadurch endlich keine Rückenschmerzen mehr"
+ TokenSample.DEFAULT_SEPARATOR_CHARS + ".", TokenSample.DEFAULT_SEPARATOR_CHARS);
- Assert.assertEquals(expected2, stream.read());
+ Assertions.assertEquals(expected2, stream.read());
- Assert.assertNull("Stream must be exhausted", stream.read());
+ Assertions.assertNull(stream.read(), "Stream must be exhausted");
}
}
@Test
- public void testParseContraction() throws IOException {
+ void testParseContraction() throws IOException {
InputStreamFactory streamFactory =
new ResourceAsStreamFactory(ConlluStreamTest.class, "pt_br-ud-sample.conllu");
@@ -69,12 +69,12 @@ public class ConlluTokenSampleStreamTest {
TokenSample.DEFAULT_SEPARATOR_CHARS + "."
, TokenSample.DEFAULT_SEPARATOR_CHARS);
TokenSample predicted = stream.read();
- Assert.assertEquals(expected1, predicted);
+ Assertions.assertEquals(expected1, predicted);
}
}
@Test
- public void testParseSpanishS300() throws IOException {
+ void testParseSpanishS300() throws IOException {
InputStreamFactory streamFactory =
new ResourceAsStreamFactory(ConlluStreamTest.class, "es-ud-sample.conllu");
@@ -94,7 +94,7 @@ public class ConlluTokenSampleStreamTest {
, TokenSample.DEFAULT_SEPARATOR_CHARS);
TokenSample predicted = stream.read();
- Assert.assertEquals(expected1, predicted);
+ Assertions.assertEquals(expected1, predicted);
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluWordLineTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluWordLineTest.java
index 005ec55d..3bd35e39 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluWordLineTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/conllu/ConlluWordLineTest.java
@@ -17,27 +17,27 @@
package opennlp.tools.formats.conllu;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.InvalidFormatException;
public class ConlluWordLineTest {
@Test
- public void testParseLine() throws InvalidFormatException {
+ void testParseLine() throws InvalidFormatException {
ConlluWordLine line = new ConlluWordLine(
"12\tHänden\tHand\tNOUN\tNN\tCase=Dat|Number=Plur\t5\tnmod\t_\t_");
- Assert.assertEquals("12", line.getId());
- Assert.assertEquals("Händen", line.getForm());
- Assert.assertEquals("Hand", line.getLemma());
- Assert.assertEquals("NOUN", line.getPosTag(ConlluTagset.U));
- Assert.assertEquals("NN", line.getPosTag(ConlluTagset.X));
- Assert.assertEquals("Case=Dat|Number=Plur", line.getFeats());
- Assert.assertEquals("5", line.getHead());
- Assert.assertEquals("nmod", line.getDeprel());
- Assert.assertEquals("_", line.getDeps());
- Assert.assertEquals("_", line.getMisc());
+ Assertions.assertEquals("12", line.getId());
+ Assertions.assertEquals("Händen", line.getForm());
+ Assertions.assertEquals("Hand", line.getLemma());
+ Assertions.assertEquals("NOUN", line.getPosTag(ConlluTagset.U));
+ Assertions.assertEquals("NN", line.getPosTag(ConlluTagset.X));
+ Assertions.assertEquals("Case=Dat|Number=Plur", line.getFeats());
+ Assertions.assertEquals("5", line.getHead());
+ Assertions.assertEquals("nmod", line.getDeprel());
+ Assertions.assertEquals("_", line.getDeps());
+ Assertions.assertEquals("_", line.getMisc());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/frenchtreebank/ConstitParseSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/frenchtreebank/ConstitParseSampleStreamTest.java
index a9fc6567..94ab5a0a 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/frenchtreebank/ConstitParseSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/frenchtreebank/ConstitParseSampleStreamTest.java
@@ -21,8 +21,8 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.parser.Parse;
import opennlp.tools.util.ObjectStream;
@@ -30,7 +30,7 @@ import opennlp.tools.util.ObjectStreamUtils;
public class ConstitParseSampleStreamTest {
- private String[] sample1Tokens = new String[]{
+ private String[] sample1Tokens = new String[] {
"L'",
"autonomie",
"de",
@@ -91,7 +91,7 @@ public class ConstitParseSampleStreamTest {
byte[] buffer = new byte[1024];
int length;
try (InputStream sampleIn =
- ConstitParseSampleStreamTest.class.getResourceAsStream("sample1.xml")) {
+ ConstitParseSampleStreamTest.class.getResourceAsStream("sample1.xml")) {
while ((length = sampleIn.read(buffer)) > 0) {
out.write(buffer, 0, length);
}
@@ -101,20 +101,20 @@ public class ConstitParseSampleStreamTest {
}
@Test
- public void testThereIsExactlyOneSent() throws IOException {
+ void testThereIsExactlyOneSent() throws IOException {
try (ObjectStream<Parse> samples =
- new ConstitParseSampleStream(ObjectStreamUtils.createObjectStream(getSample1()))) {
- Assert.assertNotNull(samples.read());
- Assert.assertNull(samples.read());
- Assert.assertNull(samples.read());
+ new ConstitParseSampleStream(ObjectStreamUtils.createObjectStream(getSample1()))) {
+ Assertions.assertNotNull(samples.read());
+ Assertions.assertNull(samples.read());
+ Assertions.assertNull(samples.read());
}
}
@Test
- public void testTokensAreCorrect() throws IOException {
+ void testTokensAreCorrect() throws IOException {
try (ObjectStream<Parse> samples =
- new ConstitParseSampleStream(ObjectStreamUtils.createObjectStream(getSample1()))) {
+ new ConstitParseSampleStream(ObjectStreamUtils.createObjectStream(getSample1()))) {
Parse p = samples.read();
Parse[] tagNodes = p.getTagNodes();
@@ -123,7 +123,7 @@ public class ConstitParseSampleStreamTest {
tokens[ti] = tagNodes[ti].getCoveredText();
}
- Assert.assertArrayEquals(sample1Tokens, tokens);
+ Assertions.assertArrayEquals(sample1Tokens, tokens);
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/irishsentencebank/IrishSentenceBankDocumentTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/irishsentencebank/IrishSentenceBankDocumentTest.java
index 671fea09..71ead885 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/irishsentencebank/IrishSentenceBankDocumentTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/irishsentencebank/IrishSentenceBankDocumentTest.java
@@ -21,8 +21,8 @@ import java.io.IOException;
import java.io.InputStream;
import java.util.List;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.tokenize.TokenSample;
import opennlp.tools.util.Span;
@@ -30,38 +30,38 @@ import opennlp.tools.util.Span;
public class IrishSentenceBankDocumentTest {
@Test
- public void testParsingSimpleDoc() throws IOException {
- try (InputStream irishSBXmlIn =
- IrishSentenceBankDocumentTest.class.getResourceAsStream("irishsentencebank-sample.xml")) {
+ void testParsingSimpleDoc() throws IOException {
+ try (InputStream irishSBXmlIn =
+ IrishSentenceBankDocumentTest.class.getResourceAsStream("irishsentencebank-sample.xml")) {
IrishSentenceBankDocument doc = IrishSentenceBankDocument.parse(irishSBXmlIn);
List<IrishSentenceBankDocument.IrishSentenceBankSentence> sents = doc.getSentences();
- Assert.assertEquals(2, sents.size());
+ Assertions.assertEquals(2, sents.size());
IrishSentenceBankDocument.IrishSentenceBankSentence sent1 = sents.get(0);
IrishSentenceBankDocument.IrishSentenceBankSentence sent2 = sents.get(1);
- Assert.assertEquals("A Dhia, tá mé ag iompar clainne!", sent1.getOriginal());
+ Assertions.assertEquals("A Dhia, tá mé ag iompar clainne!", sent1.getOriginal());
IrishSentenceBankDocument.IrishSentenceBankFlex[] flex = sent1.getFlex();
- Assert.assertEquals(7, flex.length);
- Assert.assertEquals("A", flex[0].getSurface());
- Assert.assertArrayEquals(new String[]{"a"}, flex[0].getFlex());
+ Assertions.assertEquals(7, flex.length);
+ Assertions.assertEquals("A", flex[0].getSurface());
+ Assertions.assertArrayEquals(new String[] {"a"}, flex[0].getFlex());
IrishSentenceBankDocument.IrishSentenceBankFlex[] flex2 = sent2.getFlex();
- Assert.assertEquals("ón", flex2[4].getSurface());
- Assert.assertArrayEquals(new String[]{"ó", "an"}, flex2[4].getFlex());
+ Assertions.assertEquals("ón", flex2[4].getSurface());
+ Assertions.assertArrayEquals(new String[] {"ó", "an"}, flex2[4].getFlex());
- Assert.assertEquals("Excuse me, are you from the stone age?", sent2.getTranslation());
+ Assertions.assertEquals("Excuse me, are you from the stone age?", sent2.getTranslation());
TokenSample ts = sent1.getTokenSample();
Span[] spans = ts.getTokenSpans();
- Assert.assertEquals(9, spans.length);
- Assert.assertEquals(24, spans[7].getStart());
- Assert.assertEquals(31, spans[7].getEnd());
- Assert.assertEquals("clainne", ts.getText().substring(spans[7].getStart(), spans[7].getEnd()));
+ Assertions.assertEquals(9, spans.length);
+ Assertions.assertEquals(24, spans[7].getStart());
+ Assertions.assertEquals(31, spans[7].getEnd());
+ Assertions.assertEquals("clainne", ts.getText().substring(spans[7].getStart(), spans[7].getEnd()));
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/leipzig/LeipzigLanguageSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/leipzig/LeipzigLanguageSampleStreamTest.java
index b6efab46..2d1b8698 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/leipzig/LeipzigLanguageSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/leipzig/LeipzigLanguageSampleStreamTest.java
@@ -20,47 +20,53 @@ package opennlp.tools.formats.leipzig;
import java.io.File;
import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.InvalidFormatException;
/**
* Tests for the {@link LeipzigLanguageSampleStream} class.
*/
+
public class LeipzigLanguageSampleStreamTest {
private static String testDataPath = LeipzigLanguageSampleStreamTest.class
- .getClassLoader().getResource("opennlp/tools/formats/leipzig/samples").getPath();
+ .getClassLoader().getResource("opennlp/tools/formats/leipzig/samples").getPath();
@Test
- public void testReadSentenceFiles() {
+ void testReadSentenceFiles() {
int samplesPerLanguage = 2;
int sentencesPerSample = 1;
try {
LeipzigLanguageSampleStream stream = new LeipzigLanguageSampleStream(new File(testDataPath),
- sentencesPerSample, samplesPerLanguage);
+ sentencesPerSample, samplesPerLanguage);
int count = 0;
- while (stream.read() != null)
+ while (stream.read() != null) {
count++;
+ }
- Assert.assertEquals(4, count);
+ Assertions.assertEquals(4, count);
} catch (IOException e) {
- Assert.fail();
+ Assertions.fail();
}
}
- @Test(expected = InvalidFormatException.class)
- public void testNotEnoughSentences() throws IOException {
- int samplesPerLanguage = 2;
- int sentencesPerSample = 2;
+ @Test
+ void testNotEnoughSentences() {
+ Assertions.assertThrows(InvalidFormatException.class, () -> {
+ int samplesPerLanguage = 2;
+ int sentencesPerSample = 2;
- LeipzigLanguageSampleStream stream =
- new LeipzigLanguageSampleStream(new File(testDataPath),
+ LeipzigLanguageSampleStream stream =
+ new LeipzigLanguageSampleStream(new File(testDataPath),
sentencesPerSample, samplesPerLanguage);
- while (stream.read() != null);
+ while (stream.read() != null) ;
+
+ });
+
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/letsmt/LetsmtDocumentTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/letsmt/LetsmtDocumentTest.java
index f50681e2..e73c942b 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/letsmt/LetsmtDocumentTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/letsmt/LetsmtDocumentTest.java
@@ -21,25 +21,25 @@ import java.io.IOException;
import java.io.InputStream;
import java.util.List;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class LetsmtDocumentTest {
@Test
- public void testParsingSimpleDoc() throws IOException {
+ void testParsingSimpleDoc() throws IOException {
try (InputStream letsmtXmlIn = LetsmtDocumentTest.class.getResourceAsStream("letsmt-with-words.xml");) {
LetsmtDocument doc = LetsmtDocument.parse(letsmtXmlIn);
List<LetsmtDocument.LetsmtSentence> sents = doc.getSentences();
- Assert.assertEquals(2, sents.size());
+ Assertions.assertEquals(2, sents.size());
LetsmtDocument.LetsmtSentence sent1 = sents.get(0);
- Assert.assertNull(sent1.getNonTokenizedText());
+ Assertions.assertNull(sent1.getNonTokenizedText());
- Assert.assertArrayEquals(new String[]{
+ Assertions.assertArrayEquals(new String[] {
"The",
"Apache",
"Software",
@@ -72,12 +72,12 @@ public class LetsmtDocumentTest {
"software",
"products",
"."
- }, sent1.getTokens());
+ }, sent1.getTokens());
LetsmtDocument.LetsmtSentence sent2 = sents.get(1);
- Assert.assertNull(sent2.getNonTokenizedText());
+ Assertions.assertNull(sent2.getNonTokenizedText());
- Assert.assertArrayEquals(new String[]{
+ Assertions.assertArrayEquals(new String[] {
"All",
"software",
"produced",
@@ -105,7 +105,7 @@ public class LetsmtDocumentTest {
"listed",
"below",
"."
- }, sent2.getTokens());
+ }, sent2.getTokens());
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascNamedEntitySampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascNamedEntitySampleStreamTest.java
index 2f111506..5dea461a 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascNamedEntitySampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascNamedEntitySampleStreamTest.java
@@ -21,7 +21,8 @@ import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.namefind.NameFinderME;
import opennlp.tools.namefind.NameSample;
@@ -32,16 +33,10 @@ import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.Span;
import opennlp.tools.util.TrainingParameters;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
public class MascNamedEntitySampleStreamTest {
@Test
- public void read() {
+ void read() {
try {
FileFilter fileFilter = pathname -> pathname.getName().contains("MASC");
File directory = new File(this.getClass().getResource(
@@ -53,31 +48,31 @@ public class MascNamedEntitySampleStreamTest {
NameSample s = stream.read();
String[] expectedTokens = {"This", "is", "a", "test", "Sentence", "."};
- assertArrayEquals(expectedTokens, s.getSentence());
+ Assertions.assertArrayEquals(expectedTokens, s.getSentence());
Span[] expectedTags = new Span[] {new Span(4, 5, "org")};
Span[] returnedTags = s.getNames();
// check the start/end positions
- assertEquals(expectedTags.length, returnedTags.length);
+ Assertions.assertEquals(expectedTags.length, returnedTags.length);
for (int i = 0; i < returnedTags.length; i++) {
- assertTrue(expectedTags[i].equals(returnedTags[i]));
+ Assertions.assertTrue(expectedTags[i].equals(returnedTags[i]));
}
s = stream.read();
expectedTokens = new String[] {"This", "is", "'nother", "test", "sentence", "."};
- assertArrayEquals(expectedTokens, s.getSentence());
+ Assertions.assertArrayEquals(expectedTokens, s.getSentence());
expectedTags = new Span[] {};
returnedTags = s.getNames();
- assertArrayEquals(expectedTags, returnedTags);
+ Assertions.assertArrayEquals(expectedTags, returnedTags);
} catch (IOException e) {
- fail("IO Exception: " + e.getMessage());
+ Assertions.fail("IO Exception: " + e.getMessage());
}
}
@Test
- public void close() {
+ void close() {
try {
FileFilter fileFilter = pathname -> pathname.getName().contains("MASC");
File directory = new File(this.getClass().getResource(
@@ -89,14 +84,14 @@ public class MascNamedEntitySampleStreamTest {
stream.close();
NameSample s = stream.read();
} catch (IOException e) {
- assertEquals(e.getMessage(),
+ Assertions.assertEquals(e.getMessage(),
"You are reading an empty document stream. " +
"Did you close it?");
}
}
@Test
- public void reset() {
+ void reset() {
try {
FileFilter fileFilter = pathname -> pathname.getName().contains("MASC");
File directory = new File(this.getClass().getResource(
@@ -108,29 +103,29 @@ public class MascNamedEntitySampleStreamTest {
NameSample s = stream.read();
s = stream.read();
s = stream.read();
- assertNull(s); //The stream should be exhausted by now
+ Assertions.assertNull(s); //The stream should be exhausted by now
stream.reset();
s = stream.read();
String[] expectedTokens = {"This", "is", "a", "test", "Sentence", "."};
- assertArrayEquals(expectedTokens, s.getSentence());
+ Assertions.assertArrayEquals(expectedTokens, s.getSentence());
Span[] expectedTags = new Span[] {new Span(4, 5, "org")};
Span[] returnedTags = s.getNames();
// check the start/end positions
- assertEquals(expectedTags.length, returnedTags.length);
+ Assertions.assertEquals(expectedTags.length, returnedTags.length);
for (int i = 0; i < returnedTags.length; i++) {
- assertTrue(expectedTags[i].equals(returnedTags[i]));
+ Assertions.assertTrue(expectedTags[i].equals(returnedTags[i]));
}
} catch (IOException e) {
- fail("IO Exception: " + e.getMessage());
+ Assertions.fail("IO Exception: " + e.getMessage());
}
}
@Test
- public void train() {
+ void train() {
try {
File directory = new File(this.getClass().getResource(
"/opennlp/tools/formats/masc/").getFile());
@@ -160,7 +155,7 @@ public class MascNamedEntitySampleStreamTest {
for (StackTraceElement trace : traces) {
System.err.println(trace.toString());
}
- fail("Exception raised");
+ Assertions.fail("Exception raised");
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascPOSSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascPOSSampleStreamTest.java
index 4eba1c72..a38032f4 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascPOSSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascPOSSampleStreamTest.java
@@ -22,7 +22,8 @@ import java.io.FileFilter;
import java.io.IOException;
import java.util.Arrays;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.postag.POSEvaluator;
import opennlp.tools.postag.POSModel;
@@ -32,15 +33,10 @@ import opennlp.tools.postag.POSTaggerME;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.TrainingParameters;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.fail;
-
public class MascPOSSampleStreamTest {
@Test
- public void read() {
+ void read() {
try {
FileFilter fileFilter = pathname -> pathname.getName().contains("MASC");
File directory = new File(this.getClass().getResource(
@@ -52,24 +48,24 @@ public class MascPOSSampleStreamTest {
POSSample s = stream.read();
String[] expectedTokens = {"This", "is", "a", "test", "Sentence", "."};
- assertArrayEquals(expectedTokens, s.getSentence());
+ Assertions.assertArrayEquals(expectedTokens, s.getSentence());
String[] expectedTags = {"DT", "VB", "AT", "NN", "NN", "."};
- assertArrayEquals(expectedTags, s.getTags());
+ Assertions.assertArrayEquals(expectedTags, s.getTags());
s = stream.read();
expectedTokens = new String[] {"This", "is", "'nother", "test", "sentence", "."};
- assertArrayEquals(expectedTokens, s.getSentence());
+ Assertions.assertArrayEquals(expectedTokens, s.getSentence());
expectedTags = new String[] {"DT", "VB", "RB", "NN", "NN", "."};
- assertArrayEquals(expectedTags, s.getTags());
+ Assertions.assertArrayEquals(expectedTags, s.getTags());
} catch (IOException e) {
- fail("IO Exception: " + e.getMessage());
+ Assertions.fail("IO Exception: " + e.getMessage());
}
}
@Test
- public void close() {
+ void close() {
try {
FileFilter fileFilter = pathname -> pathname.getName().contains("MASC");
File directory = new File(this.getClass().getResource(
@@ -81,14 +77,14 @@ public class MascPOSSampleStreamTest {
stream.close();
POSSample s = stream.read();
} catch (IOException e) {
- assertEquals(e.getMessage(),
+ Assertions.assertEquals(e.getMessage(),
"You are reading an empty document stream. " +
"Did you close it?");
}
}
@Test
- public void reset() {
+ void reset() {
try {
FileFilter fileFilter = pathname -> pathname.getName().contains("MASC");
File directory = new File(this.getClass().getResource(
@@ -100,25 +96,25 @@ public class MascPOSSampleStreamTest {
POSSample s = stream.read();
s = stream.read();
s = stream.read();
- assertNull(s); //The stream should be exhausted by now
+ Assertions.assertNull(s); //The stream should be exhausted by now
stream.reset();
s = stream.read();
String[] expectedTokens = {"This", "is", "a", "test", "Sentence", "."};
- assertArrayEquals(expectedTokens, s.getSentence());
+ Assertions.assertArrayEquals(expectedTokens, s.getSentence());
String[] expectedTags = {"DT", "VB", "AT", "NN", "NN", "."};
- assertArrayEquals(expectedTags, s.getTags());
+ Assertions.assertArrayEquals(expectedTags, s.getTags());
} catch (IOException e) {
- fail("IO Exception: " + e.getMessage());
+ Assertions.fail("IO Exception: " + e.getMessage());
}
}
@Test
- public void train() {
+ void train() {
try {
File directory = new File(this.getClass().getResource(
"/opennlp/tools/formats/masc/").getFile());
@@ -145,7 +141,7 @@ public class MascPOSSampleStreamTest {
} catch (Exception e) {
System.err.println(e.getMessage());
System.err.println(Arrays.toString(e.getStackTrace()));
- fail("Exception raised");
+ Assertions.fail("Exception raised");
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascSentenceSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascSentenceSampleStreamTest.java
index 62982730..4b0ea41b 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascSentenceSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascSentenceSampleStreamTest.java
@@ -24,8 +24,9 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
import opennlp.tools.sentdetect.SentenceDetectorEvaluator;
import opennlp.tools.sentdetect.SentenceDetectorFactory;
@@ -36,15 +37,10 @@ import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.Span;
import opennlp.tools.util.TrainingParameters;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.fail;
-
public class MascSentenceSampleStreamTest {
@Test
- public void reset() {
+ void reset() {
FileFilter fileFilter = pathname -> pathname.getName().contains("MASC");
File directory = new File(this.getClass().getResource(
"/opennlp/tools/formats/masc/").getFile());
@@ -57,12 +53,12 @@ public class MascSentenceSampleStreamTest {
//now we should get null
testSample = stream.read();
- assertNull(testSample);
+ Assertions.assertNull(testSample);
//by resetting, we should get good results again
stream.reset();
testSample = stream.read();
- assertNotNull(testSample);
+ Assertions.assertNotNull(testSample);
String documentText = "This is a test Sentence. This is 'nother test sentence. ";
List<Span> sentenceSpans = new ArrayList<>();
@@ -71,15 +67,15 @@ public class MascSentenceSampleStreamTest {
SentenceSample expectedSample = new SentenceSample(documentText,
sentenceSpans.toArray(new Span[sentenceSpans.size()]));
- assertEquals(testSample.toString(), expectedSample.toString());
+ Assertions.assertEquals(testSample.toString(), expectedSample.toString());
} catch (IOException e) {
- fail("IO Exception");
+ Assertions.fail("IO Exception");
}
}
@Test
- public void close() {
+ void close() {
try {
FileFilter fileFilter = pathname -> pathname.getName().contains("MASC");
@@ -91,14 +87,14 @@ public class MascSentenceSampleStreamTest {
stream.close();
stream.read();
} catch (IOException e) {
- assertEquals(e.getMessage(),
+ Assertions.assertEquals(e.getMessage(),
"You are reading an empty document stream. " +
"Did you close it?");
}
}
@Test
- public void read() {
+ void read() {
FileFilter fileFilter = pathname -> pathname.getName().contains("");
File directory = new File(this.getClass().getResource("/opennlp/tools/formats/masc").getFile());
try {
@@ -113,23 +109,23 @@ public class MascSentenceSampleStreamTest {
SentenceSample expectedSample = new SentenceSample(documentText,
sentenceSpans.toArray(new Span[sentenceSpans.size()]));
SentenceSample testSample = stream.read();
- assertEquals(testSample.toString(), expectedSample.toString());
+ Assertions.assertEquals(testSample.toString(), expectedSample.toString());
//the fake file is exhausted, we should get null now
testSample = stream.read();
- assertNull(testSample);
+ Assertions.assertNull(testSample);
} catch (IOException e) {
System.out.println(e.getMessage());
System.out.println(Arrays.toString(e.getStackTrace()));
- fail("IO Exception");
+ Assertions.fail("IO Exception");
}
}
- @Ignore //todo: We can't train on the FakeMasc data, it is too small.
+ @Disabled //todo: We can't train on the FakeMasc data, it is too small.
@Test
- public void train() {
+ void train() {
try {
File directory = new File(this.getClass().getResource(
"/opennlp/tools/formats/masc/").getFile());
@@ -156,7 +152,7 @@ public class MascSentenceSampleStreamTest {
} catch (Exception e) {
System.err.println(e.getMessage());
System.err.println(Arrays.toString(e.getStackTrace()));
- fail("Exception raised");
+ Assertions.fail("Exception raised");
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascTokenSampleStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascTokenSampleStreamTest.java
index ec2fbe14..d11c7d78 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascTokenSampleStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/masc/MascTokenSampleStreamTest.java
@@ -22,7 +22,8 @@ import java.io.FileFilter;
import java.io.IOException;
import java.util.Arrays;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.tokenize.TokenSample;
import opennlp.tools.tokenize.TokenizerEvaluator;
@@ -33,15 +34,10 @@ import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.Span;
import opennlp.tools.util.TrainingParameters;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.fail;
-
public class MascTokenSampleStreamTest {
@Test
- public void read() {
+ void read() {
try {
FileFilter fileFilter = pathname -> pathname.getName().contains("MASC");
File directory = new File(this.getClass().getResource(
@@ -53,7 +49,7 @@ public class MascTokenSampleStreamTest {
TokenSample s = stream.read();
String expectedString = "This is a test Sentence.";
- assertEquals(expectedString, s.getText());
+ Assertions.assertEquals(expectedString, s.getText());
Span[] expectedTags = {
new Span(0, 4),
@@ -62,11 +58,11 @@ public class MascTokenSampleStreamTest {
new Span(10, 14),
new Span(15, 23),
new Span(23, 24)};
- assertArrayEquals(expectedTags, s.getTokenSpans());
+ Assertions.assertArrayEquals(expectedTags, s.getTokenSpans());
s = stream.read();
String expectedTokens = "This is 'nother test sentence.";
- assertEquals(expectedTokens, s.getText());
+ Assertions.assertEquals(expectedTokens, s.getText());
expectedTags = new Span[] {
new Span(0, 4),
@@ -75,14 +71,14 @@ public class MascTokenSampleStreamTest {
new Span(16, 20),
new Span(21, 29),
new Span(29, 30)};
- assertArrayEquals(expectedTags, s.getTokenSpans());
+ Assertions.assertArrayEquals(expectedTags, s.getTokenSpans());
} catch (IOException e) {
- fail("IO Exception: " + e.getMessage());
+ Assertions.fail("IO Exception: " + e.getMessage());
}
}
@Test
- public void close() {
+ void close() {
try {
FileFilter fileFilter = pathname -> pathname.getName().contains("MASC");
File directory = new File(this.getClass().getResource(
@@ -94,14 +90,14 @@ public class MascTokenSampleStreamTest {
stream.close();
TokenSample s = stream.read();
} catch (IOException e) {
- assertEquals(e.getMessage(),
+ Assertions.assertEquals(e.getMessage(),
"You are reading an empty document stream. " +
"Did you close it?");
}
}
@Test
- public void reset() {
+ void reset() {
try {
FileFilter fileFilter = pathname -> pathname.getName().contains("MASC");
File directory = new File(this.getClass().getResource(
@@ -113,14 +109,14 @@ public class MascTokenSampleStreamTest {
TokenSample s = stream.read();
s = stream.read();
s = stream.read();
- assertNull(s); //The stream should be exhausted by now
+ Assertions.assertNull(s); //The stream should be exhausted by now
stream.reset();
s = stream.read();
String expectedString = "This is a test Sentence.";
- assertEquals(expectedString, s.getText());
+ Assertions.assertEquals(expectedString, s.getText());
Span[] expectedTags = {
new Span(0, 4),
@@ -129,16 +125,16 @@ public class MascTokenSampleStreamTest {
new Span(10, 14),
new Span(15, 23),
new Span(23, 24)};
- assertArrayEquals(expectedTags, s.getTokenSpans());
+ Assertions.assertArrayEquals(expectedTags, s.getTokenSpans());
} catch (IOException e) {
- fail("IO Exception: " + e.getMessage());
+ Assertions.fail("IO Exception: " + e.getMessage());
}
}
@Test
- public void train() {
+ void train() {
try {
File directory = new File(this.getClass().getResource(
"/opennlp/tools/formats/masc/").getFile());
@@ -165,7 +161,7 @@ public class MascTokenSampleStreamTest {
} catch (Exception e) {
System.err.println(e.getMessage());
System.err.println(Arrays.toString(e.getStackTrace()));
- fail("Exception raised");
+ Assertions.fail("Exception raised");
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/muc/DocumentSplitterStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/muc/DocumentSplitterStreamTest.java
index 24629f51..acbb15d2 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/muc/DocumentSplitterStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/muc/DocumentSplitterStreamTest.java
@@ -19,8 +19,8 @@ package opennlp.tools.formats.muc;
import java.io.IOException;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.ObjectStreamUtils;
@@ -28,7 +28,7 @@ import opennlp.tools.util.ObjectStreamUtils;
public class DocumentSplitterStreamTest {
@Test
- public void testSplitTwoDocuments() throws IOException {
+ void testSplitTwoDocuments() throws IOException {
StringBuilder docsString = new StringBuilder();
@@ -41,15 +41,15 @@ public class DocumentSplitterStreamTest {
try (ObjectStream<String> docs = new DocumentSplitterStream(
ObjectStreamUtils.createObjectStream(docsString.toString()))) {
String doc1 = docs.read();
- Assert.assertEquals(docsString.length() / 2, doc1.length() + 1);
- Assert.assertTrue(doc1.contains("#0"));
+ Assertions.assertEquals(docsString.length() / 2, doc1.length() + 1);
+ Assertions.assertTrue(doc1.contains("#0"));
String doc2 = docs.read();
- Assert.assertEquals(docsString.length() / 2, doc2.length() + 1);
- Assert.assertTrue(doc2.contains("#1"));
+ Assertions.assertEquals(docsString.length() / 2, doc2.length() + 1);
+ Assertions.assertTrue(doc2.contains("#1"));
- Assert.assertNull(docs.read());
- Assert.assertNull(docs.read());
+ Assertions.assertNull(docs.read());
+ Assertions.assertNull(docs.read());
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/muc/SgmlParserTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/muc/SgmlParserTest.java
index 8cc3fa00..1aca7a9c 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/muc/SgmlParserTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/muc/SgmlParserTest.java
@@ -22,12 +22,12 @@ import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
public class SgmlParserTest {
@Test
- public void testParse1() throws IOException {
+ void testParse1() throws IOException {
try (Reader in = new InputStreamReader(
SgmlParserTest.class.getResourceAsStream("parsertest1.sgml"), StandardCharsets.UTF_8)) {
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/nkjp/NKJPSegmentationDocumentTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/nkjp/NKJPSegmentationDocumentTest.java
index 226b92ce..43debb94 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/nkjp/NKJPSegmentationDocumentTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/nkjp/NKJPSegmentationDocumentTest.java
@@ -20,29 +20,28 @@ package opennlp.tools.formats.nkjp;
import java.io.IOException;
import java.io.InputStream;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class NKJPSegmentationDocumentTest {
@Test
- public void testParsingSimpleDoc() throws IOException {
+ void testParsingSimpleDoc() throws IOException {
try (InputStream nkjpSegXmlIn =
- NKJPSegmentationDocumentTest.class.getResourceAsStream("ann_segmentation.xml")) {
+ NKJPSegmentationDocumentTest.class.getResourceAsStream("ann_segmentation.xml")) {
NKJPSegmentationDocument doc = NKJPSegmentationDocument.parse(nkjpSegXmlIn);
- assertEquals(1, doc.getSegments().size());
+ Assertions.assertEquals(1, doc.getSegments().size());
- assertEquals(7, doc.getSegments().get("segm_1.1-s").size());
+ Assertions.assertEquals(7, doc.getSegments().get("segm_1.1-s").size());
String src = "To krótkie zdanie w drugim akapicie.";
int offset = doc.getSegments().get("segm_1.1-s").get("segm_1.1-seg").offset;
- assertEquals(0, offset);
+ Assertions.assertEquals(0, offset);
int length = doc.getSegments().get("segm_1.1-s").get("segm_1.1-seg").length;
- assertEquals(2, length);
- assertEquals("To", src.substring(offset, length));
+ Assertions.assertEquals(2, length);
+ Assertions.assertEquals("To", src.substring(offset, length));
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/formats/nkjp/NKJPTextDocumentTest.java b/opennlp-tools/src/test/java/opennlp/tools/formats/nkjp/NKJPTextDocumentTest.java
index 760af897..efe0f420 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/formats/nkjp/NKJPTextDocumentTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/formats/nkjp/NKJPTextDocumentTest.java
@@ -20,38 +20,37 @@ package opennlp.tools.formats.nkjp;
import java.io.InputStream;
import java.util.Map;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class NKJPTextDocumentTest {
@Test
- public void testParsingSimpleDoc() throws Exception {
+ void testParsingSimpleDoc() throws Exception {
try (InputStream nkjpTextXmlIn =
- NKJPTextDocumentTest.class.getResourceAsStream("text_structure.xml")) {
+ NKJPTextDocumentTest.class.getResourceAsStream("text_structure.xml")) {
NKJPTextDocument doc = NKJPTextDocument.parse(nkjpTextXmlIn);
- assertEquals(1, doc.getDivtypes().size());
- assertEquals("article", doc.getDivtypes().get("div-1"));
+ Assertions.assertEquals(1, doc.getDivtypes().size());
+ Assertions.assertEquals("article", doc.getDivtypes().get("div-1"));
- assertEquals(1, doc.getTexts().size());
- assertEquals(1, doc.getTexts().get("text-1").size());
- assertEquals(2, doc.getTexts().get("text-1").get("div-1").size());
+ Assertions.assertEquals(1, doc.getTexts().size());
+ Assertions.assertEquals(1, doc.getTexts().get("text-1").size());
+ Assertions.assertEquals(2, doc.getTexts().get("text-1").get("div-1").size());
String exp = "To krótki tekst w formacie NKJP. Zawiera dwa zdania.";
- assertEquals(exp, doc.getTexts().get("text-1").get("div-1").get("p-1"));
+ Assertions.assertEquals(exp, doc.getTexts().get("text-1").get("div-1").get("p-1"));
}
}
@Test
- public void testGetParagraphs() throws Exception {
+ void testGetParagraphs() throws Exception {
try (InputStream nkjpTextXmlIn =
- NKJPTextDocumentTest.class.getResourceAsStream("text_structure.xml")) {
+ NKJPTextDocumentTest.class.getResourceAsStream("text_structure.xml")) {
NKJPTextDocument doc = NKJPTextDocument.parse(nkjpTextXmlIn);
Map<String, String> paras = doc.getParagraphs();
- assertEquals("To krótkie zdanie w drugim akapicie.", paras.get("ab-1"));
+ Assertions.assertEquals("To krótkie zdanie w drugim akapicie.", paras.get("ab-1"));
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/langdetect/DefaultLanguageDetectorContextGeneratorTest.java b/opennlp-tools/src/test/java/opennlp/tools/langdetect/DefaultLanguageDetectorContextGeneratorTest.java
index 29f45a58..53d01e30 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/langdetect/DefaultLanguageDetectorContextGeneratorTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/langdetect/DefaultLanguageDetectorContextGeneratorTest.java
@@ -20,24 +20,24 @@ package opennlp.tools.langdetect;
import java.util.Arrays;
import java.util.Collection;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class DefaultLanguageDetectorContextGeneratorTest {
@Test
- public void extractContext() throws Exception {
+ void extractContext() {
String doc = "abcde fghijk";
LanguageDetectorContextGenerator cg = new DefaultLanguageDetectorContextGenerator(1, 3);
Collection<String> features = Arrays.asList(cg.getContext(doc));
- Assert.assertEquals(33, features.size());
- Assert.assertTrue(features.contains("ab"));
- Assert.assertTrue(features.contains("abc"));
- Assert.assertTrue(features.contains("e f"));
- Assert.assertTrue(features.contains(" fg"));
+ Assertions.assertEquals(33, features.size());
+ Assertions.assertTrue(features.contains("ab"));
+ Assertions.assertTrue(features.contains("abc"));
+ Assertions.assertTrue(features.contains("e f"));
+ Assertions.assertTrue(features.contains(" fg"));
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorCrossValidatorTest.java b/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorCrossValidatorTest.java
index 520fc717..8b004dd4 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorCrossValidatorTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorCrossValidatorTest.java
@@ -19,8 +19,9 @@ package opennlp.tools.langdetect;
import java.util.concurrent.atomic.AtomicInteger;
-import org.junit.Assert;
-import org.junit.Test;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.TrainingParameters;
@@ -57,8 +58,8 @@ public class LanguageDetectorCrossValidatorTest {
cv.evaluate(sampleStream, 2);
- Assert.assertEquals(99, cv.getDocumentCount());
- Assert.assertEquals(0.98989898989899, cv.getDocumentAccuracy(), 0.01);
+ Assertions.assertEquals(99, cv.getDocumentCount());
+ Assertions.assertEquals(0.98989898989899, cv.getDocumentAccuracy(), 0.01);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorEvaluatorTest.java b/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorEvaluatorTest.java
index dd89cda9..e0a02524 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorEvaluatorTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorEvaluatorTest.java
@@ -21,8 +21,8 @@ import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.atomic.AtomicInteger;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.cmdline.langdetect.LanguageDetectorEvaluationErrorListener;
@@ -30,7 +30,7 @@ import opennlp.tools.cmdline.langdetect.LanguageDetectorEvaluationErrorListener;
public class LanguageDetectorEvaluatorTest {
@Test
- public void processSample() throws Exception {
+ void processSample() throws Exception {
LanguageDetectorModel model = LanguageDetectorMETest.trainModel();
LanguageDetectorME langdetector = new LanguageDetectorME(model);
@@ -64,17 +64,17 @@ public class LanguageDetectorEvaluatorTest {
"escreve e faz palestras pelo mundo inteiro sobre anjos"));
- Assert.assertEquals(1, correctCount.get());
- Assert.assertEquals(2, incorrectCount.get());
+ Assertions.assertEquals(1, correctCount.get());
+ Assertions.assertEquals(2, incorrectCount.get());
- Assert.assertEquals(3, evaluator.getDocumentCount());
- Assert.assertEquals(0.33, evaluator.getAccuracy(), 0.01);
+ Assertions.assertEquals(3, evaluator.getDocumentCount());
+ Assertions.assertEquals(0.33, evaluator.getAccuracy(), 0.01);
String report = outputStream.toString(StandardCharsets.UTF_8.name());
- Assert.assertEquals("Expected\tPredicted\tContext" + System.lineSeparator() +
+ Assertions.assertEquals("Expected\tPredicted\tContext" + System.lineSeparator() +
"fra\tpob\tescreve e faz palestras pelo mundo inteiro sobre anjos" + System.lineSeparator() +
- "fra\tpob\tescreve e faz palestras pelo mundo inteiro sobre anjos" + System.lineSeparator(), report);
+ "fra\tpob\tescreve e faz palestras pelo mundo inteiro sobre anjos" + System.lineSeparator(), report);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorFactoryTest.java b/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorFactoryTest.java
index 13cbe360..a176bb7f 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorFactoryTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorFactoryTest.java
@@ -25,9 +25,9 @@ import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.util.PlainTextByLineStream;
@@ -38,8 +38,8 @@ public class LanguageDetectorFactoryTest {
private static LanguageDetectorModel model;
- @BeforeClass
- public static void train() throws Exception {
+ @BeforeAll
+ static void train() throws Exception {
ResourceAsStreamFactory streamFactory = new ResourceAsStreamFactory(
LanguageDetectorMETest.class, "/opennlp/tools/doccat/DoccatSample.txt");
@@ -57,35 +57,35 @@ public class LanguageDetectorFactoryTest {
}
@Test
- public void testCorrectFactory() throws IOException {
+ void testCorrectFactory() throws IOException {
byte[] serialized = LanguageDetectorMETest.serializeModel(model);
LanguageDetectorModel myModel = new LanguageDetectorModel(new ByteArrayInputStream(serialized));
- Assert.assertTrue(myModel.getFactory() instanceof DummyFactory);
+ Assertions.assertTrue(myModel.getFactory() instanceof DummyFactory);
}
@Test
- public void testDummyFactory() throws Exception {
+ void testDummyFactory() throws Exception {
byte[] serialized = LanguageDetectorMETest.serializeModel(model);
LanguageDetectorModel myModel = new LanguageDetectorModel(new ByteArrayInputStream(serialized));
- Assert.assertTrue(myModel.getFactory() instanceof DummyFactory);
+ Assertions.assertTrue(myModel.getFactory() instanceof DummyFactory);
}
@Test
- public void testDummyFactoryContextGenerator() throws Exception {
+ void testDummyFactoryContextGenerator() {
LanguageDetectorContextGenerator cg = model.getFactory().getContextGenerator();
String[] context = cg.getContext(
"a dummy text phrase to test if the context generator works!!!!!!!!!!!!");
Set<String> set = new HashSet(Arrays.asList(context));
- Assert.assertTrue(set.contains("!!!!!")); // default normalizer would remove the repeated !
- Assert.assertTrue(set.contains("a dum"));
- Assert.assertTrue(set.contains("tg=[THE,CONTEXT,GENERATOR]"));
+ Assertions.assertTrue(set.contains("!!!!!")); // default normalizer would remove the repeated !
+ Assertions.assertTrue(set.contains("a dum"));
+ Assertions.assertTrue(set.contains("tg=[THE,CONTEXT,GENERATOR]"));
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorMETest.java b/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorMETest.java
index 56ac1ed2..f7474d1b 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorMETest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageDetectorMETest.java
@@ -21,9 +21,9 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.util.PlainTextByLineStream;
@@ -34,27 +34,27 @@ public class LanguageDetectorMETest {
private LanguageDetectorModel model;
- @Before
- public void init() throws Exception {
+ @BeforeEach
+ void init() throws Exception {
this.model = trainModel();
}
@Test
- public void testPredictLanguages() {
+ void testPredictLanguages() {
LanguageDetector ld = new LanguageDetectorME(this.model);
Language[] languages = ld.predictLanguages("estava em uma marcenaria na Rua Bruno");
- Assert.assertEquals(4, languages.length);
- Assert.assertEquals("pob", languages[0].getLang());
- Assert.assertEquals("ita", languages[1].getLang());
- Assert.assertEquals("spa", languages[2].getLang());
- Assert.assertEquals("fra", languages[3].getLang());
+ Assertions.assertEquals(4, languages.length);
+ Assertions.assertEquals("pob", languages[0].getLang());
+ Assertions.assertEquals("ita", languages[1].getLang());
+ Assertions.assertEquals("spa", languages[2].getLang());
+ Assertions.assertEquals("fra", languages[3].getLang());
}
@Test
- public void testProbingPredictLanguages() {
+ void testProbingPredictLanguages() {
LanguageDetectorME ld = new LanguageDetectorME(this.model);
for (int i = 0; i < 10000; i += 1000) {
StringBuilder sb = new StringBuilder();
@@ -62,40 +62,40 @@ public class LanguageDetectorMETest {
sb.append("estava em uma marcenaria na Rua Bruno ");
}
ProbingLanguageDetectionResult result = ld.probingPredictLanguages(sb.toString());
- Assert.assertTrue(result.getLength() <= 600);
+ Assertions.assertTrue(result.getLength() <= 600);
Language[] languages = result.getLanguages();
- Assert.assertEquals(4, languages.length);
- Assert.assertEquals("pob", languages[0].getLang());
- Assert.assertEquals("ita", languages[1].getLang());
- Assert.assertEquals("spa", languages[2].getLang());
- Assert.assertEquals("fra", languages[3].getLang());
+ Assertions.assertEquals(4, languages.length);
+ Assertions.assertEquals("pob", languages[0].getLang());
+ Assertions.assertEquals("ita", languages[1].getLang());
+ Assertions.assertEquals("spa", languages[2].getLang());
+ Assertions.assertEquals("fra", languages[3].getLang());
}
}
@Test
- public void testPredictLanguage() {
+ void testPredictLanguage() {
LanguageDetector ld = new LanguageDetectorME(this.model);
Language language = ld.predictLanguage("Dove è meglio che giochi");
- Assert.assertEquals("ita", language.getLang());
+ Assertions.assertEquals("ita", language.getLang());
}
@Test
- public void testSupportedLanguages() {
+ void testSupportedLanguages() {
LanguageDetector ld = new LanguageDetectorME(this.model);
String[] supportedLanguages = ld.getSupportedLanguages();
- Assert.assertEquals(4, supportedLanguages.length);
+ Assertions.assertEquals(4, supportedLanguages.length);
}
@Test
- public void testLoadFromSerialized() throws IOException {
+ void testLoadFromSerialized() throws IOException {
byte[] serialized = serializeModel(model);
LanguageDetectorModel myModel = new LanguageDetectorModel(new ByteArrayInputStream(serialized));
- Assert.assertNotNull(myModel);
+ Assertions.assertNotNull(myModel);
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageSampleTest.java b/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageSampleTest.java
index 5b7aac08..4a1a269c 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageSampleTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageSampleTest.java
@@ -25,24 +25,24 @@ import java.io.ObjectInputStream;
import java.io.ObjectOutput;
import java.io.ObjectOutputStream;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class LanguageSampleTest {
@Test
- public void testConstructor() {
+ void testConstructor() {
Language lang = new Language("aLang");
CharSequence context = "aContext";
LanguageSample sample = new LanguageSample(lang, context);
- Assert.assertEquals(lang, sample.getLanguage());
- Assert.assertEquals(context, sample.getContext());
+ Assertions.assertEquals(lang, sample.getLanguage());
+ Assertions.assertEquals(context, sample.getContext());
}
@Test
- public void testLanguageSampleSerDe() throws IOException {
+ void testLanguageSampleSerDe() throws IOException {
Language lang = new Language("aLang");
CharSequence context = "aContext";
@@ -64,61 +64,67 @@ public class LanguageSampleTest {
// do nothing
}
- Assert.assertNotNull(deSerializedLanguageSample);
- Assert.assertEquals(languageSample.getContext(), deSerializedLanguageSample.getContext());
- Assert.assertEquals(languageSample.getLanguage(), deSerializedLanguageSample.getLanguage());
- Assert.assertEquals(languageSample, deSerializedLanguageSample);
+ Assertions.assertNotNull(deSerializedLanguageSample);
+ Assertions.assertEquals(languageSample.getContext(), deSerializedLanguageSample.getContext());
+ Assertions.assertEquals(languageSample.getLanguage(), deSerializedLanguageSample.getLanguage());
+ Assertions.assertEquals(languageSample, deSerializedLanguageSample);
}
- @Test(expected = NullPointerException.class)
- public void testNullLang() throws Exception {
- CharSequence context = "aContext";
+ @Test
+ void testNullLang() {
+ Assertions.assertThrows(NullPointerException.class, () -> {
+ CharSequence context = "aContext";
+
+ new LanguageSample(null, context);
+ });
- new LanguageSample(null, context);
}
- @Test(expected = NullPointerException.class)
- public void testNullContext() {
- Language lang = new Language("aLang");
+ @Test
+ void testNullContext() {
+ Assertions.assertThrows(NullPointerException.class, () -> {
+ Language lang = new Language("aLang");
+
+ new LanguageSample(lang, null);
+ });
- new LanguageSample(lang, null);
}
@Test
- public void testToString() {
+ void testToString() {
Language lang = new Language("aLang");
CharSequence context = "aContext";
LanguageSample sample = new LanguageSample(lang, context);
- Assert.assertEquals(lang.getLang() + "\t" + context, sample.toString());
+ Assertions.assertEquals(lang.getLang() + "\t" + context, sample.toString());
}
@Test
- public void testHash() {
+ void testHash() {
int hashA = new LanguageSample(new Language("aLang"), "aContext").hashCode();
int hashB = new LanguageSample(new Language("bLang"), "aContext").hashCode();
int hashC = new LanguageSample(new Language("aLang"), "bContext").hashCode();
- Assert.assertNotEquals(hashA, hashB);
- Assert.assertNotEquals(hashA, hashC);
- Assert.assertNotEquals(hashB, hashC);
+ Assertions.assertNotEquals(hashA, hashB);
+ Assertions.assertNotEquals(hashA, hashC);
+ Assertions.assertNotEquals(hashB, hashC);
}
@Test
- public void testEquals() throws Exception {
+ void testEquals() {
LanguageSample sampleA = new LanguageSample(new Language("aLang"), "aContext");
LanguageSample sampleA1 = new LanguageSample(new Language("aLang"), "aContext");
LanguageSample sampleB = new LanguageSample(new Language("bLang"), "aContext");
LanguageSample sampleC = new LanguageSample(new Language("aLang"), "bContext");
- Assert.assertEquals(sampleA, sampleA);
- Assert.assertEquals(sampleA, sampleA1);
- Assert.assertNotEquals(sampleA, sampleB);
- Assert.assertNotEquals(sampleA, sampleC);
- Assert.assertNotEquals(sampleB, sampleC);
- Assert.assertNotEquals(sampleA, "something else");
+ Assertions.assertEquals(sampleA, sampleA);
+ Assertions.assertEquals(sampleA, sampleA1);
+ Assertions.assertNotEquals(sampleA, sampleB);
+ Assertions.assertNotEquals(sampleA, sampleC);
+ Assertions.assertNotEquals(sampleB, sampleC);
+ Assertions.assertNotEquals(sampleA, "something else");
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageTest.java b/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageTest.java
index dc25bc6b..8e610da9 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/langdetect/LanguageTest.java
@@ -17,85 +17,88 @@
package opennlp.tools.langdetect;
-import org.junit.Assert;
-import org.junit.Test;
-
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class LanguageTest {
@Test
- public void emptyConfidence() throws Exception {
+ void emptyConfidence() {
String languageCode = "aLanguage";
Language lang = new Language(languageCode);
- Assert.assertEquals(languageCode, lang.getLang());
- Assert.assertEquals(0, lang.getConfidence(), 0);
+ Assertions.assertEquals(languageCode, lang.getLang());
+ Assertions.assertEquals(0, lang.getConfidence(), 0);
}
@Test
- public void nonEmptyConfidence() throws Exception {
+ void nonEmptyConfidence() {
String languageCode = "aLanguage";
double confidence = 0.05;
Language lang = new Language(languageCode, confidence);
- Assert.assertEquals(languageCode, lang.getLang());
- Assert.assertEquals(confidence, lang.getConfidence(), 0);
+ Assertions.assertEquals(languageCode, lang.getLang());
+ Assertions.assertEquals(confidence, lang.getConfidence(), 0);
}
- @Test(expected = NullPointerException.class)
- public void emptyLanguage() throws Exception {
- new Language(null);
+ @Test
+ void emptyLanguage() {
+ Assertions.assertThrows(NullPointerException.class, () -> {
+ new Language(null);
+ });
}
- @Test(expected = NullPointerException.class)
- public void emptyLanguageConfidence() throws Exception {
- new Language(null, 0.05);
+ @Test
+ void emptyLanguageConfidence() {
+ Assertions.assertThrows(NullPointerException.class, () -> {
+ new Language(null, 0.05);
+ });
}
@Test
- public void testToString() {
+ void testToString() {
Language lang = new Language("aLang");
- Assert.assertEquals("aLang (0.0)", lang.toString());
+ Assertions.assertEquals("aLang (0.0)", lang.toString());
lang = new Language("aLang", 0.0886678);
- Assert.assertEquals("aLang (0.0886678)", lang.toString());
+ Assertions.assertEquals("aLang (0.0886678)", lang.toString());
}
@Test
- public void testHash() {
+ void testHash() {
int hashA = new Language("aLang").hashCode();
int hashAA = new Language("aLang").hashCode();
int hashB = new Language("BLang").hashCode();
int hashA5 = new Language("aLang", 5.0).hashCode();
int hashA6 = new Language("BLang", 6.0).hashCode();
- Assert.assertEquals(hashA, hashAA);
+ Assertions.assertEquals(hashA, hashAA);
- Assert.assertNotEquals(hashA, hashB);
- Assert.assertNotEquals(hashA, hashA5);
- Assert.assertNotEquals(hashB, hashA5);
- Assert.assertNotEquals(hashA5, hashA6);
+ Assertions.assertNotEquals(hashA, hashB);
+ Assertions.assertNotEquals(hashA, hashA5);
+ Assertions.assertNotEquals(hashB, hashA5);
+ Assertions.assertNotEquals(hashA5, hashA6);
}
@Test
- public void testEquals() {
+ void testEquals() {
Language langA = new Language("langA");
Language langB = new Language("langB");
Language langA5 = new Language("langA5", 5.0);
Language langA6 = new Language("langA5", 6.0);
- Assert.assertEquals(langA, langA);
- Assert.assertEquals(langA5, langA5);
+ Assertions.assertEquals(langA, langA);
+ Assertions.assertEquals(langA5, langA5);
- Assert.assertNotEquals(langA, langA5);
- Assert.assertNotEquals(langA, langB);
+ Assertions.assertNotEquals(langA, langA5);
+ Assertions.assertNotEquals(langA, langB);
- Assert.assertEquals(langA6, langA5);
+ Assertions.assertEquals(langA6, langA5);
- Assert.assertNotEquals(langA, "something else");
+ Assertions.assertNotEquals(langA, "something else");
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/languagemodel/LanguageModelEvaluationTest.java b/opennlp-tools/src/test/java/opennlp/tools/languagemodel/LanguageModelEvaluationTest.java
index eea0eb69..a63ca575 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/languagemodel/LanguageModelEvaluationTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/languagemodel/LanguageModelEvaluationTest.java
@@ -19,8 +19,8 @@ package opennlp.tools.languagemodel;
import java.util.Collection;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.StringList;
@@ -30,7 +30,7 @@ import opennlp.tools.util.StringList;
public class LanguageModelEvaluationTest {
@Test
- public void testPerplexityComparison() throws Exception {
+ void testPerplexityComparison() {
Collection<String[]> trainingVocabulary =
LanguageModelTestUtils.generateRandomVocabulary(1100000);
@@ -50,7 +50,7 @@ public class LanguageModelEvaluationTest {
}
double bigramPerplexity =
LanguageModelTestUtils.getPerplexity(bigramLM, testVocabulary, 2);
- Assert.assertTrue(unigramPerplexity >= bigramPerplexity);
+ Assertions.assertTrue(unigramPerplexity >= bigramPerplexity);
NGramLanguageModel trigramLM = new NGramLanguageModel(3);
for (String[] sentence : trainingVocabulary) {
@@ -58,7 +58,7 @@ public class LanguageModelEvaluationTest {
}
double trigramPerplexity =
LanguageModelTestUtils.getPerplexity(trigramLM, testVocabulary, 3);
- Assert.assertTrue(bigramPerplexity >= trigramPerplexity);
+ Assertions.assertTrue(bigramPerplexity >= trigramPerplexity);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/languagemodel/LanguageModelTestUtils.java b/opennlp-tools/src/test/java/opennlp/tools/languagemodel/LanguageModelTestUtils.java
index 17d380e6..fb78bc7a 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/languagemodel/LanguageModelTestUtils.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/languagemodel/LanguageModelTestUtils.java
@@ -23,20 +23,20 @@ import java.util.Collection;
import java.util.LinkedList;
import java.util.Random;
-import org.junit.Ignore;
+import org.junit.jupiter.api.Disabled;
import opennlp.tools.ngram.NGramUtils;
/**
* Utility class for language models tests
*/
-@Ignore
+@Disabled
public class LanguageModelTestUtils {
private static final java.math.MathContext CONTEXT = MathContext.DECIMAL128;
private static Random r = new Random();
- private static final char[] chars = new char[]{'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'};
+ private static final char[] chars = new char[] {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'};
public static Collection<String[]> generateRandomVocabulary(int size) {
Collection<String[]> vocabulary = new LinkedList<>();
diff --git a/opennlp-tools/src/test/java/opennlp/tools/languagemodel/NgramLanguageModelTest.java b/opennlp-tools/src/test/java/opennlp/tools/languagemodel/NgramLanguageModelTest.java
index 2091d3f0..896a3b99 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/languagemodel/NgramLanguageModelTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/languagemodel/NgramLanguageModelTest.java
@@ -22,8 +22,8 @@ import java.util.Arrays;
import java.util.List;
import org.apache.commons.io.IOUtils;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.ngram.NGramGenerator;
@@ -35,10 +35,10 @@ public class NgramLanguageModelTest {
@Test
public void testEmptyVocabularyProbability() {
NGramLanguageModel model = new NGramLanguageModel();
- Assert.assertEquals("probability with an empty vocabulary is always 0",
- 0d, model.calculateProbability(""), 0d);
- Assert.assertEquals("probability with an empty vocabulary is always 0",
- 0d, model.calculateProbability("1", "2", "3"), 0d);
+ Assertions.assertEquals(0d, model.calculateProbability(""), 0d,
+ "probability with an empty vocabulary is always 0");
+ Assertions.assertEquals(0d, model.calculateProbability("1", "2", "3"), 0d,
+ "probability with an empty vocabulary is always 0");
}
@Test
@@ -48,8 +48,8 @@ public class NgramLanguageModelTest {
model.add(sentence);
}
double probability = model.calculateProbability(LanguageModelTestUtils.generateRandomSentence());
- Assert.assertTrue("a probability measure should be between 0 and 1 [was "
- + probability + "]", probability >= 0 && probability <= 1);
+ Assertions.assertTrue(probability >= 0 && probability <= 1,
+ "a probability measure should be between 0 and 1 [was " + probability + "]");
}
@Test
@@ -59,12 +59,12 @@ public class NgramLanguageModelTest {
model.add("the", "red", "house");
model.add("I", "saw", "something", "nice");
double probability = model.calculateProbability("I", "saw", "the", "red", "house");
- Assert.assertTrue("a probability measure should be between 0 and 1 [was "
- + probability + "]", probability >= 0 && probability <= 1);
+ Assertions.assertTrue(probability >= 0 && probability <= 1,
+ "a probability measure should be between 0 and 1 [was " + probability + "]");
String[] tokens = model.predictNextTokens("I", "saw");
- Assert.assertNotNull(tokens);
- Assert.assertArrayEquals(new String[] {"the", "fox"}, tokens);
+ Assertions.assertNotNull(tokens);
+ Assertions.assertArrayEquals(new String[] {"the", "fox"}, tokens);
}
@Test
@@ -74,19 +74,19 @@ public class NgramLanguageModelTest {
model.add("<s>", "Sam", "I", "am", "</s>");
model.add("<s>", "I", "do", "not", "like", "green", "eggs", "and", "ham", "</s>");
double probability = model.calculateProbability("<s>", "I");
- Assert.assertEquals(0.666d, probability, 0.001);
+ Assertions.assertEquals(0.666d, probability, 0.001);
probability = model.calculateProbability("Sam", "</s>");
- Assert.assertEquals(0.5d, probability, 0.001);
+ Assertions.assertEquals(0.5d, probability, 0.001);
probability = model.calculateProbability("<s>", "Sam");
- Assert.assertEquals(0.333d, probability, 0.001);
+ Assertions.assertEquals(0.333d, probability, 0.001);
probability = model.calculateProbability("am", "Sam");
- Assert.assertEquals(0.5d, probability, 0.001);
+ Assertions.assertEquals(0.5d, probability, 0.001);
probability = model.calculateProbability("I", "am");
- Assert.assertEquals(0.666d, probability, 0.001);
+ Assertions.assertEquals(0.666d, probability, 0.001);
probability = model.calculateProbability("I", "do");
- Assert.assertEquals(0.333d, probability, 0.001);
+ Assertions.assertEquals(0.333d, probability, 0.001);
probability = model.calculateProbability("I", "am", "Sam");
- Assert.assertEquals(0.333d, probability, 0.001);
+ Assertions.assertEquals(0.333d, probability, 0.001);
}
@Test
@@ -96,12 +96,12 @@ public class NgramLanguageModelTest {
model.add("the", "red", "house");
model.add("I", "saw", "something", "nice");
double probability = model.calculateProbability("I", "saw", "the", "red", "house");
- Assert.assertTrue("a probability measure should be between 0 and 1 [was "
- + probability + "]", probability >= 0 && probability <= 1);
+ Assertions.assertTrue(probability >= 0 && probability <= 1,
+ "a probability measure should be between 0 and 1 [was " + probability + "]");
String[] tokens = model.predictNextTokens("I", "saw");
- Assert.assertNotNull(tokens);
- Assert.assertArrayEquals(new String[] {"something"}, tokens);
+ Assertions.assertNotNull(tokens);
+ Assertions.assertArrayEquals(new String[] {"something"}, tokens);
}
@Test
@@ -111,12 +111,12 @@ public class NgramLanguageModelTest {
model.add("the", "red", "house");
model.add("I", "saw", "something", "nice");
double probability = model.calculateProbability("I", "saw", "the", "red", "house");
- Assert.assertTrue("a probability measure should be between 0 and 1 [was " + probability + "]",
- probability >= 0 && probability <= 1);
+ Assertions.assertTrue(probability >= 0 && probability <= 1,
+ "a probability measure should be between 0 and 1 [was " + probability + "]");
String[] tokens = model.predictNextTokens("I", "saw");
- Assert.assertNotNull(tokens);
- Assert.assertArrayEquals(new String[] {"something"}, tokens);
+ Assertions.assertNotNull(tokens);
+ Assertions.assertArrayEquals(new String[] {"something"}, tokens);
}
@Test
@@ -124,11 +124,11 @@ public class NgramLanguageModelTest {
NGramLanguageModel languageModel = new NGramLanguageModel(getClass().getResourceAsStream(
"/opennlp/tools/ngram/ngram-model.xml"), 3);
double probability = languageModel.calculateProbability("The", "brown", "fox", "jumped");
- Assert.assertTrue("a probability measure should be between 0 and 1 [was " + probability + "]",
- probability >= 0 && probability <= 1);
+ Assertions.assertTrue(probability >= 0 && probability <= 1,
+ "a probability measure should be between 0 and 1 [was " + probability + "]");
String[] tokens = languageModel.predictNextTokens("the", "brown", "fox");
- Assert.assertNotNull(tokens);
- Assert.assertArrayEquals(new String[] {"jumped"}, tokens);
+ Assertions.assertNotNull(tokens);
+ Assertions.assertArrayEquals(new String[] {"jumped"}, tokens);
}
@Test
@@ -149,12 +149,12 @@ public class NgramLanguageModelTest {
}
String[] tokens = languageModel.predictNextTokens("neural",
"network", "language");
- Assert.assertNotNull(tokens);
- Assert.assertArrayEquals(new String[] {"models"}, tokens);
+ Assertions.assertNotNull(tokens);
+ Assertions.assertArrayEquals(new String[] {"models"}, tokens);
double p1 = languageModel.calculateProbability("neural", "network",
"language", "models");
double p2 = languageModel.calculateProbability("neural", "network",
"language", "model");
- Assert.assertTrue(p1 > p2);
+ Assertions.assertTrue(p1 > p2);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/DictionaryLemmatizerMultiTest.java b/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/DictionaryLemmatizerMultiTest.java
index d29830b8..cbb78e9b 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/DictionaryLemmatizerMultiTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/DictionaryLemmatizerMultiTest.java
@@ -20,27 +20,27 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
public class DictionaryLemmatizerMultiTest {
private static DictionaryLemmatizer dictionaryLemmatizer;
- @BeforeClass
- public static void loadDictionary() throws Exception {
+ @BeforeAll
+ static void loadDictionary() throws Exception {
dictionaryLemmatizer = new DictionaryLemmatizer(
DictionaryLemmatizerTest.class.getResourceAsStream(
- "/opennlp/tools/lemmatizer/smalldictionarymulti.dict")
+ "/opennlp/tools/lemmatizer/smalldictionarymulti.dict")
);
}
-
+
@Test
- public void testForNullPointerException() {
- List<String> sentence = Arrays.asList("The","dogs","were","running","and","barking",
- "down","the","street");
- List<String> sentencePOS = Arrays.asList("DT","NNS","VBD","VBG","CC","VBG","RP","DT","NN");
+ void testForNullPointerException() {
+ List<String> sentence = Arrays.asList("The", "dogs", "were", "running", "and", "barking",
+ "down", "the", "street");
+ List<String> sentencePOS = Arrays.asList("DT", "NNS", "VBD", "VBG", "CC", "VBG", "RP", "DT", "NN");
List<List<String>> expectedLemmas = new ArrayList<>();
expectedLemmas.add(Arrays.asList("the"));
expectedLemmas.add(Arrays.asList("dog"));
@@ -51,13 +51,14 @@ public class DictionaryLemmatizerMultiTest {
expectedLemmas.add(Arrays.asList("down"));
expectedLemmas.add(Arrays.asList("the"));
expectedLemmas.add(Arrays.asList("street"));
-
+
List<List<String>> actualLemmas = dictionaryLemmatizer.lemmatize(sentence, sentencePOS);
-
+
for (int i = 0; i < sentence.size(); i++) {
// don't compare cases where the word is not in the dictionary...
- if (!actualLemmas.get(0).get(0).equals("O"))
- Assert.assertEquals(expectedLemmas.get(i), actualLemmas.get(i));
+ if (!actualLemmas.get(0).get(0).equals("O")) {
+ Assertions.assertEquals(expectedLemmas.get(i), actualLemmas.get(i));
+ }
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/DictionaryLemmatizerTest.java b/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/DictionaryLemmatizerTest.java
index 6cf72cff..add85b2d 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/DictionaryLemmatizerTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/DictionaryLemmatizerTest.java
@@ -17,32 +17,35 @@
package opennlp.tools.lemmatizer;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
public class DictionaryLemmatizerTest {
private static DictionaryLemmatizer dictionaryLemmatizer;
- @BeforeClass
- public static void loadDictionary() throws Exception {
+ @BeforeAll
+ static void loadDictionary() throws Exception {
dictionaryLemmatizer = new DictionaryLemmatizer(
- DictionaryLemmatizerTest.class.getResourceAsStream("/opennlp/tools/lemmatizer/smalldictionary.dict")
+ DictionaryLemmatizerTest.class.getResourceAsStream("/opennlp/tools/lemmatizer/smalldictionary.dict")
);
}
-
+
@Test
- public void testForNullPointerException() {
- String[] sentence = new String[]{"The","dogs","were","running","and","barking","down","the","street"};
- String[] sentencePOS = new String[]{"DT","NNS","VBD","VBG","CC","VBG","RP","DT","NN"};
- String[] expectedLemma = new String[]{"the","dog","is","run","and","bark","down","the","street"};
-
+ void testForNullPointerException() {
+ String[] sentence = new String[] {"The", "dogs", "were", "running", "and", "barking",
+ "down", "the", "street"};
+ String[] sentencePOS = new String[] {"DT", "NNS", "VBD", "VBG", "CC", "VBG", "RP", "DT", "NN"};
+ String[] expectedLemma = new String[] {"the", "dog", "is", "run", "and", "bark", "down", "the", "street"};
+
String[] actualLemma = dictionaryLemmatizer.lemmatize(sentence, sentencePOS);
-
- for (int i = 0;i < sentence.length;i++) {
+
+ for (int i = 0; i < sentence.length; i++) {
// don't compare cases where the word is not in the dictionary...
- if (!actualLemma[i].equals("O")) Assert.assertEquals(expectedLemma[i], actualLemma[i]);
+ if (!actualLemma[i].equals("O")) {
+ Assertions.assertEquals(expectedLemma[i], actualLemma[i]);
+ }
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmaSampleTest.java b/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmaSampleTest.java
index bfb6cd23..c6d8bed9 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmaSampleTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmaSampleTest.java
@@ -27,34 +27,36 @@ import java.io.ObjectOutput;
import java.io.ObjectOutputStream;
import java.io.StringReader;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class LemmaSampleTest {
- @Test(expected = IllegalArgumentException.class)
- public void testParameterValidation() {
- new LemmaSample(new String[] { "" }, new String[] { "" },
- new String[] { "test", "one element to much" });
+ @Test
+ void testParameterValidation() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ new LemmaSample(new String[] {""}, new String[] {""},
+ new String[] {"test", "one element to much"});
+ });
}
private static String[] createSentence() {
- return new String[] { "Forecasts", "for", "the", "trade", "figures",
- "range", "widely", "." };
+ return new String[] {"Forecasts", "for", "the", "trade", "figures",
+ "range", "widely", "."};
}
private static String[] createTags() {
- return new String[] { "NNS", "IN", "DT", "NN", "NNS", "VBP", "RB", "." };
+ return new String[] {"NNS", "IN", "DT", "NN", "NNS", "VBP", "RB", "."};
}
private static String[] createLemmas() {
- return new String[] { "Forecast", "for", "the", "trade", "figure", "range",
- "widely", "." };
+ return new String[] {"Forecast", "for", "the", "trade", "figure", "range",
+ "widely", "."};
}
@Test
- public void testLemmaSampleSerDe() throws IOException {
+ void testLemmaSampleSerDe() throws IOException {
LemmaSample lemmaSample = createGoldSample();
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
ObjectOutput out = new ObjectOutputStream(byteArrayOutputStream);
@@ -72,23 +74,23 @@ public class LemmaSampleTest {
// do nothing
}
- Assert.assertNotNull(deSerializedLemmaSample);
- Assert.assertArrayEquals(lemmaSample.getLemmas(), deSerializedLemmaSample.getLemmas());
- Assert.assertArrayEquals(lemmaSample.getTokens(), deSerializedLemmaSample.getTokens());
- Assert.assertArrayEquals(lemmaSample.getTags(), deSerializedLemmaSample.getTags());
+ Assertions.assertNotNull(deSerializedLemmaSample);
+ Assertions.assertArrayEquals(lemmaSample.getLemmas(), deSerializedLemmaSample.getLemmas());
+ Assertions.assertArrayEquals(lemmaSample.getTokens(), deSerializedLemmaSample.getTokens());
+ Assertions.assertArrayEquals(lemmaSample.getTags(), deSerializedLemmaSample.getTags());
}
@Test
- public void testRetrievingContent() {
+ void testRetrievingContent() {
LemmaSample sample = new LemmaSample(createSentence(), createTags(), createLemmas());
- Assert.assertArrayEquals(createSentence(), sample.getTokens());
- Assert.assertArrayEquals(createTags(), sample.getTags());
- Assert.assertArrayEquals(createLemmas(), sample.getLemmas());
+ Assertions.assertArrayEquals(createSentence(), sample.getTokens());
+ Assertions.assertArrayEquals(createTags(), sample.getTags());
+ Assertions.assertArrayEquals(createLemmas(), sample.getLemmas());
}
@Test
- public void testToString() throws IOException {
+ void testToString() throws IOException {
LemmaSample sample = new LemmaSample(createSentence(), createTags(),
createLemmas());
@@ -101,19 +103,19 @@ public class LemmaSampleTest {
for (int i = 0; i < sentence.length; i++) {
String line = reader.readLine();
String[] parts = line.split("\t");
- Assert.assertEquals(3, parts.length);
- Assert.assertEquals(sentence[i], parts[0]);
- Assert.assertEquals(tags[i], parts[1]);
- Assert.assertEquals(lemmas[i], parts[2]);
+ Assertions.assertEquals(3, parts.length);
+ Assertions.assertEquals(sentence[i], parts[0]);
+ Assertions.assertEquals(tags[i], parts[1]);
+ Assertions.assertEquals(lemmas[i], parts[2]);
}
}
@Test
- public void testEquals() {
- Assert.assertFalse(createGoldSample() == createGoldSample());
- Assert.assertTrue(createGoldSample().equals(createGoldSample()));
- Assert.assertFalse(createPredSample().equals(createGoldSample()));
- Assert.assertFalse(createPredSample().equals(new Object()));
+ void testEquals() {
+ Assertions.assertFalse(createGoldSample() == createGoldSample());
+ Assertions.assertTrue(createGoldSample().equals(createGoldSample()));
+ Assertions.assertFalse(createPredSample().equals(createGoldSample()));
+ Assertions.assertFalse(createPredSample().equals(new Object()));
}
public static LemmaSample createGoldSample() {
diff --git a/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmatizerEvaluatorTest.java b/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmatizerEvaluatorTest.java
index f2e0e11d..f8a49641 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmatizerEvaluatorTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmatizerEvaluatorTest.java
@@ -23,8 +23,8 @@ import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.cmdline.lemmatizer.LemmaEvaluationErrorListener;
import opennlp.tools.util.MockInputStreamFactory;
@@ -47,7 +47,7 @@ public class LemmatizerEvaluatorTest {
* @throws IOException
*/
@Test
- public void testEvaluator() throws IOException {
+ void testEvaluator() throws IOException {
String inPredicted = "opennlp/tools/lemmatizer/output.txt";
String inExpected = "opennlp/tools/lemmatizer/output.txt";
@@ -55,11 +55,11 @@ public class LemmatizerEvaluatorTest {
DummyLemmaSampleStream predictedSample = new DummyLemmaSampleStream(
new PlainTextByLineStream(
- new MockInputStreamFactory(new File(inPredicted)), encoding), true);
+ new MockInputStreamFactory(new File(inPredicted)), encoding), true);
DummyLemmaSampleStream expectedSample = new DummyLemmaSampleStream(
new PlainTextByLineStream(
- new MockInputStreamFactory(new File(inExpected)), encoding), false);
+ new MockInputStreamFactory(new File(inExpected)), encoding), false);
Lemmatizer dummyLemmatizer = new DummyLemmatizer(predictedSample);
@@ -69,8 +69,8 @@ public class LemmatizerEvaluatorTest {
evaluator.evaluate(expectedSample);
- Assert.assertEquals(0.9877049180327869, evaluator.getWordAccuracy(), DELTA);
- Assert.assertNotSame(stream.toString().length(), 0);
+ Assertions.assertEquals(0.9877049180327869, evaluator.getWordAccuracy(), DELTA);
+ Assertions.assertNotSame(0, stream.toString().length());
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmatizerMETest.java b/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmatizerMETest.java
index 53b3511e..40a3757f 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmatizerMETest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmatizerMETest.java
@@ -21,9 +21,9 @@ import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import opennlp.tools.util.InsufficientTrainingDataException;
import opennlp.tools.util.MockInputStreamFactory;
@@ -46,28 +46,29 @@ import opennlp.tools.util.TrainingParameters;
* training sentences and then the computed model is used to predict sentences
* from the training sentences.
*/
+
public class LemmatizerMETest {
private LemmatizerME lemmatizer;
- private static String[] tokens = { "Rockwell", "said", "the", "agreement", "calls", "for",
+ private static String[] tokens = {"Rockwell", "said", "the", "agreement", "calls", "for",
"it", "to", "supply", "200", "additional", "so-called", "shipsets", "for",
- "the", "planes", "." };
+ "the", "planes", "."};
- private static String[] postags = { "NNP", "VBD", "DT", "NN", "VBZ", "IN", "PRP", "TO", "VB",
- "CD", "JJ", "JJ", "NNS", "IN", "DT", "NNS", "." };
+ private static String[] postags = {"NNP", "VBD", "DT", "NN", "VBZ", "IN", "PRP", "TO", "VB",
+ "CD", "JJ", "JJ", "NNS", "IN", "DT", "NNS", "."};
- private static String[] expect = { "rockwell", "say", "the", "agreement", "call", "for",
+ private static String[] expect = {"rockwell", "say", "the", "agreement", "call", "for",
"it", "to", "supply", "200", "additional", "so-called", "shipset", "for",
- "the", "plane", "." };
+ "the", "plane", "."};
- @Before
- public void startup() throws IOException {
+ @BeforeEach
+ void startup() throws IOException {
// train the lemmatizer
ObjectStream<LemmaSample> sampleStream = new LemmaSampleStream(
new PlainTextByLineStream(new MockInputStreamFactory(
- new File("opennlp/tools/lemmatizer/trial.old.tsv")), StandardCharsets.UTF_8));
+ new File("opennlp/tools/lemmatizer/trial.old.tsv")), StandardCharsets.UTF_8));
TrainingParameters params = new TrainingParameters();
params.put(TrainingParameters.ITERATIONS_PARAM, 100);
@@ -80,25 +81,30 @@ public class LemmatizerMETest {
}
@Test
- public void testLemmasAsArray() throws Exception {
+ void testLemmasAsArray() {
String[] lemmas = lemmatizer.lemmatize(tokens, postags);
- Assert.assertArrayEquals(expect, lemmas);
+ Assertions.assertArrayEquals(expect, lemmas);
}
-
- @Test(expected = InsufficientTrainingDataException.class)
- public void testInsufficientData() throws IOException {
-
- ObjectStream<LemmaSample> sampleStream = new LemmaSampleStream(
- new PlainTextByLineStream(new MockInputStreamFactory(
- new File("opennlp/tools/lemmatizer/trial.old-insufficient.tsv")), StandardCharsets.UTF_8));
- TrainingParameters params = new TrainingParameters();
- params.put(TrainingParameters.ITERATIONS_PARAM, 100);
- params.put(TrainingParameters.CUTOFF_PARAM, 5);
+ @Test
+ void testInsufficientData() {
+
+ Assertions.assertThrows(InsufficientTrainingDataException.class, () -> {
+
+ ObjectStream<LemmaSample> sampleStream = new LemmaSampleStream(
+ new PlainTextByLineStream(new MockInputStreamFactory(
+ new File("opennlp/tools/lemmatizer/trial.old-insufficient.tsv")), StandardCharsets.UTF_8));
+
+ TrainingParameters params = new TrainingParameters();
+ params.put(TrainingParameters.ITERATIONS_PARAM, 100);
+ params.put(TrainingParameters.CUTOFF_PARAM, 5);
+
+ LemmatizerME.train("eng", sampleStream, params, new LemmatizerFactory());
+
+ });
- LemmatizerME.train("eng", sampleStream, params, new LemmatizerFactory());
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/ArrayMathTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/ArrayMathTest.java
index 6b50aa5d..82974d27 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/ArrayMathTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/ArrayMathTest.java
@@ -20,96 +20,101 @@ package opennlp.tools.ml;
import java.util.Arrays;
import java.util.Collections;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
public class ArrayMathTest {
@Test
- public void testInnerProductDoubleNaN() throws Exception {
- Assert.assertTrue(Double.isNaN(ArrayMath.innerProduct(null, new double[]{0})));
- Assert.assertTrue(Double.isNaN(ArrayMath.innerProduct(new double[]{0}, null)));
- Assert.assertTrue(Double.isNaN(ArrayMath.innerProduct(new double[]{0, 1, 2}, new double[]{0, 1, 2, 3})));
+ public void testInnerProductDoubleNaN() {
+ Assertions.assertTrue(Double.isNaN(ArrayMath.innerProduct(null, new double[] {0})));
+ Assertions.assertTrue(Double.isNaN(ArrayMath.innerProduct(new double[] {0}, null)));
+ Assertions.assertTrue(Double.isNaN(ArrayMath.innerProduct(new double[] {0, 1, 2},
+ new double[] {0, 1, 2, 3})));
}
@Test
- public void testInnerProduct() throws Exception {
- Assert.assertEquals(0, ArrayMath.innerProduct(new double[] {}, new double[] {}), 0);
- Assert.assertEquals(-1, ArrayMath.innerProduct(new double[] {1}, new double[] {-1}), 0);
- Assert.assertEquals(14, ArrayMath.innerProduct(new double[] {1, 2, 3}, new double[] {1, 2, 3}), 0);
+ public void testInnerProduct() {
+ Assertions.assertEquals(0, ArrayMath.innerProduct(new double[] {}, new double[] {}), 0);
+ Assertions.assertEquals(-1, ArrayMath.innerProduct(new double[] {1}, new double[] {-1}), 0);
+ Assertions.assertEquals(14, ArrayMath.innerProduct(new double[] {1, 2, 3}, new double[] {1, 2, 3}), 0);
}
@Test
- public void testL1Norm() throws Exception {
- Assert.assertEquals(0, ArrayMath.l1norm(new double[]{}), 0);
- Assert.assertEquals(0, ArrayMath.l1norm(new double[] {0}), 0);
- Assert.assertEquals(2, ArrayMath.l1norm(new double[] {1, -1}), 0);
- Assert.assertEquals(55, ArrayMath.l1norm(new double[] {1, -2, 3, -4, 5, -6, 7, -8, 9, -10}), 0);
+ public void testL1Norm() {
+ Assertions.assertEquals(0, ArrayMath.l1norm(new double[] {}), 0);
+ Assertions.assertEquals(0, ArrayMath.l1norm(new double[] {0}), 0);
+ Assertions.assertEquals(2, ArrayMath.l1norm(new double[] {1, -1}), 0);
+ Assertions.assertEquals(55, ArrayMath.l1norm(new double[] {1, -2, 3, -4, 5, -6, 7, -8, 9, -10}), 0);
}
@Test
- public void testL2Norm() throws Exception {
- Assert.assertEquals(0, ArrayMath.l2norm(new double[] {}), 0);
- Assert.assertEquals(0, ArrayMath.l2norm(new double[] {0}), 0);
- Assert.assertEquals(1.41421, ArrayMath.l2norm(new double[] {1, -1}), 0.001);
- Assert.assertEquals(0.54772, ArrayMath.l2norm(new double[] {0.1, -0.2, 0.3, -0.4}), 0.001);
+ public void testL2Norm() {
+ Assertions.assertEquals(0, ArrayMath.l2norm(new double[] {}), 0);
+ Assertions.assertEquals(0, ArrayMath.l2norm(new double[] {0}), 0);
+ Assertions.assertEquals(1.41421, ArrayMath.l2norm(new double[] {1, -1}), 0.001);
+ Assertions.assertEquals(0.54772, ArrayMath.l2norm(new double[] {0.1, -0.2, 0.3, -0.4}), 0.001);
}
@Test
- public void testInvL2Norm() throws Exception {
- Assert.assertEquals(0.70711, ArrayMath.invL2norm(new double[] {1, -1}), 0.001);
- Assert.assertEquals(1.82575, ArrayMath.invL2norm(new double[] {0.1, -0.2, 0.3, -0.4}), 0.001);
+ public void testInvL2Norm() {
+ Assertions.assertEquals(0.70711, ArrayMath.invL2norm(new double[] {1, -1}), 0.001);
+ Assertions.assertEquals(1.82575, ArrayMath.invL2norm(new double[] {0.1, -0.2, 0.3, -0.4}), 0.001);
}
@Test
- public void testLogSumOfExps() throws Exception {
- Assert.assertEquals(0, ArrayMath.logSumOfExps(new double[] {0}), 0);
- Assert.assertEquals(1, ArrayMath.logSumOfExps(new double[] {1}), 0);
- Assert.assertEquals(2.048587, ArrayMath.logSumOfExps(new double[] {-1, 2}), 0.001);
- Assert.assertEquals(1.472216, ArrayMath.logSumOfExps(new double[] {-0.1, 0.2, -0.3, 0.4}), 0.001);
+ public void testLogSumOfExps() {
+ Assertions.assertEquals(0, ArrayMath.logSumOfExps(new double[] {0}), 0);
+ Assertions.assertEquals(1, ArrayMath.logSumOfExps(new double[] {1}), 0);
+ Assertions.assertEquals(2.048587, ArrayMath.logSumOfExps(new double[] {-1, 2}), 0.001);
+ Assertions.assertEquals(1.472216, ArrayMath.logSumOfExps(new double[] {-0.1, 0.2, -0.3, 0.4}), 0.001);
}
@Test
- public void testMax() throws Exception {
- Assert.assertEquals(0, ArrayMath.max(new double[] {0}), 0);
- Assert.assertEquals(0, ArrayMath.max(new double[] {0, 0, 0}), 0);
- Assert.assertEquals(2, ArrayMath.max(new double[] {0, 1, 2}), 0);
- Assert.assertEquals(200, ArrayMath.max(new double[] {100, 200, 2}), 0);
- Assert.assertEquals(300, ArrayMath.max(new double[] {100, 200, 300, -10, -20}), 0);
+ public void testMax() {
+ Assertions.assertEquals(0, ArrayMath.max(new double[] {0}), 0);
+ Assertions.assertEquals(0, ArrayMath.max(new double[] {0, 0, 0}), 0);
+ Assertions.assertEquals(2, ArrayMath.max(new double[] {0, 1, 2}), 0);
+ Assertions.assertEquals(200, ArrayMath.max(new double[] {100, 200, 2}), 0);
+ Assertions.assertEquals(300, ArrayMath.max(new double[] {100, 200, 300, -10, -20}), 0);
}
- @Test(expected = IllegalArgumentException.class)
- public void testArgmaxException1() throws Exception {
- ArrayMath.argmax(null);
+ @Test
+ public void testArgmaxException1() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ ArrayMath.argmax(null);
+ });
}
- @Test(expected = IllegalArgumentException.class)
- public void testArgmaxException2() throws Exception {
- ArrayMath.argmax(new double[]{});
+ @Test
+ public void testArgmaxException2() {
+ Assertions.assertThrows(IllegalArgumentException.class, () -> {
+ ArrayMath.argmax(new double[] {});
+ });
}
@Test
- public void testArgmax() throws Exception {
- Assert.assertEquals(0, ArrayMath.argmax(new double[] {0}));
- Assert.assertEquals(0, ArrayMath.argmax(new double[] {0, 0, 0}));
- Assert.assertEquals(2, ArrayMath.argmax(new double[] {0, 1, 2}));
- Assert.assertEquals(1, ArrayMath.argmax(new double[] {100, 200, 2}));
- Assert.assertEquals(2, ArrayMath.argmax(new double[] {100, 200, 300, -10, -20}));
+ public void testArgmax() {
+ Assertions.assertEquals(0, ArrayMath.argmax(new double[] {0}));
+ Assertions.assertEquals(0, ArrayMath.argmax(new double[] {0, 0, 0}));
+ Assertions.assertEquals(2, ArrayMath.argmax(new double[] {0, 1, 2}));
+ Assertions.assertEquals(1, ArrayMath.argmax(new double[] {100, 200, 2}));
+ Assertions.assertEquals(2, ArrayMath.argmax(new double[] {100, 200, 300, -10, -20}));
}
@Test
- public void testToDoubleArray() throws Exception {
- Assert.assertEquals(0, ArrayMath.toDoubleArray(Collections.EMPTY_LIST).length);
- Assert.assertArrayEquals(new double[] {0}, ArrayMath.toDoubleArray(Arrays.asList(0D)), 0);
- Assert.assertArrayEquals(new double[] {0, 1, -2.5, -0.3, 4},
+ public void testToDoubleArray() {
+ Assertions.assertEquals(0, ArrayMath.toDoubleArray(Collections.EMPTY_LIST).length);
+ Assertions.assertArrayEquals(new double[] {0}, ArrayMath.toDoubleArray(Arrays.asList(0D)), 0);
+ Assertions.assertArrayEquals(new double[] {0, 1, -2.5, -0.3, 4},
ArrayMath.toDoubleArray(Arrays.asList(0D, 1D, -2.5D, -0.3D, 4D)), 0);
}
@Test
- public void testToIntArray() throws Exception {
- Assert.assertEquals(0, ArrayMath.toIntArray(Collections.EMPTY_LIST).length);
- Assert.assertArrayEquals(new int[] {0}, ArrayMath.toIntArray(Arrays.asList(0)));
- Assert.assertArrayEquals(new int[] {0, 1, -2, -3, 4},
+ public void testToIntArray() {
+ Assertions.assertEquals(0, ArrayMath.toIntArray(Collections.EMPTY_LIST).length);
+ Assertions.assertArrayEquals(new int[] {0}, ArrayMath.toIntArray(Arrays.asList(0)));
+ Assertions.assertArrayEquals(new int[] {0, 1, -2, -3, 4},
ArrayMath.toIntArray(Arrays.asList(0, 1, -2, -3, 4)));
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/BeamSearchTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/BeamSearchTest.java
index 46d04404..2e49d666 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/BeamSearchTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/BeamSearchTest.java
@@ -20,8 +20,8 @@ package opennlp.tools.ml;
import java.util.HashMap;
import java.util.Map;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.ml.model.MaxentModel;
import opennlp.tools.util.BeamSearchContextGenerator;
@@ -38,7 +38,7 @@ public class BeamSearchTest {
}
public String[] getContext(int index, String[] sequence,
- String[] priorDecisions, Object[] additionalContext) {
+ String[] priorDecisions, Object[] additionalContext) {
return new String[] {outcomeSequence[index]};
}
}
@@ -70,8 +70,7 @@ public class BeamSearchTest {
for (int i = 0; i < probs.length; i++) {
if (outcomes[i].equals(context[0])) {
probs[i] = bestOutcomeProb;
- }
- else {
+ } else {
probs[i] = otherOutcomeProb;
}
}
@@ -116,7 +115,7 @@ public class BeamSearchTest {
* Tests that beam search does not fail to detect an empty sequence.
*/
@Test
- public void testBestSequenceZeroLengthInput() {
+ void testBestSequenceZeroLengthInput() {
String[] sequence = new String[0];
BeamSearchContextGenerator<String> cg = new IdentityFeatureGenerator(sequence);
@@ -128,16 +127,16 @@ public class BeamSearchTest {
Sequence seq = bs.bestSequence(sequence, null, cg,
(int i, String[] inputSequence, String[] outcomesSequence, String outcome) -> true);
-
- Assert.assertNotNull(seq);
- Assert.assertEquals(sequence.length, seq.getOutcomes().size());
+
+ Assertions.assertNotNull(seq);
+ Assertions.assertEquals(sequence.length, seq.getOutcomes().size());
}
/**
* Tests finding a sequence of length one.
*/
@Test
- public void testBestSequenceOneElementInput() {
+ void testBestSequenceOneElementInput() {
String[] sequence = {"1"};
BeamSearchContextGenerator<String> cg = new IdentityFeatureGenerator(sequence);
@@ -148,18 +147,18 @@ public class BeamSearchTest {
Sequence seq = bs.bestSequence(sequence, null, cg,
(int i, String[] inputSequence, String[] outcomesSequence,
- String outcome) -> true);
+ String outcome) -> true);
- Assert.assertNotNull(seq);
- Assert.assertEquals(sequence.length, seq.getOutcomes().size());
- Assert.assertEquals("1", seq.getOutcomes().get(0));
+ Assertions.assertNotNull(seq);
+ Assertions.assertEquals(sequence.length, seq.getOutcomes().size());
+ Assertions.assertEquals("1", seq.getOutcomes().get(0));
}
/**
* Tests finding the best sequence on a short input sequence.
*/
@Test
- public void testBestSequence() {
+ void testBestSequence() {
String[] sequence = {"1", "2", "3", "2", "1"};
BeamSearchContextGenerator<String> cg = new IdentityFeatureGenerator(sequence);
@@ -170,22 +169,22 @@ public class BeamSearchTest {
Sequence seq = bs.bestSequence(sequence, null, cg,
(int i, String[] inputSequence, String[] outcomesSequence,
- String outcome) -> true);
-
- Assert.assertNotNull(seq);
- Assert.assertEquals(sequence.length, seq.getOutcomes().size());
- Assert.assertEquals("1", seq.getOutcomes().get(0));
- Assert.assertEquals("2", seq.getOutcomes().get(1));
- Assert.assertEquals("3", seq.getOutcomes().get(2));
- Assert.assertEquals("2", seq.getOutcomes().get(3));
- Assert.assertEquals("1", seq.getOutcomes().get(4));
+ String outcome) -> true);
+
+ Assertions.assertNotNull(seq);
+ Assertions.assertEquals(sequence.length, seq.getOutcomes().size());
+ Assertions.assertEquals("1", seq.getOutcomes().get(0));
+ Assertions.assertEquals("2", seq.getOutcomes().get(1));
+ Assertions.assertEquals("3", seq.getOutcomes().get(2));
+ Assertions.assertEquals("2", seq.getOutcomes().get(3));
+ Assertions.assertEquals("1", seq.getOutcomes().get(4));
}
/**
* Tests finding the best sequence on a short input sequence.
*/
@Test
- public void testBestSequenceWithValidator() {
+ void testBestSequenceWithValidator() {
String[] sequence = {"1", "2", "3", "2", "1"};
BeamSearchContextGenerator<String> cg = new IdentityFeatureGenerator(sequence);
@@ -197,12 +196,12 @@ public class BeamSearchTest {
Sequence seq = bs.bestSequence(sequence, null, cg,
(int i, String[] inputSequence, String[] outcomesSequence,
String outcome) -> !"2".equals(outcome));
- Assert.assertNotNull(seq);
- Assert.assertEquals(sequence.length, seq.getOutcomes().size());
- Assert.assertEquals("1", seq.getOutcomes().get(0));
- Assert.assertNotSame("2", seq.getOutcomes().get(1));
- Assert.assertEquals("3", seq.getOutcomes().get(2));
- Assert.assertNotSame("2", seq.getOutcomes().get(3));
- Assert.assertEquals("1", seq.getOutcomes().get(4));
+ Assertions.assertNotNull(seq);
+ Assertions.assertEquals(sequence.length, seq.getOutcomes().size());
+ Assertions.assertEquals("1", seq.getOutcomes().get(0));
+ Assertions.assertNotSame("2", seq.getOutcomes().get(1));
+ Assertions.assertEquals("3", seq.getOutcomes().get(2));
+ Assertions.assertNotSame("2", seq.getOutcomes().get(3));
+ Assertions.assertEquals("1", seq.getOutcomes().get(4));
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/MockEventTrainer.java b/opennlp-tools/src/test/java/opennlp/tools/ml/MockEventTrainer.java
index 0de7c962..242865a2 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/MockEventTrainer.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/MockEventTrainer.java
@@ -17,7 +17,6 @@
package opennlp.tools.ml;
-import java.io.IOException;
import java.util.Map;
import opennlp.tools.ml.model.DataIndexer;
@@ -28,12 +27,12 @@ import opennlp.tools.util.TrainingParameters;
public class MockEventTrainer implements EventTrainer {
- public MaxentModel train(ObjectStream<Event> events) throws IOException {
+ public MaxentModel train(ObjectStream<Event> events) {
return null;
}
@Override
- public MaxentModel train(DataIndexer indexer) throws IOException {
+ public MaxentModel train(DataIndexer indexer) {
return null;
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/MockSequenceTrainer.java b/opennlp-tools/src/test/java/opennlp/tools/ml/MockSequenceTrainer.java
index 19a8aaa6..7f4ad621 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/MockSequenceTrainer.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/MockSequenceTrainer.java
@@ -17,7 +17,6 @@
package opennlp.tools.ml;
-import java.io.IOException;
import java.util.Map;
import opennlp.tools.ml.model.AbstractModel;
@@ -26,7 +25,7 @@ import opennlp.tools.util.TrainingParameters;
public class MockSequenceTrainer implements EventModelSequenceTrainer {
- public AbstractModel train(SequenceStream events) throws IOException {
+ public AbstractModel train(SequenceStream events) {
return null;
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/PrepAttachDataUtil.java b/opennlp-tools/src/test/java/opennlp/tools/ml/PrepAttachDataUtil.java
index 66022c50..51cffdc2 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/PrepAttachDataUtil.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/PrepAttachDataUtil.java
@@ -25,7 +25,7 @@ import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
-import org.junit.Assert;
+import org.junit.jupiter.api.Assertions;
import opennlp.tools.ml.model.Event;
import opennlp.tools.ml.model.MaxentModel;
@@ -87,6 +87,6 @@ public class PrepAttachDataUtil {
double accuracy = correct / (double) total;
System.out.println("Accuracy on PPA devset: (" + correct + "/" + total + ") " + accuracy);
- Assert.assertEquals(expecedAccuracy, accuracy, .00001);
+ Assertions.assertEquals(expecedAccuracy, accuracy, .00001);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/TrainerFactoryTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/TrainerFactoryTest.java
index b08d28a4..9ef9b6be 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/TrainerFactoryTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/TrainerFactoryTest.java
@@ -17,9 +17,9 @@
package opennlp.tools.ml;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import opennlp.tools.ml.TrainerFactory.TrainerType;
import opennlp.tools.ml.maxent.GISTrainer;
@@ -30,8 +30,8 @@ public class TrainerFactoryTest {
private TrainingParameters mlParams;
- @Before
- public void setup() {
+ @BeforeEach
+ void setup() {
mlParams = new TrainingParameters();
mlParams.put(TrainingParameters.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
mlParams.put(TrainingParameters.ITERATIONS_PARAM, 10);
@@ -39,43 +39,43 @@ public class TrainerFactoryTest {
}
@Test
- public void testBuiltInValid() {
- Assert.assertTrue(TrainerFactory.isValid(mlParams));
+ void testBuiltInValid() {
+ Assertions.assertTrue(TrainerFactory.isValid(mlParams));
}
@Test
- public void testSequenceTrainerValid() {
+ void testSequenceTrainerValid() {
mlParams.put(TrainingParameters.ALGORITHM_PARAM, MockSequenceTrainer.class.getCanonicalName());
- Assert.assertTrue(TrainerFactory.isValid(mlParams));
+ Assertions.assertTrue(TrainerFactory.isValid(mlParams));
}
@Test
- public void testEventTrainerValid() {
+ void testEventTrainerValid() {
mlParams.put(TrainingParameters.ALGORITHM_PARAM, MockEventTrainer.class.getCanonicalName());
- Assert.assertTrue(TrainerFactory.isValid(mlParams));
+ Assertions.assertTrue(TrainerFactory.isValid(mlParams));
}
@Test
- public void testInvalidTrainer() {
+ void testInvalidTrainer() {
mlParams.put(TrainingParameters.ALGORITHM_PARAM, "xyz");
- Assert.assertFalse(TrainerFactory.isValid(mlParams));
+ Assertions.assertFalse(TrainerFactory.isValid(mlParams));
}
@Test
- public void testIsSequenceTrainerTrue() {
+ void testIsSequenceTrainerTrue() {
mlParams.put(AbstractTrainer.ALGORITHM_PARAM,
SimplePerceptronSequenceTrainer.PERCEPTRON_SEQUENCE_VALUE);
TrainerType trainerType = TrainerFactory.getTrainerType(mlParams);
- Assert.assertTrue(TrainerType.EVENT_MODEL_SEQUENCE_TRAINER.equals(trainerType));
+ Assertions.assertTrue(TrainerType.EVENT_MODEL_SEQUENCE_TRAINER.equals(trainerType));
}
@Test
- public void testIsSequenceTrainerFalse() {
+ void testIsSequenceTrainerFalse() {
mlParams.put(AbstractTrainer.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
TrainerType trainerType = TrainerFactory.getTrainerType(mlParams);
- Assert.assertFalse(TrainerType.EVENT_MODEL_SEQUENCE_TRAINER.equals(trainerType));
+ Assertions.assertFalse(TrainerType.EVENT_MODEL_SEQUENCE_TRAINER.equals(trainerType));
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/FootballEventStream.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/FootballEventStream.java
index a5b03b4f..0c10ad6a 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/FootballEventStream.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/FootballEventStream.java
@@ -26,22 +26,24 @@ import opennlp.tools.util.PlainTextByLineStream;
public class FootballEventStream implements ObjectStream<Event> {
ObjectStream<String> textStream;
-
+
public FootballEventStream() throws IOException {
textStream = new PlainTextByLineStream(
new URLInputStreamFactory(this.getClass().getResource("/opennlp/tools/ml/maxent/football.dat")),
- StandardCharsets.US_ASCII );
+ StandardCharsets.US_ASCII);
}
-
+
@Override
public Event read() throws IOException {
String line = textStream.read();
- if (line == null) return null;
+ if (line == null) {
+ return null;
+ }
String[] tokens = line.split("\\s+");
-
+
return new Event(tokens[tokens.length - 1], Arrays.copyOf(tokens, tokens.length - 1));
}
-
+
@Override
public void reset() throws IOException, UnsupportedOperationException {
textStream.reset();
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/GISIndexingTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/GISIndexingTest.java
index 03539a1f..6f847fda 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/GISIndexingTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/GISIndexingTest.java
@@ -23,8 +23,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.ml.AbstractEventTrainer;
import opennlp.tools.ml.AbstractTrainer;
@@ -42,12 +42,12 @@ import opennlp.tools.util.model.ModelUtil;
public class GISIndexingTest {
- private static String[][] cntx = new String[][]{
- {"dog","cat","mouse"},
- {"text", "print", "mouse"},
- {"dog", "pig", "cat", "mouse"}
+ private static String[][] cntx = new String[][] {
+ {"dog", "cat", "mouse"},
+ {"text", "print", "mouse"},
+ {"dog", "pig", "cat", "mouse"}
};
- private static String[] outputs = new String[]{"A","B","A"};
+ private static String[] outputs = new String[] {"A", "B", "A"};
private ObjectStream<Event> createEventStream() {
List<Event> events = new ArrayList<>();
@@ -56,19 +56,19 @@ public class GISIndexingTest {
}
return ObjectStreamUtils.createObjectStream(events);
}
-
+
/*
* Test the GIS.trainModel(ObjectStream<Event> eventStream) method
*/
@Test
- public void testGISTrainSignature1() throws IOException {
+ void testGISTrainSignature1() throws IOException {
try (ObjectStream<Event> eventStream = createEventStream()) {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
params.put(AbstractTrainer.CUTOFF_PARAM, 1);
- EventTrainer trainer = TrainerFactory.getEventTrainer(params, null);
+ EventTrainer trainer = TrainerFactory.getEventTrainer(params, null);
- Assert.assertNotNull(trainer.train(eventStream));
+ Assertions.assertNotNull(trainer.train(eventStream));
}
}
@@ -76,22 +76,22 @@ public class GISIndexingTest {
* Test the GIS.trainModel(ObjectStream<Event> eventStream,boolean smoothing) method
*/
@Test
- public void testGISTrainSignature2() throws IOException {
+ void testGISTrainSignature2() throws IOException {
try (ObjectStream<Event> eventStream = createEventStream()) {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
params.put(AbstractTrainer.CUTOFF_PARAM, 1);
params.put("smoothing", true);
EventTrainer trainer = TrainerFactory.getEventTrainer(params, null);
- Assert.assertNotNull(trainer.train(eventStream));
+ Assertions.assertNotNull(trainer.train(eventStream));
}
}
-
+
/*
* Test the GIS.trainModel(ObjectStream<Event> eventStream, int iterations, int cutoff) method
*/
@Test
- public void testGISTrainSignature3() throws IOException {
+ void testGISTrainSignature3() throws IOException {
try (ObjectStream<Event> eventStream = createEventStream()) {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
@@ -100,15 +100,15 @@ public class GISIndexingTest {
EventTrainer trainer = TrainerFactory.getEventTrainer(params, null);
- Assert.assertNotNull(trainer.train(eventStream));
+ Assertions.assertNotNull(trainer.train(eventStream));
}
}
-
+
/*
* Test the GIS.trainModel(ObjectStream<Event> eventStream, int iterations, int cutoff, double sigma) method
*/
@Test
- public void testGISTrainSignature4() throws IOException {
+ void testGISTrainSignature4() throws IOException {
try (ObjectStream<Event> eventStream = createEventStream()) {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
params.put(AbstractTrainer.ITERATIONS_PARAM, 10);
@@ -116,16 +116,16 @@ public class GISIndexingTest {
GISTrainer trainer = (GISTrainer) TrainerFactory.getEventTrainer(params, null);
trainer.setGaussianSigma(0.01);
- Assert.assertNotNull(trainer.trainModel(eventStream));
+ Assertions.assertNotNull(trainer.trainModel(eventStream));
}
}
-
+
/*
- * Test the GIS.trainModel((ObjectStream<Event> eventStream, int iterations, int cutoff,
+ * Test the GIS.trainModel((ObjectStream<Event> eventStream, int iterations, int cutoff,
* boolean smoothing, boolean printMessagesWhileTraining)) method
*/
@Test
- public void testGISTrainSignature5() throws IOException {
+ void testGISTrainSignature5() throws IOException {
try (ObjectStream<Event> eventStream = createEventStream()) {
TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
@@ -135,14 +135,14 @@ public class GISIndexingTest {
params.put(AbstractTrainer.VERBOSE_PARAM, false);
EventTrainer trainer = TrainerFactory.getEventTrainer(params, null);
- Assert.assertNotNull(trainer.train(eventStream));
+ Assertions.assertNotNull(trainer.train(eventStream));
}
}
-
+
@Test
- public void testIndexingWithTrainingParameters() throws IOException {
+ void testIndexingWithTrainingParameters() throws IOException {
ObjectStream<Event> eventStream = createEventStream();
-
+
TrainingParameters parameters = TrainingParameters.defaultParams();
// by default we are using GIS/EventTrainer/Cutoff of 5/100 iterations
parameters.put(TrainingParameters.ITERATIONS_PARAM, 10);
@@ -154,74 +154,74 @@ public class GISIndexingTest {
// guarantee that you have a GIS trainer...
EventTrainer trainer =
TrainerFactory.getEventTrainer(parameters, new HashMap<>());
- Assert.assertEquals("opennlp.tools.ml.maxent.GISTrainer", trainer.getClass().getName());
- AbstractEventTrainer aeTrainer = (AbstractEventTrainer)trainer;
+ Assertions.assertEquals("opennlp.tools.ml.maxent.GISTrainer", trainer.getClass().getName());
+ AbstractEventTrainer aeTrainer = (AbstractEventTrainer) trainer;
// guarantee that you have a OnePassDataIndexer ...
DataIndexer di = aeTrainer.getDataIndexer(eventStream);
- Assert.assertEquals("opennlp.tools.ml.model.OnePassDataIndexer", di.getClass().getName());
- Assert.assertEquals(3, di.getNumEvents());
- Assert.assertEquals(2, di.getOutcomeLabels().length);
- Assert.assertEquals(6, di.getPredLabels().length);
+ Assertions.assertEquals("opennlp.tools.ml.model.OnePassDataIndexer", di.getClass().getName());
+ Assertions.assertEquals(3, di.getNumEvents());
+ Assertions.assertEquals(2, di.getOutcomeLabels().length);
+ Assertions.assertEquals(6, di.getPredLabels().length);
// change the parameters and try again...
eventStream.reset();
-
+
parameters.put(TrainingParameters.ALGORITHM_PARAM, QNTrainer.MAXENT_QN_VALUE);
parameters.put(AbstractEventTrainer.DATA_INDEXER_PARAM, AbstractEventTrainer.DATA_INDEXER_TWO_PASS_VALUE);
parameters.put(AbstractEventTrainer.CUTOFF_PARAM, 2);
-
+
trainer = TrainerFactory.getEventTrainer(parameters, new HashMap<>());
- Assert.assertEquals("opennlp.tools.ml.maxent.quasinewton.QNTrainer", trainer.getClass().getName());
- aeTrainer = (AbstractEventTrainer)trainer;
+ Assertions.assertEquals("opennlp.tools.ml.maxent.quasinewton.QNTrainer", trainer.getClass().getName());
+ aeTrainer = (AbstractEventTrainer) trainer;
di = aeTrainer.getDataIndexer(eventStream);
- Assert.assertEquals("opennlp.tools.ml.model.TwoPassDataIndexer", di.getClass().getName());
-
+ Assertions.assertEquals("opennlp.tools.ml.model.TwoPassDataIndexer", di.getClass().getName());
+
eventStream.close();
}
-
+
@Test
- public void testIndexingFactory() throws IOException {
- Map<String,String> myReportMap = new HashMap<>();
+ void testIndexingFactory() throws IOException {
+ Map<String, String> myReportMap = new HashMap<>();
ObjectStream<Event> eventStream = createEventStream();
// set the cutoff to 1 for this test.
TrainingParameters parameters = new TrainingParameters();
parameters.put(AbstractDataIndexer.CUTOFF_PARAM, 1);
-
+
// test with a 1 pass data indexer...
parameters.put(AbstractEventTrainer.DATA_INDEXER_PARAM, AbstractEventTrainer.DATA_INDEXER_ONE_PASS_VALUE);
DataIndexer di = DataIndexerFactory.getDataIndexer(parameters, myReportMap);
- Assert.assertEquals("opennlp.tools.ml.model.OnePassDataIndexer", di.getClass().getName());
+ Assertions.assertEquals("opennlp.tools.ml.model.OnePassDataIndexer", di.getClass().getName());
di.index(eventStream);
- Assert.assertEquals(3, di.getNumEvents());
- Assert.assertEquals(2, di.getOutcomeLabels().length);
- Assert.assertEquals(6, di.getPredLabels().length);
+ Assertions.assertEquals(3, di.getNumEvents());
+ Assertions.assertEquals(2, di.getOutcomeLabels().length);
+ Assertions.assertEquals(6, di.getPredLabels().length);
eventStream.reset();
-
+
// test with a 2-pass data indexer...
parameters.put(AbstractEventTrainer.DATA_INDEXER_PARAM, AbstractEventTrainer.DATA_INDEXER_TWO_PASS_VALUE);
di = DataIndexerFactory.getDataIndexer(parameters, myReportMap);
- Assert.assertEquals("opennlp.tools.ml.model.TwoPassDataIndexer", di.getClass().getName());
+ Assertions.assertEquals("opennlp.tools.ml.model.TwoPassDataIndexer", di.getClass().getName());
di.index(eventStream);
- Assert.assertEquals(3, di.getNumEvents());
- Assert.assertEquals(2, di.getOutcomeLabels().length);
- Assert.assertEquals(6, di.getPredLabels().length);
+ Assertions.assertEquals(3, di.getNumEvents());
+ Assertions.assertEquals(2, di.getOutcomeLabels().length);
+ Assertions.assertEquals(6, di.getPredLabels().length);
// the rest of the test doesn't actually index, so we can close the eventstream.
eventStream.close();
-
+
// test with a 1-pass Real value dataIndexer
- parameters.put(AbstractEventTrainer.DATA_INDEXER_PARAM,
+ parameters.put(AbstractEventTrainer.DATA_INDEXER_PARAM,
AbstractEventTrainer.DATA_INDEXER_ONE_PASS_REAL_VALUE);
di = DataIndexerFactory.getDataIndexer(parameters, myReportMap);
- Assert.assertEquals("opennlp.tools.ml.model.OnePassRealValueDataIndexer", di.getClass().getName());
-
-
+ Assertions.assertEquals("opennlp.tools.ml.model.OnePassRealValueDataIndexer", di.getClass().getName());
+
+
// test with an UNRegistered MockIndexer
- parameters.put(AbstractEventTrainer.DATA_INDEXER_PARAM, "opennlp.tools.ml.maxent.MockDataIndexer");
+ parameters.put(AbstractEventTrainer.DATA_INDEXER_PARAM, "opennlp.tools.ml.maxent.MockDataIndexer");
di = DataIndexerFactory.getDataIndexer(parameters, myReportMap);
- Assert.assertEquals("opennlp.tools.ml.maxent.MockDataIndexer", di.getClass().getName());
+ Assertions.assertEquals("opennlp.tools.ml.maxent.MockDataIndexer", di.getClass().getName());
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/GISTrainerTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/GISTrainerTest.java
index f2baa174..ff0b89cc 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/GISTrainerTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/GISTrainerTest.java
@@ -20,8 +20,8 @@ package opennlp.tools.ml.maxent;
import java.util.HashMap;
import java.util.Map;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.ml.EventTrainer;
import opennlp.tools.ml.TrainerFactory;
@@ -35,8 +35,8 @@ public class GISTrainerTest {
@SuppressWarnings("unchecked")
@Test
- public void testGaussianSmoothing() throws Exception {
-
+ void testGaussianSmoothing() throws Exception {
+
TrainingParameters params = new TrainingParameters();
params.put("Algorithm", "MAXENT");
params.put("DataIndexer", "OnePass");
@@ -46,34 +46,34 @@ public class GISTrainerTest {
Map<String, String> reportMap = new HashMap<>();
EventTrainer trainer = TrainerFactory.getEventTrainer(params, reportMap);
-
+
ObjectStream<Event> eventStream = new FootballEventStream();
- AbstractModel smoothedModel = (AbstractModel)trainer.train(eventStream);
- Map<String, Context> predMap = (Map<String, Context>)smoothedModel.getDataStructures()[1];
+ AbstractModel smoothedModel = (AbstractModel) trainer.train(eventStream);
+ Map<String, Context> predMap = (Map<String, Context>) smoothedModel.getDataStructures()[1];
- double[] nevilleFalseExpected = new double[] {-0.17,.10,0.05};
- double[] nevilleTrueExpected = new double[] {0.080,-0.047,-0.080};
+ double[] nevilleFalseExpected = new double[] {-0.17, .10, 0.05};
+ double[] nevilleTrueExpected = new double[] {0.080, -0.047, -0.080};
String predicateToTest = "Neville=false";
- Assert.assertArrayEquals(nevilleFalseExpected, predMap.get(predicateToTest).getParameters(), 0.01);
+ Assertions.assertArrayEquals(nevilleFalseExpected, predMap.get(predicateToTest).getParameters(), 0.01);
predicateToTest = "Neville=true";
- Assert.assertArrayEquals(nevilleTrueExpected, predMap.get(predicateToTest).getParameters(), 0.001);
-
+ Assertions.assertArrayEquals(nevilleTrueExpected, predMap.get(predicateToTest).getParameters(), 0.001);
+
eventStream.reset();
params.put("GaussianSmoothing", false);
trainer = TrainerFactory.getEventTrainer(params, reportMap);
- AbstractModel unsmoothedModel = (AbstractModel)trainer.train(eventStream);
- predMap = (Map<String, Context>)unsmoothedModel.getDataStructures()[1];
-
- nevilleFalseExpected = new double[] {-0.19,0.11,0.06};
- nevilleTrueExpected = new double[] {0.081,-0.050,-0.084};
+ AbstractModel unsmoothedModel = (AbstractModel) trainer.train(eventStream);
+ predMap = (Map<String, Context>) unsmoothedModel.getDataStructures()[1];
+
+ nevilleFalseExpected = new double[] {-0.19, 0.11, 0.06};
+ nevilleTrueExpected = new double[] {0.081, -0.050, -0.084};
predicateToTest = "Neville=false";
- Assert.assertArrayEquals(nevilleFalseExpected, predMap.get(predicateToTest).getParameters(), 0.01);
+ Assertions.assertArrayEquals(nevilleFalseExpected, predMap.get(predicateToTest).getParameters(), 0.01);
predicateToTest = "Neville=true";
- Assert.assertArrayEquals(nevilleTrueExpected, predMap.get(predicateToTest).getParameters(), 0.001);
+ Assertions.assertArrayEquals(nevilleTrueExpected, predMap.get(predicateToTest).getParameters(), 0.001);
eventStream.close();
}
-
+
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/MaxentPrepAttachTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/MaxentPrepAttachTest.java
index 1bcdb5c1..62fe126f 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/MaxentPrepAttachTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/MaxentPrepAttachTest.java
@@ -20,8 +20,8 @@ package opennlp.tools.ml.maxent;
import java.io.IOException;
import java.util.HashMap;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import opennlp.tools.ml.AbstractEventTrainer;
import opennlp.tools.ml.AbstractTrainer;
@@ -39,30 +39,31 @@ import opennlp.tools.util.TrainingParameters;
public class MaxentPrepAttachTest {
private DataIndexer testDataIndexer;
- @Before
- public void initIndexer() {
+
+ @BeforeEach
+ void initIndexer() {
TrainingParameters trainingParameters = new TrainingParameters();
trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
trainingParameters.put(AbstractDataIndexer.SORT_PARAM, false);
testDataIndexer = new TwoPassDataIndexer();
testDataIndexer.init(trainingParameters, new HashMap<>());
}
-
+
@Test
- public void testMaxentOnPrepAttachData() throws IOException {
+ void testMaxentOnPrepAttachData() throws IOException {
testDataIndexer.index(PrepAttachDataUtil.createTrainingStream());
// this shows why the GISTrainer should be a AbstractEventTrainer.
// TODO: make sure that the trainingParameter cutoff and the
// cutoff value passed here are equal.
AbstractModel model =
new GISTrainer(true).trainModel(100,
- testDataIndexer,
- new UniformPrior(), 1);
+ testDataIndexer,
+ new UniformPrior(), 1);
PrepAttachDataUtil.testModel(model, 0.7997028967566229);
}
@Test
- public void testMaxentOnPrepAttachData2Threads() throws IOException {
+ void testMaxentOnPrepAttachData2Threads() throws IOException {
testDataIndexer.index(PrepAttachDataUtil.createTrainingStream());
AbstractModel model =
new GISTrainer(true).trainModel(100,
@@ -72,7 +73,7 @@ public class MaxentPrepAttachTest {
}
@Test
- public void testMaxentOnPrepAttachDataWithParams() throws IOException {
+ void testMaxentOnPrepAttachDataWithParams() throws IOException {
TrainingParameters trainParams = new TrainingParameters();
trainParams.put(AbstractTrainer.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
@@ -87,7 +88,7 @@ public class MaxentPrepAttachTest {
}
@Test
- public void testMaxentOnPrepAttachDataWithParamsDefault() throws IOException {
+ void testMaxentOnPrepAttachDataWithParamsDefault() throws IOException {
TrainingParameters trainParams = new TrainingParameters();
trainParams.put(AbstractTrainer.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
@@ -95,11 +96,11 @@ public class MaxentPrepAttachTest {
EventTrainer trainer = TrainerFactory.getEventTrainer(trainParams, null);
MaxentModel model = trainer.train(PrepAttachDataUtil.createTrainingStream());
- PrepAttachDataUtil.testModel(model, 0.8086159940579352 );
+ PrepAttachDataUtil.testModel(model, 0.8086159940579352);
}
-
+
@Test
- public void testMaxentOnPrepAttachDataWithParamsLLThreshold() throws IOException {
+ void testMaxentOnPrepAttachDataWithParamsLLThreshold() throws IOException {
TrainingParameters trainParams = new TrainingParameters();
trainParams.put(AbstractTrainer.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
trainParams.put(GISTrainer.LOG_LIKELIHOOD_THRESHOLD_PARAM, 5.);
@@ -107,6 +108,6 @@ public class MaxentPrepAttachTest {
EventTrainer trainer = TrainerFactory.getEventTrainer(trainParams, null);
MaxentModel model = trainer.train(PrepAttachDataUtil.createTrainingStream());
- PrepAttachDataUtil.testModel(model, 0.8103490963109681 );
+ PrepAttachDataUtil.testModel(model, 0.8103490963109681);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/MockDataIndexer.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/MockDataIndexer.java
index 7aeb62de..881159e8 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/MockDataIndexer.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/MockDataIndexer.java
@@ -17,7 +17,6 @@
package opennlp.tools.ml.maxent;
-import java.io.IOException;
import java.util.Map;
import opennlp.tools.ml.model.DataIndexer;
@@ -70,11 +69,11 @@ public class MockDataIndexer implements DataIndexer {
@Override
public void init(TrainingParameters trainParams,
- Map<String, String> reportMap) {
+ Map<String, String> reportMap) {
}
@Override
- public void index(ObjectStream<Event> eventStream) throws IOException {
+ public void index(ObjectStream<Event> eventStream) {
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/RealValueModelTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/RealValueModelTest.java
index fbff6180..30cc3d4d 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/RealValueModelTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/RealValueModelTest.java
@@ -20,9 +20,9 @@ package opennlp.tools.ml.maxent;
import java.io.IOException;
import java.util.HashMap;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import opennlp.tools.ml.AbstractTrainer;
import opennlp.tools.ml.model.DataIndexer;
@@ -35,16 +35,17 @@ import opennlp.tools.util.TrainingParameters;
public class RealValueModelTest {
private DataIndexer testDataIndexer;
- @Before
- public void initIndexer() {
+
+ @BeforeEach
+ void initIndexer() {
TrainingParameters trainingParameters = new TrainingParameters();
trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
testDataIndexer = new OnePassRealValueDataIndexer();
testDataIndexer.init(trainingParameters, new HashMap<>());
}
-
+
@Test
- public void testRealValuedWeightsVsRepeatWeighting() throws IOException {
+ void testRealValuedWeightsVsRepeatWeighting() throws IOException {
GISModel realModel;
GISTrainer gisTrainer = new GISTrainer();
try (RealValueFileEventStream rvfes1 = new RealValueFileEventStream(
@@ -57,34 +58,34 @@ public class RealValueModelTest {
try (FileEventStream rvfes2 = new FileEventStream(
"src/test/resources/data/opennlp/maxent/repeat-weighting-training-data.txt")) {
testDataIndexer.index(rvfes2);
- repeatModel = gisTrainer.trainModel(100,testDataIndexer);
+ repeatModel = gisTrainer.trainModel(100, testDataIndexer);
}
- String[] features2Classify = new String[] {"feature2","feature5"};
+ String[] features2Classify = new String[] {"feature2", "feature5"};
double[] realResults = realModel.eval(features2Classify);
double[] repeatResults = repeatModel.eval(features2Classify);
- Assert.assertEquals(realResults.length, repeatResults.length);
+ Assertions.assertEquals(realResults.length, repeatResults.length);
for (int i = 0; i < realResults.length; i++) {
System.out.println(String.format("classifiy with realModel: %1$s = %2$f",
realModel.getOutcome(i), realResults[i]));
System.out.println(String.format("classifiy with repeatModel: %1$s = %2$f",
repeatModel.getOutcome(i), repeatResults[i]));
- Assert.assertEquals(realResults[i], repeatResults[i], 0.01f);
+ Assertions.assertEquals(repeatResults[i], realResults[i], 0.01f);
}
- features2Classify = new String[] {"feature1","feature2","feature3","feature4","feature5"};
+ features2Classify = new String[] {"feature1", "feature2", "feature3", "feature4", "feature5"};
realResults = realModel.eval(features2Classify, new float[] {5.5f, 6.1f, 9.1f, 4.0f, 1.8f});
repeatResults = repeatModel.eval(features2Classify, new float[] {5.5f, 6.1f, 9.1f, 4.0f, 1.8f});
System.out.println();
- Assert.assertEquals(realResults.length, repeatResults.length);
+ Assertions.assertEquals(realResults.length, repeatResults.length);
for (int i = 0; i < realResults.length; i++) {
System.out.println(String.format("classifiy with realModel: %1$s = %2$f",
realModel.getOutcome(i), realResults[i]));
System.out.println(String.format("classifiy with repeatModel: %1$s = %2$f",
repeatModel.getOutcome(i), repeatResults[i]));
- Assert.assertEquals(realResults[i], repeatResults[i], 0.01f);
+ Assertions.assertEquals(repeatResults[i], realResults[i], 0.01f);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/ScaleDoesntMatterTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/ScaleDoesntMatterTest.java
index ed7b2a1c..2923264f 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/ScaleDoesntMatterTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/ScaleDoesntMatterTest.java
@@ -20,9 +20,9 @@ package opennlp.tools.ml.maxent;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import opennlp.tools.ml.AbstractTrainer;
import opennlp.tools.ml.EventTrainer;
@@ -42,8 +42,8 @@ public class ScaleDoesntMatterTest {
private DataIndexer testDataIndexer;
- @Before
- public void initIndexer() {
+ @BeforeEach
+ void initIndexer() {
TrainingParameters trainingParameters = new TrainingParameters();
trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 0);
testDataIndexer = new OnePassRealValueDataIndexer();
@@ -57,7 +57,7 @@ public class ScaleDoesntMatterTest {
* we use (0.1,0.2) and (10,20) there is a difference.
*/
@Test
- public void testScaleResults() throws Exception {
+ void testScaleResults() throws Exception {
String smallValues = "predA=0.1 predB=0.2 A\n" + "predB=0.3 predA=0.1 B\n";
String smallTest = "predA=0.2 predB=0.2";
@@ -98,7 +98,7 @@ public class ScaleDoesntMatterTest {
String largeResultString = largeModel.getAllOutcomes(largeResults);
System.out.println("largeResults: " + largeResultString);
- Assert.assertEquals(smallResults.length, largeResults.length);
+ Assertions.assertEquals(smallResults.length, largeResults.length);
for (int i = 0; i < smallResults.length; i++) {
System.out.println(String.format(
"classifiy with smallModel: %1$s = %2$f", smallModel.getOutcome(i),
@@ -106,7 +106,7 @@ public class ScaleDoesntMatterTest {
System.out.println(String.format(
"classifiy with largeModel: %1$s = %2$f", largeModel.getOutcome(i),
largeResults[i]));
- Assert.assertEquals(smallResults[i], largeResults[i], 0.01f);
+ Assertions.assertEquals(largeResults[i], smallResults[i], 0.01f);
}
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/URLInputStreamFactory.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/URLInputStreamFactory.java
index 4ea64d0e..30c6eb92 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/URLInputStreamFactory.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/URLInputStreamFactory.java
@@ -27,10 +27,11 @@ public class URLInputStreamFactory implements InputStreamFactory {
private URL url;
+
public URLInputStreamFactory(URL url) {
this.url = url;
}
-
+
@Override
public InputStream createInputStream() throws IOException {
return url.openStream();
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/io/RealValueFileEventStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/io/RealValueFileEventStreamTest.java
index b5425acf..08f2e8e5 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/io/RealValueFileEventStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/io/RealValueFileEventStreamTest.java
@@ -20,9 +20,9 @@ package opennlp.tools.ml.maxent.io;
import java.io.IOException;
import java.util.HashMap;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import opennlp.tools.ml.AbstractTrainer;
import opennlp.tools.ml.model.DataIndexer;
@@ -34,8 +34,8 @@ public class RealValueFileEventStreamTest {
private DataIndexer indexer;
- @Before
- public void initIndexer() {
+ @BeforeEach
+ void initIndexer() {
TrainingParameters trainingParameters = new TrainingParameters();
trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
indexer = new OnePassRealValueDataIndexer();
@@ -43,17 +43,17 @@ public class RealValueFileEventStreamTest {
}
@Test
- public void testLastLineBug() throws IOException {
+ void testLastLineBug() throws IOException {
try (RealValueFileEventStream rvfes = new RealValueFileEventStream(
"src/test/resources/data/opennlp/maxent/io/rvfes-bug-data-ok.txt")) {
indexer.index(rvfes);
}
- Assert.assertEquals(1, indexer.getOutcomeLabels().length);
+ Assertions.assertEquals(1, indexer.getOutcomeLabels().length);
try (RealValueFileEventStream rvfes = new RealValueFileEventStream(
"src/test/resources/data/opennlp/maxent/io/rvfes-bug-data-broken.txt")) {
indexer.index(rvfes);
}
- Assert.assertEquals(1, indexer.getOutcomeLabels().length);
+ Assertions.assertEquals(1, indexer.getOutcomeLabels().length);
}
}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/LineSearchTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/LineSearchTest.java
index 8e37970e..75d71ec7 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/LineSearchTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/LineSearchTest.java
@@ -17,8 +17,8 @@
package opennlp.tools.ml.maxent.quasinewton;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
import opennlp.tools.ml.maxent.quasinewton.LineSearch.LineSearchResult;
@@ -26,145 +26,145 @@ public class LineSearchTest {
private static final double TOLERANCE = 0.01;
@Test
- public void testLineSearchDeterminesSaneStepLength1() {
+ void testLineSearchDeterminesSaneStepLength1() {
Function objectiveFunction = new QuadraticFunction1();
// given
- double[] testX = new double[] { 0 };
+ double[] testX = new double[] {0};
double testValueX = objectiveFunction.valueAt(testX);
double[] testGradX = objectiveFunction.gradientAt(testX);
- double[] testDirection = new double[] { 1 };
+ double[] testDirection = new double[] {1};
// when
LineSearchResult lsr = LineSearchResult.getInitialObject(testValueX, testGradX, testX);
LineSearch.doLineSearch(objectiveFunction, testDirection, lsr, 1.0);
double stepSize = lsr.getStepSize();
// then
boolean succCond = TOLERANCE < stepSize && stepSize <= 1;
- Assert.assertTrue(succCond);
+ Assertions.assertTrue(succCond);
}
@Test
- public void testLineSearchDeterminesSaneStepLength2() {
+ void testLineSearchDeterminesSaneStepLength2() {
Function objectiveFunction = new QuadraticFunction2();
// given
- double[] testX = new double[] { -2 };
+ double[] testX = new double[] {-2};
double testValueX = objectiveFunction.valueAt(testX);
double[] testGradX = objectiveFunction.gradientAt(testX);
- double[] testDirection = new double[] { 1 };
+ double[] testDirection = new double[] {1};
// when
LineSearchResult lsr = LineSearchResult.getInitialObject(testValueX, testGradX, testX);
LineSearch.doLineSearch(objectiveFunction, testDirection, lsr, 1.0);
double stepSize = lsr.getStepSize();
// then
boolean succCond = TOLERANCE < stepSize && stepSize <= 1;
- Assert.assertTrue(succCond);
+ Assertions.assertTrue(succCond);
}
@Test
- public void testLineSearchFailsWithWrongDirection1() {
- Function objectiveFunction = new QuadraticFunction1();
+ void testLineSearchFailsWithWrongDirection1() {
+ Function objectiveFunction = new QuadraticFunction1();
// given
- double[] testX = new double[] { 0 };
+ double[] testX = new double[] {0};
double testValueX = objectiveFunction.valueAt(testX);
double[] testGradX = objectiveFunction.gradientAt(testX);
- double[] testDirection = new double[] { -1 };
+ double[] testDirection = new double[] {-1};
// when
LineSearchResult lsr = LineSearchResult.getInitialObject(testValueX, testGradX, testX);
LineSearch.doLineSearch(objectiveFunction, testDirection, lsr, 1.0);
double stepSize = lsr.getStepSize();
// then
boolean succCond = TOLERANCE < stepSize && stepSize <= 1;
- Assert.assertFalse(succCond);
- Assert.assertEquals(0.0, stepSize, TOLERANCE);
+ Assertions.assertFalse(succCond);
+ Assertions.assertEquals(0.0, stepSize, TOLERANCE);
}
@Test
- public void testLineSearchFailsWithWrongDirection2() {
+ void testLineSearchFailsWithWrongDirection2() {
Function objectiveFunction = new QuadraticFunction2();
// given
- double[] testX = new double[] { -2 };
+ double[] testX = new double[] {-2};
double testValueX = objectiveFunction.valueAt(testX);
double[] testGradX = objectiveFunction.gradientAt(testX);
- double[] testDirection = new double[] { -1 };
+ double[] testDirection = new double[] {-1};
// when
LineSearchResult lsr = LineSearchResult.getInitialObject(testValueX, testGradX, testX);
LineSearch.doLineSearch(objectiveFunction, testDirection, lsr, 1.0);
double stepSize = lsr.getStepSize();
// then
boolean succCond = TOLERANCE < stepSize && stepSize <= 1;
- Assert.assertFalse(succCond);
- Assert.assertEquals(0.0, stepSize, TOLERANCE);
+ Assertions.assertFalse(succCond);
+ Assertions.assertEquals(0.0, stepSize, TOLERANCE);
}
@Test
- public void testLineSearchFailsWithWrongDirection3() {
+ void testLineSearchFailsWithWrongDirection3() {
Function objectiveFunction = new QuadraticFunction1();
// given
- double[] testX = new double[] { 4 };
+ double[] testX = new double[] {4};
double testValueX = objectiveFunction.valueAt(testX);
double[] testGradX = objectiveFunction.gradientAt(testX);
- double[] testDirection = new double[] { 1 };
+ double[] testDirection = new double[] {1};
// when
LineSearchResult lsr = LineSearchResult.getInitialObject(testValueX, testGradX, testX);
LineSearch.doLineSearch(objectiveFunction, testDirection, lsr, 1.0);
double stepSize = lsr.getStepSize();
// then
boolean succCond = TOLERANCE < stepSize && stepSize <= 1;
- Assert.assertFalse(succCond);
- Assert.assertEquals(0.0, stepSize, TOLERANCE);
+ Assertions.assertFalse(succCond);
+ Assertions.assertEquals(0.0, stepSize, TOLERANCE);
}
@Test
- public void testLineSearchFailsWithWrongDirection4() {
+ void testLineSearchFailsWithWrongDirection4() {
Function objectiveFunction = new QuadraticFunction2();
// given
- double[] testX = new double[] { 2 };
... 14499 lines suppressed ...