Posted to commits@nlpcraft.apache.org by se...@apache.org on 2022/06/19 09:33:10 UTC

[incubator-nlpcraft] branch scala created (now 5aaf1d07)

This is an automated email from the ASF dual-hosted git repository.

sergeykamov pushed a change to branch scala
in repository https://gitbox.apache.org/repos/asf/incubator-nlpcraft.git


      at 5aaf1d07 Scala initial version.

This branch includes the following new commits:

     new 5aaf1d07 Scala initial version.

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[incubator-nlpcraft] 01/01: Scala initial version.

Posted by se...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sergeykamov pushed a commit to branch scala
in repository https://gitbox.apache.org/repos/asf/incubator-nlpcraft.git

commit 5aaf1d075895a52e6a0cb5556d291efb8a5ffed1
Author: Sergey Kamov <sk...@gmail.com>
AuthorDate: Sun Jun 19 12:32:58 2022 +0300

    Scala initial version.
---
 .../examples/lightswitch/LightSwitchFrModel.scala  |   8 +-
 .../entity/parser/NCFrSemanticEntityParser.scala   |   2 +-
 .../token/enricher/NCFrLemmaPosTokenEnricher.scala |   5 +-
 .../enricher/NCFrStopWordsTokenEnricher.scala      |   8 +-
 .../nlp/token/parser/NCFrTokenParser.scala         |   7 +-
 .../examples/lightswitch/LightSwitchRuModel.scala  |   8 +-
 .../entity/parser/NCRuSemanticEntityParser.scala   |   4 +-
 .../token/enricher/NCRuLemmaPosTokenEnricher.scala |   5 +-
 .../enricher/NCRuStopWordsTokenEnricher.scala      |  11 +-
 .../nlp/token/parser/NCRuTokenParser.scala         |   7 +-
 .../lightswitch/LightSwitchGroovyModel.groovy      |  88 -----
 .../examples/lightswitch/LightSwitchJavaModel.java |  93 ------
 .../examples/lightswitch/LightSwitchKotlinModel.kt |  91 -----
 .../lightswitch/LightSwitchScalaModel.scala        |   5 +-
 .../lightswitch/NCModelValidationSpec.scala        |  11 +-
 .../apache/nlpcraft/examples/time/TimeModel.java   | 189 -----------
 .../time/utils/cities/CitiesDataProvider.java      |  80 -----
 .../nlpcraft/examples/time/utils/cities/City.java  |  81 -----
 .../examples/time/utils/cities/CityData.java       |  67 ----
 .../examples/time/utils/keycdn/GeoData.java        | 122 -------
 .../examples/time/utils/keycdn/GeoManager.java     | 150 ---------
 .../examples/time/utils/keycdn/Response.java       |  81 -----
 .../examples/time/utils/keycdn/ResponseData.java   |  43 ---
 .../apache/nlpcraft/examples/time/TimeModel.scala  | 123 +++++++
 .../time/utils/cities/CitiesDataProvider.scala}    |  33 +-
 .../examples/time/utils/keycdn/GeoManager.scala    | 108 ++++++
 .../parser/stanford/NCStanfordNLPEntityParser.java |  88 -----
 .../parser/stanford/NCStanfordNLPTokenParser.java  |  56 ----
 .../stanford/NCStanfordNLPEntityParser.scala}      |  24 +-
 .../stanford/NCStanfordNLPTokenParser.scala}       |  18 +-
 .../stanford/NCStanfordNLPEntityParserSpec.scala   |  14 +-
 .../stanford/NCStanfordNLPTokenParserSpec.scala    |   2 +-
 .../package-info.java => NCCallbackData.scala}     |  12 +-
 .../main/scala/org/apache/nlpcraft/NCContext.java  |  71 ----
 .../main/scala/org/apache/nlpcraft/NCContext.scala |  60 ++++
 .../scala/org/apache/nlpcraft/NCConversation.java  | 112 -------
 .../scala/org/apache/nlpcraft/NCConversation.scala | 102 ++++++
 .../main/scala/org/apache/nlpcraft/NCCuration.java |  47 ---
 .../intent/NCIDLEntity.scala => NCCuration.scala}  |  18 +-
 .../org/apache/nlpcraft/NCDialogFlowItem.java      |  48 ---
 ...SemanticElement.java => NCDialogFlowItem.scala} |  64 ++--
 .../main/scala/org/apache/nlpcraft/NCEntity.java   |  87 -----
 .../main/scala/org/apache/nlpcraft/NCEntity.scala  |  74 +++++
 .../org/apache/nlpcraft/NCEntityEnricher.java      |  49 ---
 .../org/apache/nlpcraft/NCEntityEnricher.scala     |  45 +++
 .../scala/org/apache/nlpcraft/NCEntityMapper.java  |  49 ---
 .../scala/org/apache/nlpcraft/NCEntityMapper.scala |  47 +++
 .../scala/org/apache/nlpcraft/NCEntityParser.java  |  49 ---
 .../scala/org/apache/nlpcraft/NCEntityParser.scala |  45 +++
 .../org/apache/nlpcraft/NCEntityValidator.java     |  47 ---
 .../org/apache/nlpcraft/NCEntityValidator.scala    |  43 +++
 .../scala/org/apache/nlpcraft/NCException.java     |  42 ---
 .../{NCResultType.java => NCException.scala}       |  17 +-
 .../scala/org/apache/nlpcraft/NCIntentMatch.java   |  89 -----
 .../scala/org/apache/nlpcraft/NCIntentMatch.scala  |  76 +++++
 .../scala/org/apache/nlpcraft/NCIntentSkip.java    |  55 ---
 .../scala/org/apache/nlpcraft/NCIntentSkip.scala   |  38 +++
 .../scala/org/apache/nlpcraft/NCLifecycle.java     |  48 ---
 .../{NCModelAdapter.java => NCLifecycle.scala}     |  49 ++-
 .../main/scala/org/apache/nlpcraft/NCModel.java    | 209 ------------
 .../main/scala/org/apache/nlpcraft/NCModel.scala   | 187 +++++++++++
 ...NCSemanticStemmer.java => NCModelAdapter.scala} |  16 +-
 .../scala/org/apache/nlpcraft/NCModelClient.java   | 126 -------
 ...NCModelClientImpl.scala => NCModelClient.scala} |  45 ++-
 .../scala/org/apache/nlpcraft/NCModelConfig.java   | 141 --------
 .../{NCModelAdapter.java => NCModelConfig.scala}   |  46 +--
 .../main/scala/org/apache/nlpcraft/NCPipeline.java | 121 -------
 .../scala/org/apache/nlpcraft/NCPipeline.scala     |  80 +++++
 .../org/apache/nlpcraft/NCPipelineBuilder.java     | 368 ---------------------
 .../org/apache/nlpcraft/NCPipelineBuilder.scala    | 238 +++++++++++++
 .../scala/org/apache/nlpcraft/NCPropertyMap.java   | 110 ------
 .../scala/org/apache/nlpcraft/NCPropertyMap.scala  |  96 ++++++
 .../org/apache/nlpcraft/NCPropertyMapAdapter.java  |  76 -----
 .../org/apache/nlpcraft/NCPropertyMapAdapter.scala |  48 +++
 .../scala/org/apache/nlpcraft/NCRejection.java     |  47 ---
 .../{NCResultType.java => NCRejection.scala}       |  22 +-
 .../main/scala/org/apache/nlpcraft/NCRequest.java  |  69 ----
 .../main/scala/org/apache/nlpcraft/NCRequest.scala |  59 ++++
 .../main/scala/org/apache/nlpcraft/NCResult.java   | 109 ------
 .../{NCCallbackData.java => NCResult.scala}        |  31 +-
 .../NCSemanticStemmer.java => NCResultType.scala}  |  15 +-
 .../main/scala/org/apache/nlpcraft/NCToken.java    |  71 ----
 .../main/scala/org/apache/nlpcraft/NCToken.scala   |  65 ++++
 .../org/apache/nlpcraft/NCTokenEnricher.scala}     |  20 +-
 .../scala/org/apache/nlpcraft/NCTokenParser.java   |  46 ---
 .../{NCTokenValidator.java => NCTokenParser.scala} |  40 +--
 .../NCIDLEntity.scala => NCTokenValidator.scala}   |  24 +-
 .../main/scala/org/apache/nlpcraft/NCVariant.java  |  36 --
 .../{NCTokenEnricher.java => NCVariant.scala}      |  34 +-
 ...CSemanticStemmer.java => NCVariantFilter.scala} |  17 +-
 .../nlpcraft/{ => annotations}/NCIntent.java       |   6 +-
 .../nlpcraft/{ => annotations}/NCIntentObject.java |   2 +-
 .../nlpcraft/{ => annotations}/NCIntentRef.java    |   6 +-
 .../nlpcraft/{ => annotations}/NCIntentSample.java |   6 +-
 .../{ => annotations}/NCIntentSampleRef.java       |   6 +-
 .../nlpcraft/{ => annotations}/NCIntentTerm.java   |   6 +-
 .../nlpcraft/internal/ascii/NCAsciiTable.scala     |  14 +-
 .../internal/conversation/NCConversationData.scala |  18 +-
 .../conversation/NCConversationManager.scala       |   7 +-
 .../internal/dialogflow/NCDialogFlowManager.scala  |  16 +-
 .../internal/impl/NCModelPipelineManager.scala     |  64 ++--
 .../nlpcraft/internal/impl/NCModelScanner.scala    |  54 ++-
 .../nlpcraft/internal/intent/NCIDLEntity.scala     |   4 +-
 .../intent/compiler/NCIDLCodeGenerator.scala       |  26 +-
 .../internal/intent/compiler/NCIDLCompiler.scala   |  14 +-
 .../intent/compiler/antlr4/NCIDLLexer.java         |  15 +-
 .../intent/compiler/antlr4/NCIDLParser.java        |  25 +-
 .../intent/matcher/NCIntentSolverManager.scala     | 114 ++++---
 .../nlpcraft/internal/makro/NCMacroCompiler.scala  |   8 +-
 .../nlpcraft/internal/makro/NCMacroJavaParser.java |  77 -----
 .../internal/makro/NCMacroJavaParserTrait.java     |  59 ----
 .../nlpcraft/internal/makro/NCMacroParser.scala    |  10 +-
 .../internal/makro/antlr4/NCMacroDslLexer.java     |  15 +-
 .../internal/makro/antlr4/NCMacroDslParser.java    |  25 +-
 .../apache/nlpcraft/internal/util/NCUtils.scala    |   4 +-
 .../nlp/entity/parser/NCNLPEntityParser.java       |  51 ---
 ...ityParserImpl.scala => NCNLPEntityParser.scala} |  20 +-
 .../nlp/entity/parser/NCOpenNLPEntityParser.java   |  79 -----
 ...arserImpl.scala => NCOpenNLPEntityParser.scala} |  25 +-
 .../parser/semantic/NCSemanticElement.scala}       |  22 +-
 .../parser/semantic/NCSemanticEntityParser.java    |  98 ------
 .../NCSemanticEntityParser.scala}                  |  81 +++--
 .../parser/semantic/NCSemanticStemmer.scala}       |   9 +-
 .../impl}/NCSemanticSourceReader.scala             |  32 +-
 .../impl}/NCSemanticSynonym.scala                  |   8 +-
 .../impl}/NCSemanticSynonymsProcessor.scala        |  36 +-
 .../nlp/entity/parser/semantic/package-info.java   |  21 --
 .../token/enricher/NCEnBracketsTokenEnricher.java  |  49 ---
 ...rImpl.scala => NCEnBracketsTokenEnricher.scala} |  10 +-
 .../enricher/NCEnDictionaryTokenEnricher.java      |  49 ---
 ...mpl.scala => NCEnDictionaryTokenEnricher.scala} |  12 +-
 .../token/enricher/NCEnQuotesTokenEnricher.java    |  49 ---
 ...herImpl.scala => NCEnQuotesTokenEnricher.scala} |  12 +-
 .../token/enricher/NCEnStopWordsTokenEnricher.java |  61 ----
 ...Impl.scala => NCEnStopWordsTokenEnricher.scala} |  38 +--
 .../enricher/NCEnSwearWordsTokenEnricher.java      |  61 ----
 ...mpl.scala => NCEnSwearWordsTokenEnricher.scala} |  13 +-
 .../enricher/NCOpenNLPLemmaPosTokenEnricher.java   |  60 ----
 ....scala => NCOpenNLPLemmaPosTokenEnricher.scala} |  42 +--
 .../{impl => tools}/NCEnStopWordGenerator.scala    |   6 +-
 .../nlp/token/parser/NCOpenNLPTokenParser.java     |  54 ---
 ...ParserImpl.scala => NCOpenNLPTokenParser.scala} |  15 +-
 .../nlpcraft/nlp/token/parser/package-info.java    |  21 --
 .../scala/org/apache/nlpcraft/package-info.java    |  21 --
 .../conversation/NCConversationManagerSpec.scala   |   6 +-
 .../internal/conversation/NCConversationSpec.scala |  13 +-
 .../conversation/NCConversationTimeoutSpec.scala   |  12 +-
 .../dialogflow/NCDialogFlowManagerSpec.scala       |  12 +-
 .../internal/impl/NCModelCallbacksSpec.scala       |   7 +-
 .../nlpcraft/internal/impl/NCModelClientSpec.scala |  11 +-
 .../internal/impl/NCModelClientSpec2.scala         |  25 +-
 .../internal/impl/NCModelClientSpec3.scala         |   7 +-
 .../internal/impl/NCModelPingPongSpec.scala        |  11 +-
 .../internal/impl/NCPipelineManagerSpec.scala      |   6 +-
 .../impl/scan/NCModelIntentsInvalidArgsSpec.scala  |  13 +-
 .../scan/NCModelIntentsInvalidIntentsSpec.scala    |   1 +
 .../impl/scan/NCModelIntentsNestedSpec.scala       |   2 +-
 .../internal/impl/scan/NCModelIntentsSpec.scala    |   1 -
 .../internal/impl/scan/NCTestModelJava.java        |  67 ----
 .../internal/impl/scan/NCTestModelScala.scala      |   2 +-
 .../intent/compiler/functions/NCIDLFunctions.scala |   6 +-
 .../compiler/functions/NCIDLFunctionsModel.scala   |   8 +-
 .../apache/nlpcraft/nlp/NCEntityEnricherSpec.scala |   8 +-
 .../apache/nlpcraft/nlp/NCEntityMapperSpec.scala   |  37 ++-
 .../nlpcraft/nlp/NCEntityValidatorSpec.scala       |   9 +-
 .../apache/nlpcraft/nlp/NCTokenEnricherSpec.scala  |   8 +-
 .../apache/nlpcraft/nlp/NCTokenValidatorSpec.scala |   9 +-
 .../apache/nlpcraft/nlp/NCVariantFilterSpec.scala  |   8 +-
 .../nlp/benchmark/client/NCClientBenchmark.java    | 150 ++++-----
 .../nlp/entity/parser/NCNLPEntityParserSpec.scala  |   8 +-
 .../entity/parser/NCOpenNLPEntityParserSpec.scala  |   8 +-
 .../semantic/NCSemanticEntityParserJsonSpec.scala  |   2 +-
 .../semantic/NCSemanticEntityParserLemmaSpec.scala |  29 +-
 .../semantic/NCSemanticEntityParserSpec.scala      |  18 +-
 .../semantic/NCSemanticEntityParserYamlSpec.scala  |   2 +-
 .../parser/semantic/NCSemanticTestElement.scala    |   9 +-
 .../enricher/NCBracketsTokenEnricherSpec.scala     |   9 +-
 .../enricher/NCDictionaryTokenEnricherSpec.scala   |   6 +-
 .../token/enricher/NCQuotesTokenEnricherSpec.scala |   5 +-
 .../token/enricher/NCStopWordsEnricherSpec.scala   |  11 +-
 .../enricher/NCSwearWordsTokenEnricherSpec.scala   |   4 +-
 .../token/enricher/impl/NCStopWordsImplSpec.scala  |   9 +-
 .../token/parser/NCOpenNLPTokenParserSpec.scala    |   7 +-
 .../apache/nlpcraft/nlp/util/NCTestConfig.scala    |  13 +-
 .../apache/nlpcraft/nlp/util/NCTestConfigJava.java |  39 ---
 .../apache/nlpcraft/nlp/util/NCTestEntity.scala    |   7 +-
 .../nlpcraft/nlp/util/NCTestModelAdapter.scala     |   4 +-
 .../apache/nlpcraft/nlp/util/NCTestPipeline.scala  |  28 +-
 .../apache/nlpcraft/nlp/util/NCTestRequest.scala   |   4 +-
 .../org/apache/nlpcraft/nlp/util/NCTestToken.scala |   7 +-
 .../org/apache/nlpcraft/nlp/util/NCTestUtils.scala |  23 +-
 191 files changed, 2478 insertions(+), 5596 deletions(-)

diff --git a/nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchFrModel.scala b/nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/LightSwitchFrModel.scala
similarity index 97%
rename from nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchFrModel.scala
rename to nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/LightSwitchFrModel.scala
index 77068013..4cfeeae9 100644
--- a/nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchFrModel.scala
+++ b/nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/LightSwitchFrModel.scala
@@ -20,12 +20,12 @@ package org.apache.nlpcraft.examples.lightswitch
 import com.google.gson.Gson
 import org.apache.nlpcraft.*
 import org.apache.nlpcraft.examples.lightswitch.nlp.entity.parser.NCFrSemanticEntityParser
-import org.apache.nlpcraft.examples.lightswitch.nlp.token.enricher.{NCFrLemmaPosTokenEnricher, NCFrStopWordsTokenEnricher}
+import org.apache.nlpcraft.examples.lightswitch.nlp.token.enricher.*
 import org.apache.nlpcraft.examples.lightswitch.nlp.token.parser.NCFrTokenParser
 import org.apache.nlpcraft.nlp.entity.parser.*
 import org.apache.nlpcraft.nlp.token.enricher.*
 import org.apache.nlpcraft.nlp.token.parser.NCOpenNLPTokenParser
-
+import org.apache.nlpcraft.annotations.*
 import java.util
 import scala.jdk.CollectionConverters.*
 
@@ -48,7 +48,7 @@ class LightSwitchFrModel extends NCModelAdapter(
         withTokenEnricher(new NCFrLemmaPosTokenEnricher()).
         withTokenEnricher(new NCFrStopWordsTokenEnricher()).
         withEntityParser(new NCFrSemanticEntityParser("lightswitch_model_fr.yaml")).
-        build()
+        build
 ):
     /**
       * Intent and its on-match callback.
@@ -87,7 +87,7 @@ class LightSwitchFrModel extends NCModelAdapter(
         @NCIntentTerm("loc") locEnts: List[NCEntity]
     ): NCResult =
         val action = if actEnt.getId == "ls:on" then "allumer" else "éteindre"
-        val locations = if locEnts.isEmpty then "toute la maison" else locEnts.map(_.mkText()).mkString(", ")
+        val locations = if locEnts.isEmpty then "toute la maison" else locEnts.map(_.mkText).mkString(", ")
 
         // Add HomeKit, Arduino or other integration here.
 
diff --git a/nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/entity/parser/NCFrSemanticEntityParser.scala b/nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/entity/parser/NCFrSemanticEntityParser.scala
similarity index 98%
rename from nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/entity/parser/NCFrSemanticEntityParser.scala
rename to nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/entity/parser/NCFrSemanticEntityParser.scala
index ceeaf101..1a2aab80 100644
--- a/nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/entity/parser/NCFrSemanticEntityParser.scala
+++ b/nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/entity/parser/NCFrSemanticEntityParser.scala
@@ -32,5 +32,5 @@ class NCFrSemanticEntityParser(src: String) extends NCSemanticEntityParser(
         override def stem(txt: String): String = stemmer.synchronized { stemmer.stem(txt.toLowerCase).toString }
     ,
     new NCFrTokenParser(),
-    src
+    mdlSrc = src
 )
diff --git a/nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCFrLemmaPosTokenEnricher.scala b/nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCFrLemmaPosTokenEnricher.scala
similarity index 94%
rename from nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCFrLemmaPosTokenEnricher.scala
rename to nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCFrLemmaPosTokenEnricher.scala
index 92db11f4..92e62dba 100644
--- a/nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCFrLemmaPosTokenEnricher.scala
+++ b/nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCFrLemmaPosTokenEnricher.scala
@@ -21,8 +21,6 @@ import org.apache.nlpcraft.*
 import org.languagetool.AnalyzedToken
 import org.languagetool.tagging.fr.FrenchTagger
 
-import java.util
-import java.util.stream.Collectors
 import scala.jdk.CollectionConverters.*
 
 /**
@@ -31,8 +29,7 @@ import scala.jdk.CollectionConverters.*
 class NCFrLemmaPosTokenEnricher extends NCTokenEnricher:
     private def nvl(v: String, dflt : => String): String = if v != null then v else dflt
 
-    override def enrich(req: NCRequest, cfg: NCModelConfig, toksList: util.List[NCToken]): Unit =
-        val toks = toksList.asScala
+    override def enrich(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit =
         val tags = FrenchTagger.INSTANCE.tag(toks.map(_.getText).asJava).asScala
 
         require(toks.size == tags.size)
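
The hunk above shows the new SPI shape: enrichers now receive a native Scala List[NCToken]
instead of java.util.List. A minimal sketch of a custom enricher against that signature;
the "upper" flag and the NCPropertyMap-style put() call are assumptions for illustration,
not part of this commit:

    import org.apache.nlpcraft.*

    class MyUppercaseFlagEnricher extends NCTokenEnricher:
        override def enrich(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit =
            for (t <- toks)
                // 'put' assumed available via NCPropertyMap; flags fully-uppercase tokens.
                t.put("upper", t.getText.forall(_.isUpper))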
diff --git a/nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCFrStopWordsTokenEnricher.scala b/nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCFrStopWordsTokenEnricher.scala
similarity index 86%
rename from nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCFrStopWordsTokenEnricher.scala
rename to nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCFrStopWordsTokenEnricher.scala
index 335d00ff..be791402 100644
--- a/nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCFrStopWordsTokenEnricher.scala
+++ b/nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCFrStopWordsTokenEnricher.scala
@@ -29,11 +29,11 @@ import scala.jdk.CollectionConverters.*
 class NCFrStopWordsTokenEnricher extends NCTokenEnricher:
     private final val stops = FrenchAnalyzer.getDefaultStopSet
 
-    private def getPos(t: NCToken): String = t.getOpt("pos").orElseThrow(() => throw new NCException("POS not found in token."))
-    private def getLemma(t: NCToken): String = t.getOpt("lemma").orElseThrow(() => throw new NCException("Lemma not found in token."))
+    private def getPos(t: NCToken): String = t.getOpt("pos").getOrElse(throw new NCException("POS not found in token."))
+    private def getLemma(t: NCToken): String = t.getOpt("lemma").getOrElse(throw new NCException("Lemma not found in token."))
 
-    override def enrich(req: NCRequest, cfg: NCModelConfig, toks: util.List[NCToken]): Unit =
-        for (t <- toks.asScala)
+    override def enrich(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit =
+        for (t <- toks)
             val lemma = getLemma(t)
             lazy val pos = getPos(t)
 
diff --git a/nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/parser/NCFrTokenParser.scala b/nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/parser/NCFrTokenParser.scala
similarity index 89%
rename from nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/parser/NCFrTokenParser.scala
rename to nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/parser/NCFrTokenParser.scala
index 45eac3b0..09ab107b 100644
--- a/nlpcraft-examples/lightswitch-fr/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/parser/NCFrTokenParser.scala
+++ b/nlpcraft-examples/lightswitch-fr/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/parser/NCFrTokenParser.scala
@@ -21,7 +21,6 @@ import org.apache.nlpcraft.*
 import org.languagetool.tokenizers.WordTokenizer
 import org.languagetool.tokenizers.fr.FrenchWordTokenizer
 
-import java.util
 import scala.jdk.CollectionConverters.*
 
 /**
@@ -30,11 +29,11 @@ import scala.jdk.CollectionConverters.*
 class NCFrTokenParser extends NCTokenParser:
     private val tokenizer = new FrenchWordTokenizer
 
-    override def tokenize(text: String): util.List[NCToken] =
+    override def tokenize(text: String): List[NCToken] =
         val toks = collection.mutable.ArrayBuffer.empty[NCToken]
         var sumLen = 0
 
-        for (((word, len), idx) <- tokenizer.tokenize(text).asScala.map(p => p -> p.length).zipWithIndex)
+        for ((word, idx) <- tokenizer.tokenize(text).asScala.zipWithIndex)
             val start = sumLen
             val end = sumLen + word.length
 
@@ -47,4 +46,4 @@ class NCFrTokenParser extends NCTokenParser:
 
             sumLen = end
 
-        toks.asJava
+        toks.toList
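
With tokenize(...) now returning a plain Scala List, callers can apply standard collection
operations directly, with no asScala/asJava round-trip. A short usage sketch (the sample
sentence and entry-point name are illustrative):

    import org.apache.nlpcraft.examples.lightswitch.nlp.token.parser.NCFrTokenParser

    @main def tokenizeDemo(): Unit =
        val parser = new NCFrTokenParser()
        // Result is a Scala List[NCToken]; map/mkString work directly on it.
        val words = parser.tokenize("allumer la lumière dans la cuisine").map(_.getText)
        println(words.mkString(" | "))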
diff --git a/nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchRuModel.scala b/nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/LightSwitchRuModel.scala
similarity index 96%
rename from nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchRuModel.scala
rename to nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/LightSwitchRuModel.scala
index 821fa498..86abeff1 100644
--- a/nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchRuModel.scala
+++ b/nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/LightSwitchRuModel.scala
@@ -20,13 +20,13 @@ package org.apache.nlpcraft.examples.lightswitch
 import com.google.gson.Gson
 import org.apache.nlpcraft.*
 import org.apache.nlpcraft.examples.lightswitch.nlp.entity.parser.NCRuSemanticEntityParser
-import org.apache.nlpcraft.examples.lightswitch.nlp.token.enricher.{NCRuLemmaPosTokenEnricher, NCRuStopWordsTokenEnricher}
+import org.apache.nlpcraft.examples.lightswitch.nlp.token.enricher.*
 import org.apache.nlpcraft.examples.lightswitch.nlp.token.parser.NCRuTokenParser
 import org.apache.nlpcraft.nlp.entity.parser.NCNLPEntityParser
 import org.apache.nlpcraft.nlp.entity.parser.semantic.NCSemanticEntityParser
 import org.apache.nlpcraft.nlp.token.enricher.NCEnStopWordsTokenEnricher
 import org.apache.nlpcraft.nlp.token.parser.NCOpenNLPTokenParser
-
+import org.apache.nlpcraft.annotations.*
 import java.util
 import scala.jdk.CollectionConverters.*
 
@@ -41,7 +41,7 @@ class LightSwitchRuModel extends NCModelAdapter(
         withTokenEnricher(new NCRuLemmaPosTokenEnricher()).
         withTokenEnricher(new NCRuStopWordsTokenEnricher()).
         withEntityParser(new NCRuSemanticEntityParser("lightswitch_model_ru.yaml")).
-        build()
+        build
 ):
     /**
       * Intent and its on-match callback.
@@ -71,7 +71,7 @@ class LightSwitchRuModel extends NCModelAdapter(
         @NCIntentTerm("loc") locEnts: List[NCEntity]
     ): NCResult =
         val action = if actEnt.getId == "ls:on" then "включить" else "выключить"
-        val locations = if locEnts.isEmpty then "весь дом" else locEnts.map(_.mkText()).mkString(", ")
+        val locations = if locEnts.isEmpty then "весь дом" else locEnts.map(_.mkText).mkString(", ")
 
         // Add HomeKit, Arduino or other integration here.
         // By default - just return a descriptive action string.
diff --git a/nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/entity/parser/NCRuSemanticEntityParser.scala b/nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/entity/parser/NCRuSemanticEntityParser.scala
similarity index 93%
rename from nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/entity/parser/NCRuSemanticEntityParser.scala
rename to nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/entity/parser/NCRuSemanticEntityParser.scala
index ecdd7254..c1e3f38a 100644
--- a/nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/entity/parser/NCRuSemanticEntityParser.scala
+++ b/nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/entity/parser/NCRuSemanticEntityParser.scala
@@ -20,7 +20,7 @@ package org.apache.nlpcraft.examples.lightswitch.nlp.entity.parser
 import opennlp.tools.stemmer.snowball.SnowballStemmer
 import org.apache.nlpcraft.examples.lightswitch.nlp.token.parser.NCRuTokenParser
 import org.apache.nlpcraft.nlp.entity.parser.*
-import org.apache.nlpcraft.nlp.entity.parser.semantic.{NCSemanticEntityParser, NCSemanticStemmer}
+import org.apache.nlpcraft.nlp.entity.parser.semantic.*
 
 /**
   *
@@ -32,5 +32,5 @@ class NCRuSemanticEntityParser(src: String) extends NCSemanticEntityParser(
         override def stem(txt: String): String = stemmer.synchronized { stemmer.stem(txt.toLowerCase).toString }
     ,
     new NCRuTokenParser(),
-    src
+    mdlSrc = src
 )
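
A sketch of the wrapper pattern these example parsers use, with the constructor argument
order (stemmer, token parser, named mdlSrc) inferred from the hunk above; the class name
and stemmer choice are illustrative, not part of the commit:

    import opennlp.tools.stemmer.snowball.SnowballStemmer
    import org.apache.nlpcraft.examples.lightswitch.nlp.token.parser.NCRuTokenParser
    import org.apache.nlpcraft.nlp.entity.parser.semantic.*

    class MySemanticEntityParser(src: String) extends NCSemanticEntityParser(
        new NCSemanticStemmer {
            private val stemmer = new SnowballStemmer(SnowballStemmer.ALGORITHM.RUSSIAN)
            // The original code guards the stemmer with synchronized; kept here.
            override def stem(txt: String): String =
                stemmer.synchronized { stemmer.stem(txt.toLowerCase).toString }
        },
        new NCRuTokenParser(),
        mdlSrc = src // named argument introduced by this change
    )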
diff --git a/nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCRuLemmaPosTokenEnricher.scala b/nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCRuLemmaPosTokenEnricher.scala
similarity index 94%
rename from nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCRuLemmaPosTokenEnricher.scala
rename to nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCRuLemmaPosTokenEnricher.scala
index 2a9bfd3a..a243f874 100644
--- a/nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCRuLemmaPosTokenEnricher.scala
+++ b/nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCRuLemmaPosTokenEnricher.scala
@@ -21,8 +21,6 @@ import org.apache.nlpcraft.*
 import org.languagetool.AnalyzedToken
 import org.languagetool.tagging.ru.RussianTagger
 
-import java.util
-import java.util.stream.Collectors
 import scala.jdk.CollectionConverters.*
 
 /**
@@ -31,8 +29,7 @@ import scala.jdk.CollectionConverters.*
 class NCRuLemmaPosTokenEnricher extends NCTokenEnricher:
     private def nvl(v: String, dflt : => String): String = if v != null then v else dflt
 
-    override def enrich(req: NCRequest, cfg: NCModelConfig, toksList: util.List[NCToken]): Unit =
-        val toks = toksList.asScala
+    override def enrich(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit =
         val tags = RussianTagger.INSTANCE.tag(toks.map(_.getText).asJava).asScala
 
         require(toks.size == tags.size)
diff --git a/nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCRuStopWordsTokenEnricher.scala b/nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCRuStopWordsTokenEnricher.scala
similarity index 83%
rename from nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCRuStopWordsTokenEnricher.scala
rename to nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCRuStopWordsTokenEnricher.scala
index e675ed4f..1759c70f 100644
--- a/nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCRuStopWordsTokenEnricher.scala
+++ b/nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/enricher/NCRuStopWordsTokenEnricher.scala
@@ -20,20 +20,17 @@ package org.apache.nlpcraft.examples.lightswitch.nlp.token.enricher
 import org.apache.lucene.analysis.ru.RussianAnalyzer
 import org.apache.nlpcraft.*
 
-import java.util
-import scala.jdk.CollectionConverters.*
-
 /**
   *
   */
 class NCRuStopWordsTokenEnricher extends NCTokenEnricher:
     private final val stops = RussianAnalyzer.getDefaultStopSet
 
-    private def getPos(t: NCToken): String = t.getOpt("pos").orElseThrow(() => throw new NCException("POS not found in token."))
-    private def getLemma(t: NCToken): String = t.getOpt("lemma").orElseThrow(() => throw new NCException("Lemma not found in token."))
+    private def getPos(t: NCToken): String = t.getOpt("pos").getOrElse(throw new NCException("POS not found in token."))
+    private def getLemma(t: NCToken): String = t.getOpt("lemma").getOrElse(throw new NCException("Lemma not found in token."))
 
-    override def enrich(req: NCRequest, cfg: NCModelConfig, toks: util.List[NCToken]): Unit =
-        for (t <- toks.asScala)
+    override def enrich(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit =
+        for (t <- toks)
             val lemma = getLemma(t)
             lazy val pos = getPos(t)
 
diff --git a/nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/parser/NCRuTokenParser.scala b/nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/parser/NCRuTokenParser.scala
similarity index 88%
rename from nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/parser/NCRuTokenParser.scala
rename to nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/parser/NCRuTokenParser.scala
index bf0440ec..af31fff9 100644
--- a/nlpcraft-examples/lightswitch-ru/src/main/java/org/apache/nlpcraft/examples/lightswitch/nlp/token/parser/NCRuTokenParser.scala
+++ b/nlpcraft-examples/lightswitch-ru/src/main/scala/org/apache/nlpcraft/examples/lightswitch/nlp/token/parser/NCRuTokenParser.scala
@@ -20,7 +20,6 @@ package org.apache.nlpcraft.examples.lightswitch.nlp.token.parser
 import org.apache.nlpcraft.*
 import org.languagetool.tokenizers.WordTokenizer
 
-import java.util
 import scala.jdk.CollectionConverters.*
 
 /**
@@ -29,11 +28,11 @@ import scala.jdk.CollectionConverters.*
 class NCRuTokenParser extends NCTokenParser:
     private val tokenizer = new WordTokenizer
 
-    override def tokenize(text: String): util.List[NCToken] =
+    override def tokenize(text: String): List[NCToken] =
         val toks = collection.mutable.ArrayBuffer.empty[NCToken]
         var sumLen = 0
 
-        for (((word, len), idx) <- tokenizer.tokenize(text).asScala.map(p => p -> p.length).zipWithIndex)
+        for ((word, idx) <- tokenizer.tokenize(text).asScala.zipWithIndex)
             val start = sumLen
             val end = sumLen + word.length
             
@@ -46,4 +45,4 @@ class NCRuTokenParser extends NCTokenParser:
 
             sumLen = end
 
-        toks.asJava
\ No newline at end of file
+        toks.toList
\ No newline at end of file
diff --git a/nlpcraft-examples/lightswitch/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchGroovyModel.groovy b/nlpcraft-examples/lightswitch/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchGroovyModel.groovy
deleted file mode 100644
index 7aaa54eb..00000000
--- a/nlpcraft-examples/lightswitch/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchGroovyModel.groovy
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.examples.lightswitch
-
-import org.apache.nlpcraft.*
-
-/**
- * This example provides very simple implementation for NLI-powered light switch.
- * You can say something like this:
- * <ul>
- *     <li>"Turn the lights off in the entire house."</li>
- *     <li>"Switch on the illumination in the master bedroom closet."</li>
- * </ul>
- * You can easily modify intent callbacks to perform the actual light switching using
- * HomeKit or Arduino-based controllers.
- * <p>
- * See 'README.md' file in the same folder for running and testing instructions.
- */
-class LightSwitchGroovyModel extends NCModelAdapter {
-    LightSwitchGroovyModel() {
-        super(
-            new NCModelConfig("nlpcraft.lightswitch.java.ex", "LightSwitch Example Model", "1.0"),
-            new NCPipelineBuilder().withSemantic("en", "lightswitch_model.yaml").build()
-        )
-    }
-
-    /**
-     * Intent and its on-match callback.
-     *
-     * @param actEnt Token from 'act' term (guaranteed to be one).
-     * @param locEnts Tokens from 'loc' term (zero or more).
-     * @return Query result to be sent to the REST caller.
-     */
-    @NCIntent("intent=ls term(act)={has(ent_groups, 'act')} term(loc)={# == 'ls:loc'}*")
-    @NCIntentSample([
-            "Turn the lights off in the entire house.",
-            "Turn off all lights now",
-            "Switch on the illumination in the master bedroom closet.",
-            "Off the lights on the 1st floor",
-            "Get the lights on.",
-            "Lights up in the kitchen.",
-            "Please, put the light out in the upstairs bedroom.",
-            "Set the lights on in the entire house.",
-            "Turn the lights off in the guest bedroom.",
-            "Could you please switch off all the lights?",
-            "Dial off illumination on the 2nd floor.",
-            "Turn down lights in 1st floor bedroom",
-            "Lights on at second floor kitchen",
-            "Please, no lights!",
-            "Kill off all the lights now!",
-            "Down the lights in the garage",
-            "Lights down in the kitchen!",
-            "Turn up the illumination in garage and master bedroom",
-            "Turn down all the light now!",
-            "No lights in the bedroom, please.",
-            "Light up the garage, please!",
-            "Kill the illumination now!"
-    ])
-    static NCResult onMatch(
-        @NCIntentTerm("act") NCEntity actEnt,
-        @NCIntentTerm("loc") List<NCEntity> locEnts) {
-        String status = actEnt.id == "ls:on" ? "on" : "off"
-        String locations = locEnts ? locEnts*.mkText().join(", ") : "entire house"
-
-        // Add HomeKit, Arduino or other integration here.
-
-        // By default - just return a descriptive action string.
-        new NCResult(
-            "Lights are [$status] in [${locations.toLowerCase()}].",
-            NCResultType.ASK_RESULT
-        )
-    }
-}
diff --git a/nlpcraft-examples/lightswitch/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchJavaModel.java b/nlpcraft-examples/lightswitch/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchJavaModel.java
deleted file mode 100644
index a13b6d64..00000000
--- a/nlpcraft-examples/lightswitch/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchJavaModel.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.examples.lightswitch;
-
-import org.apache.nlpcraft.*;
-
-import java.util.List;
-import java.util.stream.Collectors;
-
-/**
- * This example provides very simple implementation for NLI-powered light switch.
- * You can say something like this:
- * <ul>
- *     <li>"Turn the lights off in the entire house."</li>
- *     <li>"Switch on the illumination in the master bedroom closet."</li>
- * </ul>
- * You can easily modify intent callbacks to perform the actual light switching using
- * HomeKit or Arduino-based controllers.
- * <p>
- * See 'README.md' file in the same folder for running and testing instructions. 
- */
-public class LightSwitchJavaModel extends NCModelAdapter {
-    public LightSwitchJavaModel() {
-        super(
-            new NCModelConfig("nlpcraft.lightswitch.java.ex", "LightSwitch Example Model", "1.0"),
-            new NCPipelineBuilder().withSemantic("en", "lightswitch_model.yaml").build()
-        );
-    }
-
-    /**
-     * Intent and its on-match callback.
-     *
-     * @param actEnt  Token from 'act' term (guaranteed to be one).
-     * @param locEnts Tokens from 'loc' term (zero or more).
-     * @return Query result to be sent to the REST caller.
-     */
-    @NCIntent("intent=ls term(act)={has(ent_groups, 'act')} term(loc)={# == 'ls:loc'}*")
-    @NCIntentSample({
-        "Turn the lights off in the entire house.",
-        "Turn off all lights now",
-        "Switch on the illumination in the master bedroom closet.",
-        "Off the lights on the 1st floor",
-        "Get the lights on.",
-        "Lights up in the kitchen.",
-        "Please, put the light out in the upstairs bedroom.",
-        "Set the lights on in the entire house.",
-        "Turn the lights off in the guest bedroom.",
-        "Could you please switch off all the lights?",
-        "Dial off illumination on the 2nd floor.",
-        "Turn down lights in 1st floor bedroom",
-        "Lights on at second floor kitchen",
-        "Please, no lights!",
-        "Kill off all the lights now!",
-        "Down the lights in the garage",
-        "Lights down in the kitchen!",
-        "Turn up the illumination in garage and master bedroom",
-        "Turn down all the light now!",
-        "No lights in the bedroom, please.",
-        "Light up the garage, please!",
-        "Kill the illumination now!"
-    })
-    NCResult onMatch(
-        @NCIntentTerm("act") NCEntity actEnt,
-        @NCIntentTerm("loc") List<NCEntity> locEnts) {
-        String status = actEnt.getId().equals("ls:on") ? "on" : "off";
-        String locations = locEnts.isEmpty() ?
-            "entire house" :
-            locEnts.stream().map(NCEntity::mkText).collect(Collectors.joining(", "));
-
-        // Add HomeKit, Arduino or other integration here.
-
-        // By default - just return a descriptive action string.
-        return new NCResult(
-            "Lights are [" + status + "] in [" + locations.toLowerCase() + "].",
-            NCResultType.ASK_RESULT
-        );
-    }
-}
\ No newline at end of file
diff --git a/nlpcraft-examples/lightswitch/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchKotlinModel.kt b/nlpcraft-examples/lightswitch/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchKotlinModel.kt
deleted file mode 100644
index 001f6e7e..00000000
--- a/nlpcraft-examples/lightswitch/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchKotlinModel.kt
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.examples.lightswitch
-
-
-import org.apache.nlpcraft.*
-import java.util.*
-import java.util.stream.Collectors
-
-
-/**
- * This example provides very simple implementation for NLI-powered light switch.
- * You can say something like this:
- *
- *  - "Turn the lights off in the entire house."
- *  - "Switch on the illumination in the master bedroom closet."
- *
- * You can easily modify intent callbacks to perform the actual light switching using
- * HomeKit or Arduino-based controllers.
- *
- * See 'README.md' file in the same folder for running and testing instructions.
- */
-class LightSwitchKotlinModel : NCModelAdapter(
-    NCModelConfig("nlpcraft.lightswitch.kotlin.ex", "LightSwitch Example Model", "1.0"),
-    NCPipelineBuilder().withSemantic("en", "lightswitch_model.yaml").build()
-) {
-    /**
-     * Intent and its on-match callback.
-     *
-     * @param actEnt Token from 'act' term (guaranteed to be one).
-     * @param locEnts Tokens from 'loc' term (zero or more).
-     * @return Query result to be sent to the REST caller.
-     */
-    @NCIntent("intent=ls term(act)={has(ent_groups, 'act')} term(loc)={# == 'ls:loc'}*")
-    @NCIntentSample(
-        "Turn the lights off in the entire house.",
-        "Turn off all lights now",
-        "Switch on the illumination in the master bedroom closet.",
-        "Get the lights on.",
-        "Off the lights on the 1st floor",
-        "Lights up in the kitchen.",
-        "Please, put the light out in the upstairs bedroom.",
-        "Set the lights on in the entire house.",
-        "Turn the lights off in the guest bedroom.",
-        "Could you please switch off all the lights?",
-        "Dial off illumination on the 2nd floor.",
-        "Turn down lights in 1st floor bedroom",
-        "Lights on at second floor kitchen",
-        "Please, no lights!",
-        "Kill off all the lights now!",
-        "Down the lights in the garage",
-        "Lights down in the kitchen!",
-        "Turn up the illumination in garage and master bedroom",
-        "Turn down all the light now!",
-        "No lights in the bedroom, please.",
-        "Light up the garage, please!",
-        "Kill the illumination now!"
-    )
-    fun onMatch(
-        @NCIntentTerm("act") actEnt: NCEntity,
-        @NCIntentTerm("loc") locEnts: List<NCEntity>
-    ): NCResult {
-        val status = if (actEnt.id == "ls:on") "on" else "off"
-        val locations = if (locEnts.isEmpty()) "entire house" else locEnts.stream()
-            .map { t: NCEntity -> t.mkText() }
-            .collect(Collectors.joining(", "))
-
-        // Add HomeKit, Arduino or other integration here.
-
-        // By default - just return a descriptive action string.
-        return NCResult(
-            "Lights are [" + status + "] in [" + locations.lowercase(Locale.getDefault()) + "].",
-            NCResultType.ASK_RESULT
-        )
-    }
-}
\ No newline at end of file
diff --git a/nlpcraft-examples/lightswitch/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchScalaModel.scala b/nlpcraft-examples/lightswitch/src/main/scala/org/apache/nlpcraft/examples/lightswitch/LightSwitchScalaModel.scala
similarity index 97%
rename from nlpcraft-examples/lightswitch/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchScalaModel.scala
rename to nlpcraft-examples/lightswitch/src/main/scala/org/apache/nlpcraft/examples/lightswitch/LightSwitchScalaModel.scala
index 0e15f270..ca62daeb 100644
--- a/nlpcraft-examples/lightswitch/src/main/java/org/apache/nlpcraft/examples/lightswitch/LightSwitchScalaModel.scala
+++ b/nlpcraft-examples/lightswitch/src/main/scala/org/apache/nlpcraft/examples/lightswitch/LightSwitchScalaModel.scala
@@ -18,6 +18,7 @@
 package org.apache.nlpcraft.examples.lightswitch
 
 import org.apache.nlpcraft.*
+import org.apache.nlpcraft.annotations.*
 import org.apache.nlpcraft.internal.util.NCResourceReader
 import org.apache.nlpcraft.nlp.*
 import org.apache.nlpcraft.nlp.entity.parser.semantic.NCSemanticEntityParser
@@ -40,7 +41,7 @@ import org.apache.nlpcraft.nlp.token.parser.NCOpenNLPTokenParser
 
 class LightSwitchScalaModel extends NCModelAdapter(
     new NCModelConfig("nlpcraft.lightswitch.java.ex", "LightSwitch Example Model", "1.0"),
-    new NCPipelineBuilder().withSemantic("en", "lightswitch_model.yaml").build()
+    new NCPipelineBuilder().withSemantic("en", "lightswitch_model.yaml").build
 ):
     /**
       * Intent and its on-match callback.
@@ -79,7 +80,7 @@ class LightSwitchScalaModel extends NCModelAdapter(
         @NCIntentTerm("loc") locEnts: List[NCEntity]
     ): NCResult =
         val status = if actEnt.getId == "ls:on" then "on" else "off"
-        val locations = if locEnts.isEmpty then "entire house" else locEnts.map(_.mkText()).mkString(", ")
+        val locations = if locEnts.isEmpty then "entire house" else locEnts.map(_.mkText).mkString(", ")
 
         // Add HomeKit, Arduino or other integration here.
 
diff --git a/nlpcraft-examples/lightswitch/src/test/java/org/apache/nlpcraft/examples/lightswitch/NCModelValidationSpec.scala b/nlpcraft-examples/lightswitch/src/test/java/org/apache/nlpcraft/examples/lightswitch/NCModelValidationSpec.scala
index 689f2f92..b95c943e 100644
--- a/nlpcraft-examples/lightswitch/src/test/java/org/apache/nlpcraft/examples/lightswitch/NCModelValidationSpec.scala
+++ b/nlpcraft-examples/lightswitch/src/test/java/org/apache/nlpcraft/examples/lightswitch/NCModelValidationSpec.scala
@@ -29,13 +29,4 @@ class NCModelValidationSpec:
     private def test(mdl: NCModel): Unit = Using.resource(new NCModelClient(mdl)) { _.validateSamples() }
 
     @Test
-    def testJava(): Unit = test(new LightSwitchJavaModel())
-
-    @Test
-    def testGroovy(): Unit = test(new LightSwitchGroovyModel())
-
-    @Test
-    def testKotlin(): Unit = test(new LightSwitchKotlinModel())
-
-    @Test
-    def testScala(): Unit = test(new LightSwitchScalaModel())
+    def test(): Unit = test(new LightSwitchScalaModel())
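
For reference, a standalone usage sketch of the same validation call outside JUnit, mirroring
the Using.resource pattern in the spec above; the @main entry-point name is illustrative:

    import scala.util.Using
    import org.apache.nlpcraft.*
    import org.apache.nlpcraft.examples.lightswitch.LightSwitchScalaModel

    @main def validateLightSwitch(): Unit =
        // NCModelClient is auto-closed by Using.resource; validateSamples() checks the
        // model's @NCIntentSample utterances against its declared intents, as in the spec.
        Using.resource(new NCModelClient(new LightSwitchScalaModel())) { client =>
            client.validateSamples()
        }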
diff --git a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/TimeModel.java b/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/TimeModel.java
deleted file mode 100644
index 5cbda731..00000000
--- a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/TimeModel.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.examples.time;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
-import org.apache.nlpcraft.NCEntity;
-import org.apache.nlpcraft.NCIntent;
-import org.apache.nlpcraft.NCIntentMatch;
-import org.apache.nlpcraft.NCIntentRef;
-import org.apache.nlpcraft.NCIntentSample;
-import org.apache.nlpcraft.NCIntentTerm;
-import org.apache.nlpcraft.NCModelAdapter;
-import org.apache.nlpcraft.NCModelConfig;
-import org.apache.nlpcraft.NCPipelineBuilder;
-import org.apache.nlpcraft.NCRejection;
-import org.apache.nlpcraft.NCResult;
-import org.apache.nlpcraft.NCResultType;
-import org.apache.nlpcraft.examples.time.utils.cities.CitiesDataProvider;
-import org.apache.nlpcraft.examples.time.utils.cities.City;
-import org.apache.nlpcraft.examples.time.utils.cities.CityData;
-import org.apache.nlpcraft.examples.time.utils.keycdn.GeoData;
-import org.apache.nlpcraft.examples.time.utils.keycdn.GeoManager;
-import org.apache.nlpcraft.internal.util.NCResourceReader;
-import org.apache.nlpcraft.nlp.entity.parser.NCOpenNLPEntityParser;
-
-import java.time.ZoneId;
-import java.time.ZonedDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Optional;
-
-import static java.time.format.FormatStyle.MEDIUM;
-
-/**
- * Time example data model.
- * <p>
- * This example answers the questions about current time, either local or at some city.
- * It provides YAML response with time and timezone information.
- * <p>
- * See 'README.md' file in the same folder for running and testing instructions.
- */
-// Declaring intents on the class level + fragment usage for demo purposes.
-@NCIntent("fragment=city term(city)~{# == 'opennlp:location'}")
-@NCIntent("intent=intent2 term~{# == 'x:time'} fragment(city)")
-@NCIntent("intent=intent1 term={# == 'x:time'}")
-public class TimeModel extends NCModelAdapter {
-    // Medium data formatter.
-    static private final DateTimeFormatter FMT = DateTimeFormatter.ofLocalizedDateTime(MEDIUM);
-
-    // Map of cities and their geo and timezone information.
-    static private final Map<City, CityData> citiesData = CitiesDataProvider.get();
-
-    // Geo manager.
-    static private final GeoManager geoMrg = new GeoManager();
-
-    /**
-     * Initializes model.
-     */
-    public TimeModel() {
-        super(
-            new NCModelConfig("nlpcraft.time.ex", "Time Example Model", "1.0"),
-            new NCPipelineBuilder().
-                withSemantic("en", "time_model.yaml").
-                withEntityParser(new NCOpenNLPEntityParser(NCResourceReader.getPath("opennlp/en-ner-location.bin"))).
-                build()
-        );
-    }
-
-    /**
-     * Gets YAML query result.
-     *
-     * @param city  Detected city.
-     * @param cntry Detected country.
-     * @param tmz Timezone ID.
-     * @param lat City latitude.
-     * @param lon City longitude.
-     */
-    private static NCResult mkResult(String city, String cntry, String tmz, double lat, double lon) {
-        Map<String, Object> m = new HashMap<>();
-
-        m.put("city", capitalize(city));
-        m.put("country", capitalize(cntry));
-        m.put("timezone", tmz);
-        m.put("lat", lat);
-        m.put("lon", lon);
-        m.put("localTime", ZonedDateTime.now(ZoneId.of(tmz)).format(FMT));
-
-        try {
-            return new NCResult(
-                new ObjectMapper(new YAMLFactory()).writeValueAsString(m),
-                NCResultType.ASK_RESULT
-            );
-        }
-        catch (JsonProcessingException e) {
-            throw new RuntimeException("YAML conversion error.", e);
-        }
-    }
-
-    /**
-     * 
-     * @param s
-     * @return
-     */
-    private static String capitalize(String s) {
-        return s == null || s.isEmpty() ? s : s.substring(0, 1).toUpperCase() + s.substring(1, s.length());
-    }
-
-    /**
-     * Callback on remote time intent match.
-     *
-     * @param cityEnt Token for 'geo' term.
-     * @return Query result.
-     */
-    @NCIntentRef("intent2")
-    @NCIntentSample({
-        "What time is it now in New York City?",
-        "What's the current time in Moscow?",
-        "Show me time of the day in London.",
-        "Can you please give me the Tokyo's current date and time."
-    })
-    private NCResult onRemoteMatch(@NCIntentTerm("city") NCEntity cityEnt) {
-        String cityName = cityEnt.mkText();
-
-        Optional<Map.Entry<City, CityData>> dataOpt =
-            citiesData.entrySet().stream().filter(p -> p.getKey().getName().equalsIgnoreCase(cityName)).findAny();
-
-        if (dataOpt.isPresent()) {
-            Map.Entry<City, CityData> e = dataOpt.get();
-            City city = e.getKey();
-            CityData data = e.getValue();
-
-            return mkResult(city.getName(), city.getCountry(), data.getTimezone(), data.getLatitude(), data.getLongitude());
-        }
-
-        // We don't have timezone mapping for parsed GEO location.
-        // Instead of defaulting to a local time - we reject with a specific error message for cleaner UX.
-        throw new NCRejection(String.format("No timezone mapping for %s.", cityName));
-    }
-
-    /**
-     * Callback on local time intent match.
-     *
-     * @param ctx Intent solver context.
-     * @return Query result.
-     */
-    @NCIntentRef("intent1")
-    @NCIntentSample({
-        "What's the local time?"
-    })
-    private NCResult onLocalMatch(NCIntentMatch ctx) {
-        // NOTE:
-        // We need to have two intents vs. one intent with an optional GEO. The reason is that
-        // first intent isn't using the conversation to make sure we can always ask
-        // for local time **no matter** what was asked before... Note that non-conversational
-        // intent always "wins" over the conversational one given otherwise equal weight because
-        // non-conversational intent is more specific (i.e. using only the most current user input).
-
-        // Check for exactly one 'x:time' token **without** looking into the conversation.
-        // That's an indication of asking for local time only.
-
-        Optional<GeoData> geoOpt = geoMrg.get(ctx.getContext().getRequest());
-
-        // Get local GEO data from sentence metadata defaulting to
-        // Silicon Valley location in case we are missing that info.
-        GeoData geo = geoOpt.orElseGet(geoMrg::getSiliconValley);
-
-        return mkResult(
-            geo.getCityName(), geo.getCountryName(), geo.getTimezoneName(), geo.getLatitude(), geo.getLongitude()
-        );
-    }
-}
diff --git a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/cities/CitiesDataProvider.java b/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/cities/CitiesDataProvider.java
deleted file mode 100644
index 3e85565e..00000000
--- a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/cities/CitiesDataProvider.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.examples.time.utils.cities;
-
-import org.apache.commons.lang3.tuple.Pair;
-import org.apache.nlpcraft.NCException;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-/**
- * City-timezone map provider.
- */
-public class CitiesDataProvider {
-    /**
-     * Creates and returns cities timezone map for all cities with a population &gt; 15000 or capitals.
-     *
-     * @return Cities timezone map.
-     */
-    public static Map<City, CityData> get() throws NCException {
-        try {
-            List<String> lines = new ArrayList<>();
-            
-            try (BufferedReader reader =
-                 new BufferedReader(new InputStreamReader(
-                     Objects.requireNonNull(
-                        CitiesDataProvider.class.
-                        getClassLoader().
-                        getResourceAsStream("cities_timezones.txt"))
-                ))) {
-                String line = reader.readLine();
-                
-                while (line != null) {
-                    lines.add(line);
-                    
-                    line = reader.readLine();
-                }
-            }
-            
-            return
-                lines.stream().
-                filter(p -> !p.startsWith("#")).
-                map(String::trim).
-                filter(p -> !p.isEmpty()).
-                map(p -> p.split("\t")).
-                map(p -> Arrays.stream(p).map(String::trim).toArray(String[]::new)).
-                map(arr ->
-                    Pair.of(
-                        new City(arr[0], arr[1]),
-                        new CityData(arr[2], Double.parseDouble(arr[3]), Double.parseDouble(arr[4])))
-                ).
-                collect(Collectors.toMap(Pair::getKey, Pair::getValue));
-        }
-        catch (IOException e) {
-            throw new NCException("Failed to read data file.", e);
-        }
-    }
-}
diff --git a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/cities/City.java b/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/cities/City.java
deleted file mode 100644
index 718b308a..00000000
--- a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/cities/City.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.examples.time.utils.cities;
-
-import java.util.Objects;
-
-/**
- * City data object.
- */
-public class City {
-    private final String name;
-    private final String country;
-
-    /**
-     * Creates new city object.
-     *
-     * @param name City name.
-     * @param country City country.
-     */
-    public City(String name, String country) {
-        this.name = name;
-        this.country = country;
-    }
-
-    /**
-     * Gets city name.
-     *
-     * @return City name.
-     */
-    public String getName() {
-        return name;
-    }
-
-    /**
-     * Gets city country.
-     *
-     * @return City country.
-     */
-    public String getCountry() {
-        return country;
-    }
-
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) {
-            return true;
-        }
-        if (o == null || getClass() != o.getClass()) {
-            return false;
-        }
-
-        City city = (City) o;
-
-        return (Objects.equals(name, city.name)) && (Objects.equals(country, city.country));
-    }
-
-    @Override
-    public int hashCode() {
-        int result = name != null ? name.hashCode() : 0;
-
-        result = 31 * result + (country != null ? country.hashCode() : 0);
-
-        return result;
-    }
-}
diff --git a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/cities/CityData.java b/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/cities/CityData.java
deleted file mode 100644
index 3fa40c8f..00000000
--- a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/cities/CityData.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.examples.time.utils.cities;
-
-/**
- * City data holder.
- */
-public class CityData {
-    private final String timezone;
-    private final double latitude;
-    private final double longitude;
-
-    /**
-     * Creates new city data holder.
-     *
-     * @param timezone City timezone
-     * @param latitude City latitude.
-     * @param longitude City longitude.
-     */
-    public CityData(String timezone, double latitude, double longitude) {
-        this.timezone = timezone;
-        this.latitude = latitude;
-        this.longitude = longitude;
-    }
-
-    /**
-     * Gets timezone.
-     *
-     * @return City timezone.
-     */
-    public String getTimezone() {
-        return timezone;
-    }
-
-    /**
-     * Gets latitude.
-     *
-     * @return City latitude.
-     */
-    public double getLatitude() {
-        return latitude;
-    }
-
-    /**
-     * Gets longitude.
-     *
-     * @return City longitude.
-     */
-    public double getLongitude() {
-        return longitude;
-    }
-}
diff --git a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/keycdn/GeoData.java b/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/keycdn/GeoData.java
deleted file mode 100644
index 2a3e9402..00000000
--- a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/keycdn/GeoData.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.examples.time.utils.keycdn;
-
-import com.google.gson.annotations.SerializedName;
-
-/**
- * Service https://tools.keycdn.com/geo response part bean. Geo data holder.
- */
-public class GeoData {
-    @SerializedName("country_name") private String countryName;
-    @SerializedName("city") private String cityName;
-    @SerializedName("latitude") private double latitude;
-    @SerializedName("longitude") private double longitude;
-    @SerializedName("timezone") private String timezoneName;
-
-    /**
-     * Gets country name.
-     *
-     * @return Country name.
-     */
-    public String getCountryName() {
-        return countryName;
-    }
-
-    /**
-     * Sets country name.
-     *
-     * @param countryName Country name to set.
-     */
-    public void setCountryName(String countryName) {
-        this.countryName = countryName;
-    }
-
-    /**
-     * Gets city name.
-     *
-     * @return City name.
-     */
-    public String getCityName() {
-        return cityName;
-    }
-
-    /**
-     * Set city name.
-     *
-     * @param cityName City name to set.
-     */
-    public void setCityName(String cityName) {
-        this.cityName = cityName;
-    }
-
-    /**
-     * Gets latitude.
-     *
-     * @return Latitude.
-     */
-    public double getLatitude() {
-        return latitude;
-    }
-
-    /**
-     * Sets latitude.
-     *
-     * @param latitude Latitude to set.
-     */
-    public void setLatitude(double latitude) {
-        this.latitude = latitude;
-    }
-
-    /**
-     * Gets longitude.
-     *
-     * @return Longitude.
-     */
-    public double getLongitude() {
-        return longitude;
-    }
-
-    /**
-     * Sets longitude.
-     *
-     * @param longitude Longitude to set.
-     */
-    public void setLongitude(double longitude) {
-        this.longitude = longitude;
-    }
-
-    /**
-     * Get timezone name.
-     *
-     * @return Timezone name.
-     */
-    public String getTimezoneName() {
-        return timezoneName;
-    }
-
-    /**
-     * Sets timezone name.
-     *
-     * @param timezoneName Timezone name to set.
-     */
-    public void setTimezoneName(String timezoneName) {
-        this.timezoneName = timezoneName;
-    }
-}
-
diff --git a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/keycdn/GeoManager.java b/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/keycdn/GeoManager.java
deleted file mode 100644
index 9de10ec3..00000000
--- a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/keycdn/GeoManager.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.examples.time.utils.keycdn;
-
-import com.google.gson.Gson;
-import org.apache.nlpcraft.NCRequest;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.text.MessageFormat;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Optional;
-import java.util.zip.GZIPInputStream;
-
-/**
- * Geo data finder.
- *
- * There are following restrictions to simplify example:
- *
- * 1. Finder's cache is never cleared.
- * 2. Implementation is not thread safe.
- * 3. Errors just forwarded to error console.
- * 4. Cache, which used to avoid rate-limiting requests (3 requests per second, see https://tools.keycdn.com/geo),
- *    applied only to successfully received GEO data.
- */
-public class GeoManager {
-    private static final String URL = "https://tools.keycdn.com/geo.json?host=";
-    private static final Gson GSON = new Gson();
-    
-    private final Map<String, GeoData> cache = new HashMap<>();
-    private String externalIp = null;
-    
-    /**
-     * Gets optional geo data by given sentence.
-     *
-     * @param sen Sentence.
-     * @return Geo data. Optional.
-     */
-    public Optional<GeoData> get(NCRequest sen) {
-        if (externalIp == null) {
-            try {
-                externalIp = getExternalIp();
-            }
-            catch (IOException e) {
-                System.err.println("External IP cannot be detected for localhost.");
-    
-                return Optional.empty();
-            }
-        }
-
-        try {
-            GeoData geo = cache.get(externalIp);
-    
-            if (geo != null)
-                return Optional.of(geo);
-            
-            HttpURLConnection conn = (HttpURLConnection)(new URL(URL + externalIp).openConnection());
-    
-            // This service requires "User-Agent" property with its own format.
-            conn.setRequestProperty("User-Agent", "keycdn-tools:https://nlpcraft.apache.org");
-    
-            try (InputStream in = conn.getInputStream()) {
-                String enc = conn.getContentEncoding();
-    
-                InputStream stream = enc != null && enc.equals("gzip") ? new GZIPInputStream(in) : in;
-                
-                Response resp =
-                    GSON.fromJson(new BufferedReader(new InputStreamReader(stream)), Response.class);
-        
-                if (!resp.getStatus().equals("success"))
-                    throw new IOException(
-                        MessageFormat.format(
-                            "Unexpected response [status={0}, description={1}]",
-                            resp.getStatus(),
-                            resp.getDescription())
-                    );
-        
-                geo = resp.getData().getGeo();
-                
-                cache.put(externalIp, geo);
-        
-                return Optional.of(geo);
-            }
-        }
-        catch (Exception e) {
-            System.err.println(
-                MessageFormat.format(
-                    "Unable to answer due to IP location finder (keycdn) error for host: {0}",
-                    externalIp
-                )
-            );
-    
-            e.printStackTrace(System.err);
-    
-            return Optional.empty();
-        }
-    }
-    
-    /**
-     * Gets external IP.
-     *
-     * @return External IP.
-     * @throws IOException If any errors occur.
-     */
-    private static String getExternalIp() throws IOException {
-        try (BufferedReader in =
-            new BufferedReader(new InputStreamReader(new URL("https://checkip.amazonaws.com").openStream()))) {
-            return in.readLine();
-        }
-    }
-    
-    /**
-     * Gets Silicon Valley location. Used as default value for each example service.
-     * This default location definition added here just for accumulating all GEO manipulation logic in one class.
-     *
-     * @return Silicon Valley location.
-     */
-    public GeoData getSiliconValley() {
-        GeoData geo = new GeoData();
-        
-        geo.setCityName("");
-        geo.setCountryName("United States");
-        geo.setTimezoneName("America/Los_Angeles");
-        geo.setTimezoneName("America/Los_Angeles");
-        geo.setLatitude(37.7749);
-        geo.setLongitude(122.4194);
-        
-        return geo;
-    }
-}
diff --git a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/keycdn/Response.java b/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/keycdn/Response.java
deleted file mode 100644
index 147b55a6..00000000
--- a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/keycdn/Response.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.examples.time.utils.keycdn;
-
-/**
- * Service https://tools.keycdn.com/geo response part bean.
- */
-class Response {
-    private String status;
-    private String description;
-    private ResponseData data;
-
-    /**
-     * Gets response status.
-     *
-     * @return Response status.
-     */
-    public String getStatus() {
-        return status;
-    }
-
-    /**
-     * Sets response status.
-     *
-     * @param status Response status to set.
-     */
-    public void setStatus(String status) {
-        this.status = status;
-    }
-
-    /**
-     * Gets response data.
-     *
-     * @return Response data.
-     */
-    public ResponseData getData() {
-        return data;
-    }
-
-    /**
-     * Sets response data.
-     *
-     * @param data Response data to set.
-     */
-    public void setData(ResponseData data) {
-        this.data = data;
-    }
-
-    /**
-     * Gets response description.
-     *
-     * @return Response description.
-     */
-    public String getDescription() {
-        return description;
-    }
-
-    /**
-     * Sets response description.
-     *
-     * @param description Response description to set.
-     */
-    public void setDescription(String description) {
-        this.description = description;
-    }
-}
diff --git a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/keycdn/ResponseData.java b/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/keycdn/ResponseData.java
deleted file mode 100644
index a130d8ae..00000000
--- a/nlpcraft-examples/time/src/main/java/org/apache/nlpcraft/examples/time/utils/keycdn/ResponseData.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.examples.time.utils.keycdn;
-
-/**
- * Service https://tools.keycdn.com/geo response bean.
- */
-class ResponseData {
-    private GeoData geo;
-
-    /**
-     * Gets geo data holder.
-     *
-     * @return Geo data holder.
-     */
-    public GeoData getGeo() {
-        return geo;
-    }
-
-    /**
-     * Sets get data holder.
-     *
-     * @param geo Geo data holder to set.
-     */
-    public void setGeo(GeoData geo) {
-        this.geo = geo;
-    }
-}
diff --git a/nlpcraft-examples/time/src/main/scala/org/apache/nlpcraft/examples/time/TimeModel.scala b/nlpcraft-examples/time/src/main/scala/org/apache/nlpcraft/examples/time/TimeModel.scala
new file mode 100644
index 00000000..da686af7
--- /dev/null
+++ b/nlpcraft-examples/time/src/main/scala/org/apache/nlpcraft/examples/time/TimeModel.scala
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nlpcraft.examples.time
+
+import com.fasterxml.jackson.core.JsonProcessingException
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
+import org.apache.nlpcraft.*
+import org.apache.nlpcraft.annotations.*
+import org.apache.nlpcraft.examples.time.utils.cities.*
+import org.apache.nlpcraft.examples.time.utils.keycdn.GeoManager
+import org.apache.nlpcraft.internal.util.NCResourceReader
+import org.apache.nlpcraft.nlp.entity.parser.NCOpenNLPEntityParser
+
+import java.time.format.DateTimeFormatter
+import java.time.format.FormatStyle.MEDIUM
+import java.time.*
+
+@NCIntent("fragment=city term(city)~{# == 'opennlp:location'}")
+@NCIntent("intent=intent2 term~{# == 'x:time'} fragment(city)")
+@NCIntent("intent=intent1 term={# == 'x:time'}")
+class TimeModel extends NCModelAdapter(
+    new NCModelConfig("nlpcraft.time.ex", "Time Example Model", "1.0"),
+    new NCPipelineBuilder().
+        withSemantic("en", "time_model.yaml").
+        withEntityParser(NCOpenNLPEntityParser(NCResourceReader.getPath("opennlp/en-ner-location.bin"))).
+        build
+):
+    // Medium-style date/time formatter.
+    private val FMT: DateTimeFormatter = DateTimeFormatter.ofLocalizedDateTime(MEDIUM)
+
+    // Map of cities and their geo and timezone information.
+    private val citiesData: Map[City, CityData] = CitiesDataProvider.get
+
+    /**
+      * Gets YAML query result.
+      *
+      * @param city  Detected city.
+      * @param cntry Detected country.
+      * @param tmz Timezone ID.
+      * @param lat City latitude.
+      * @param lon City longitude. */
+    private def mkResult(city: String, cntry: String, tmz: String, lat: Double, lon: Double): NCResult =
+        val m =
+            Map(
+                "city" -> capitalize(city),
+                "country" -> capitalize(cntry),
+                "timezone" -> tmz,
+                "lat" -> lat,
+                "lon" -> lon,
+                "localTime" -> ZonedDateTime.now(ZoneId.of(tmz)).format(FMT)
+            )
+
+        try
+            new NCResult(new ObjectMapper(new YAMLFactory).writeValueAsString(m), NCResultType.ASK_RESULT)
+        catch
+            case e: JsonProcessingException => throw new RuntimeException("YAML conversion error.", e)
+
+    /**
+      * Capitalizes the first letter of the given string.
+      * @param s String to capitalize.
+      * @return Capitalized string. */
+    private def capitalize(s: String): String =
+        if s == null || s.isEmpty then s else s.substring(0, 1).toUpperCase + s.substring(1, s.length)
+
+    /**
+      * Callback on remote time intent match.
+      *
+      * @param cityEnt Entity for the 'city' term.
+      * @return Query result. */
+    @NCIntentRef("intent2")
+    @NCIntentSample(Array(
+        "What time is it now in New York City?",
+        "What's the current time in Moscow?",
+        "Show me time of the day in London.",
+        "Can you please give me the Tokyo's current date and time."
+    ))
+    private def onRemoteMatch(@NCIntentTerm("city") cityEnt: NCEntity): NCResult =
+        val cityName: String = cityEnt.mkText
+
+        // We don't have a timezone mapping for the parsed GEO location. Instead of defaulting
+        // to the local time, we reject with a specific error message for cleaner UX.
+        val (city, data) = citiesData.
+            find(_._1.name.equalsIgnoreCase(cityName)).
+            getOrElse(throw new NCRejection(s"No timezone mapping for $cityName."))
+
+        mkResult(city.name, city.country, data.timezone, data.latitude, data.longitude)
+
+    /**
+      * Callback on local time intent match.
+      *
+      * @param ctx Intent solver context.
+      * @return Query result. */
+    @NCIntentRef("intent1")
+    @NCIntentSample(Array("What's the local time?"))
+    private def onLocalMatch(ctx: NCIntentMatch): NCResult =
+        // NOTE: we need two intents vs. one intent with an optional GEO. The reason is that the
+        // first intent doesn't use the conversation, so we can always ask for local time
+        // **no matter** what was asked before. Note that a non-conversational intent always "wins"
+        // over a conversational one of otherwise equal weight because the non-conversational
+        // intent is more specific (i.e. it uses only the most current user input).
+        // Check for exactly one 'x:time' token **without** looking into the conversation -
+        // that's an indication of asking for local time only.
+        // Get local GEO data from the request metadata, defaulting to the
+        // Silicon Valley location in case we are missing that info.
+        val geo = GeoManager.get(ctx.getContext.getRequest).getOrElse(GeoManager.getSiliconValley)
+
+        mkResult(geo.city, geo.country_name, geo.timezone, geo.latitude, geo.longitude)
+
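
The ported model keeps the sample utterances of the Java version, so it can be smoke-tested the same way the lightswitch models are validated elsewhere in this change set. A minimal sketch, assuming a test object named TimeModelValidation (the object name is illustrative only):

    import org.apache.nlpcraft.*
    import org.apache.nlpcraft.examples.time.TimeModel
    import scala.util.Using

    object TimeModelValidation:
        def main(args: Array[String]): Unit =
            // Validates every @NCIntentSample utterance declared on TimeModel against its intents.
            Using.resource(new NCModelClient(new TimeModel())) { client =>
                client.validateSamples()
            }
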
diff --git a/nlpcraft-examples/lightswitch/src/test/java/org/apache/nlpcraft/examples/lightswitch/NCModelValidationSpec.scala b/nlpcraft-examples/time/src/main/scala/org/apache/nlpcraft/examples/time/utils/cities/CitiesDataProvider.scala
similarity index 50%
copy from nlpcraft-examples/lightswitch/src/test/java/org/apache/nlpcraft/examples/lightswitch/NCModelValidationSpec.scala
copy to nlpcraft-examples/time/src/main/scala/org/apache/nlpcraft/examples/time/utils/cities/CitiesDataProvider.scala
index 689f2f92..97155720 100644
--- a/nlpcraft-examples/lightswitch/src/test/java/org/apache/nlpcraft/examples/lightswitch/NCModelValidationSpec.scala
+++ b/nlpcraft-examples/time/src/main/scala/org/apache/nlpcraft/examples/time/utils/cities/CitiesDataProvider.scala
@@ -14,28 +14,25 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.nlpcraft.examples.time.utils.cities
 
-package org.apache.nlpcraft.examples.lightswitch
-
-import org.apache.nlpcraft.*
-import org.junit.jupiter.api.*
+import org.apache.nlpcraft.NCException
 
+import java.io.*
+import java.util.Objects
 import scala.util.Using
 
-/**
-  * JUnit models validation.
-  */
-class NCModelValidationSpec:
-    private def test(mdl: NCModel): Unit = Using.resource(new NCModelClient(mdl)) { _.validateSamples() }
-
-    @Test
-    def testJava(): Unit = test(new LightSwitchJavaModel())
+case class City(name: String, country: String)
+case class CityData(timezone: String, latitude: Double, longitude: Double)
 
-    @Test
-    def testGroovy(): Unit = test(new LightSwitchGroovyModel())
+object CitiesDataProvider:
+    def get: Map[City, CityData] =
+        def convert(arr: Array[String]): (City, CityData) =
+            City(arr(0), arr(1)) -> CityData(arr(2), arr(3).toDouble, arr(4).toDouble)
 
-    @Test
-    def testKotlin(): Unit = test(new LightSwitchKotlinModel())
+        try
+            scala.io.Source.fromResource("cities_timezones.txt", Thread.currentThread().getContextClassLoader).getLines().
+                map(_.strip).filter(p => p.nonEmpty && !p.startsWith("#")).map(p => convert(p.split("\t"))).toMap
+        catch
+            case e: IOException => throw new NCException("Failed to read data file.", e)
 
-    @Test
-    def testScala(): Unit = test(new LightSwitchScalaModel())
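
CitiesDataProvider.get parses cities_timezones.txt as tab-separated records of name, country, timezone, latitude and longitude (see convert above). A usage sketch, with a hypothetical helper for case-insensitive lookups:

    import org.apache.nlpcraft.examples.time.utils.cities.*

    object CityLookup:
        private val citiesData: Map[City, CityData] = CitiesDataProvider.get

        // Returns the timezone of the given city, matched case-insensitively, e.g. timezoneOf("moscow").
        def timezoneOf(name: String): Option[String] =
            citiesData.collectFirst { case (city, data) if city.name.equalsIgnoreCase(name) => data.timezone }
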
diff --git a/nlpcraft-examples/time/src/main/scala/org/apache/nlpcraft/examples/time/utils/keycdn/GeoManager.scala b/nlpcraft-examples/time/src/main/scala/org/apache/nlpcraft/examples/time/utils/keycdn/GeoManager.scala
new file mode 100644
index 00000000..4be4d224
--- /dev/null
+++ b/nlpcraft-examples/time/src/main/scala/org/apache/nlpcraft/examples/time/utils/keycdn/GeoManager.scala
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nlpcraft.examples.time.utils.keycdn
+
+import com.google.gson.Gson
+import com.google.gson.annotations.SerializedName
+import org.apache.nlpcraft.NCRequest
+
+import java.io.*
+import java.net.*
+import java.text.MessageFormat
+import java.util.zip.GZIPInputStream
+import scala.io.Source
+import scala.util.Using
+
+// These field names must match the 'keycdn.com' service response.
+case class ResponseGeoData(country_name: String, city: String, latitude: Double, longitude: Double, timezone: String)
+case class ResponseData(geo: ResponseGeoData)
+case class Response(status: String, description: String, data: ResponseData)
+
+object GeoManager:
+    private val URL: String = "https://tools.keycdn.com/geo.json?host="
+    private val GSON: Gson = new Gson
+
+    private val cache = collection.mutable.HashMap.empty[String, ResponseGeoData]
+
+    private var externalIp: Option[String] = None
+
+    /**
+      * Gets optional geo data for the given request.
+      *
+      * @param sen User request.
+      * @return Geo data, if available. */
+    def get(sen: NCRequest): Option[ResponseGeoData] =
+        try
+            externalIp match
+                case Some(_) => // No-op.
+                case None =>
+                    try externalIp = Some(getExternalIp)
+                    catch
+                        case _: IOException => // No-op.
+
+            externalIp match
+                case Some(ip) =>
+                    cache.get(ip) match
+                        case Some(geo) => Some(geo)
+                        case None =>
+                            val conn = new URL(URL + ip).openConnection.asInstanceOf[HttpURLConnection]
+
+                            // This service requires "User-Agent" property with its own format.
+                            conn.setRequestProperty("User-Agent", "keycdn-tools:https://nlpcraft.apache.org")
+
+                            val enc = conn.getContentEncoding
+
+                            Using.resource(conn.getInputStream) { in =>
+                                val stream = if enc != null && enc == "gzip" then new GZIPInputStream(in) else in
+
+                                val resp = GSON.fromJson(new BufferedReader(new InputStreamReader(stream)), classOf[Response])
+
+                                if resp.status != "success" then
+                                    throw new IOException(MessageFormat.format("Unexpected response [status={0}, description={1}]", resp.status, resp.description))
+
+                                cache.put(ip, resp.data.geo)
+
+                                Some(resp.data.geo)
+                            }
+                case None =>
+                    System.err.println("External IP cannot be detected for localhost.")
+                    None
+        catch
+            case e: Exception =>
+                System.err.println(MessageFormat.format("Unable to answer due to IP location finder (keycdn) error for host: {0}", externalIp))
+                e.printStackTrace(System.err)
+                None
+
+    /**
+      * Gets external IP.
+      *
+      * @return External IP.
+      * @throws IOException If any errors occur. */
+    private def getExternalIp: String =
+        Using.resource(Source.fromURL(new URL("https://checkip.amazonaws.com"))) { src =>
+            src.getLines().toList.head
+        }
+
+    /**
+      * Gets the Silicon Valley location, used as the default value for each example service.
+      * This default location is defined here just to keep all GEO manipulation logic in one class.
+      *
+      * @return Silicon Valley location. */
+    def getSiliconValley: ResponseGeoData =
+        ResponseGeoData(
+            country_name = "United States", city = "", latitude = 37.7749, longitude = -122.4194, timezone = "America/Los_Angeles"
+        )
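
GeoManager is meant to be used with an explicit fallback to the default location, as TimeModel.onLocalMatch does above. A condensed sketch, assuming the caller already holds an NCRequest:

    import org.apache.nlpcraft.NCRequest
    import org.apache.nlpcraft.examples.time.utils.keycdn.*

    // Resolves the caller's geo data, falling back to the Silicon Valley default on any lookup failure.
    def resolveGeo(req: NCRequest): ResponseGeoData =
        GeoManager.get(req).getOrElse(GeoManager.getSiliconValley)
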
diff --git a/nlpcraft-stanford/src/main/java/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParser.java b/nlpcraft-stanford/src/main/java/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParser.java
deleted file mode 100644
index b0cf4703..00000000
--- a/nlpcraft-stanford/src/main/java/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParser.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.nlp.entity.parser.stanford;
-
-import edu.stanford.nlp.pipeline.StanfordCoreNLP;
-import org.apache.nlpcraft.NCEntity;
-import org.apache.nlpcraft.NCEntityParser;
-import org.apache.nlpcraft.NCModelConfig;
-import org.apache.nlpcraft.NCRequest;
-import org.apache.nlpcraft.NCToken;
-import org.apache.nlpcraft.nlp.entity.parser.stanford.impl.NCStanfordNLPEntityParserImpl;
-
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Objects;
-import java.util.Set;
-
-/**
- *
- * Generates entities with
- *  - ID `stanford:{name}` where 'name' is element name from configured StanfordCoreNLP instance, from supported set
- *  - property `stanford:{name}:confidence`, where confidence is double value between 0 and 1. Optional.
- *  - property `stanford:{name}:nne`, where nne is normalized value. Optional.
- */
-public class NCStanfordNLPEntityParser implements NCEntityParser {
-    private final NCStanfordNLPEntityParserImpl impl;
-
-    /**
-     *
-     * Requires configured StanfordCoreNLP instance.
-     * Example:
-     *   Properties props = new Properties()
-     *   props.setProperty("annotators", "tokenize, ssplit, pos, lemma, ner")
-     *   StanfordCoreNLP stanford = new StanfordCoreNLP(props)
-     * Look at https://stanfordnlp.github.io/CoreNLP/ner.html#java-api-example for more details.
-     * @param stanford
-     * @param supported
-     */
-    public NCStanfordNLPEntityParser(StanfordCoreNLP stanford, Set<String> supported) {
-        Objects.requireNonNull(stanford, "Stanford instance cannot be null.");
-        Objects.requireNonNull(supported, "Supported elements set cannot be null.");
-
-        this.impl = new NCStanfordNLPEntityParserImpl(stanford, supported);
-    }
-
-    /**
-     *
-     * @param stanford
-     * @param supported
-     */
-    public NCStanfordNLPEntityParser(StanfordCoreNLP stanford, String... supported) {
-        Objects.requireNonNull(stanford, "Stanford instance cannot be null.");
-        Objects.requireNonNull(supported, "Supported element cannot be null.");
-
-        this.impl = new NCStanfordNLPEntityParserImpl(stanford, new HashSet<>(Arrays.asList(supported)));
-    }
-
-    @Override
-    public List<NCEntity> parse(NCRequest req, NCModelConfig cfg, List<NCToken> toks) {
-        return impl.parse(req, cfg, toks);
-    }
-
-    @Override
-    public void onStart(NCModelConfig cfg) {
-        impl.onStart(cfg);
-    }
-
-    @Override
-    public void onStop(NCModelConfig cfg) {
-        impl.onStop(cfg);
-    }
-}
diff --git a/nlpcraft-stanford/src/main/java/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParser.java b/nlpcraft-stanford/src/main/java/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParser.java
deleted file mode 100644
index 78cd92ed..00000000
--- a/nlpcraft-stanford/src/main/java/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParser.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.nlp.token.parser.stanford;
-
-import edu.stanford.nlp.pipeline.StanfordCoreNLP;
-import org.apache.nlpcraft.NCToken;
-import org.apache.nlpcraft.NCTokenParser;
-import org.apache.nlpcraft.nlp.token.parser.stanford.impl.NCStanfordNLPTokenParserImpl;
-
-import java.util.List;
-import java.util.Objects;
-
-/**
- * TODO:
- * Note that this rokenizer also addes 2 properties into parsed token: lemma and pos
- */
-public class NCStanfordNLPTokenParser implements NCTokenParser {
-    private final NCStanfordNLPTokenParserImpl impl;
-
-    /**
-     *
-     * Requires configured StanfordCoreNLP instance.
-     * Example:
-     *   Properties props = new Properties()
-     *   props.setProperty("annotators", "tokenize, ssplit, pos, lemma, ner")
-     *   StanfordCoreNLP stanford = new StanfordCoreNLP(props)
-     * Look at https://stanfordnlp.github.io/CoreNLP/ner.html#java-api-example for more details.
-     *
-     * @param stanford
-     */
-    public NCStanfordNLPTokenParser(StanfordCoreNLP stanford) {
-        Objects.requireNonNull(stanford, "Stanford instance cannot be null.");
-
-        impl = new NCStanfordNLPTokenParserImpl(stanford);
-    }
-
-    @Override
-    public List<NCToken> tokenize(String text) {
-        return impl.tokenize(text);
-    }
-}
diff --git a/nlpcraft-stanford/src/main/java/org/apache/nlpcraft/nlp/entity/parser/stanford/impl/NCStanfordNLPEntityParserImpl.scala b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParser.scala
similarity index 78%
rename from nlpcraft-stanford/src/main/java/org/apache/nlpcraft/nlp/entity/parser/stanford/impl/NCStanfordNLPEntityParserImpl.scala
rename to nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParser.scala
index d308a74c..2426fcda 100644
--- a/nlpcraft-stanford/src/main/java/org/apache/nlpcraft/nlp/entity/parser/stanford/impl/NCStanfordNLPEntityParserImpl.scala
+++ b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParser.scala
@@ -15,15 +15,13 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.entity.parser.stanford.impl
+package org.apache.nlpcraft.nlp.entity.parser.stanford
 
 import edu.stanford.nlp.ling.CoreAnnotations.NormalizedNamedEntityTagAnnotation
 import edu.stanford.nlp.pipeline.*
 import org.apache.nlpcraft.*
 
-import java.util
-import java.util.stream.Collectors
-import java.util.{Properties, ArrayList as JAList, List as JList, Set as JSet}
+import java.util.Objects
 import scala.collection.mutable
 import scala.jdk.CollectionConverters.*
 
@@ -32,16 +30,15 @@ import scala.jdk.CollectionConverters.*
   * @param stanford
   * @param supported
   */
-class NCStanfordNLPEntityParserImpl(stanford: StanfordCoreNLP, supported: JSet[String]) extends NCEntityParser:
-    require(stanford != null)
-    require(supported != null)
+class NCStanfordNLPEntityParser(stanford: StanfordCoreNLP, supported: Set[String]) extends NCEntityParser:
+    Objects.requireNonNull(stanford, "Stanford instance cannot be null.")
+    Objects.requireNonNull(supported, "Supported elements set cannot be null.")
 
-    override def parse(req: NCRequest, cfg: NCModelConfig, toksList: JList[NCToken]): JList[NCEntity] =
-        val toks = toksList.asScala.toSeq
+    override def parse(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): List[NCEntity] =
         val doc = new CoreDocument(req.getText)
         stanford.annotate(doc)
 
-        val res = new JAList[NCEntity]()
+        val res = scala.collection.mutable.ArrayBuffer.empty[NCEntity]
 
         for (e <- doc.entityMentions().asScala)
             val typ = e.entityType().toLowerCase
@@ -66,13 +63,12 @@ class NCStanfordNLPEntityParserImpl(stanford: StanfordCoreNLP, supported: JSet[S
                     )
 
                     if entToks.nonEmpty then
-                        res.add(
+                        res +=
                             new NCPropertyMapAdapter with NCEntity:
                                 props.foreach { (k, v) => put(s"stanford:$typ:$k", v) }
 
-                                override val getTokens: JList[NCToken] = entToks.asJava
+                                override val getTokens: List[NCToken] = entToks
                                 override val getRequestId: String = req.getRequestId
                                 override val getId: String = s"stanford:$typ"
-                            )
 
-        res
\ No newline at end of file
+        res.toList
\ No newline at end of file
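
The Scala parser keeps the contract described in the removed Javadoc: it needs a pre-configured StanfordCoreNLP instance plus a set of supported NER types. A wiring sketch, with the annotator list taken from that Javadoc and the supported types from the test spec below:

    import java.util.Properties
    import edu.stanford.nlp.pipeline.StanfordCoreNLP
    import org.apache.nlpcraft.nlp.entity.parser.stanford.NCStanfordNLPEntityParser

    val props = new Properties()
    props.setProperty("annotators", "tokenize, ssplit, pos, lemma, ner")
    val stanford = new StanfordCoreNLP(props)

    // Detected entities get IDs of the form 'stanford:{type}' for each supported type.
    val parser = new NCStanfordNLPEntityParser(stanford, Set("city", "date", "number", "email"))
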
diff --git a/nlpcraft-stanford/src/main/java/org/apache/nlpcraft/nlp/token/parser/stanford/impl/NCStanfordNLPTokenParserImpl.scala b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParser.scala
similarity index 79%
rename from nlpcraft-stanford/src/main/java/org/apache/nlpcraft/nlp/token/parser/stanford/impl/NCStanfordNLPTokenParserImpl.scala
rename to nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParser.scala
index c9cec5df..0af6d272 100644
--- a/nlpcraft-stanford/src/main/java/org/apache/nlpcraft/nlp/token/parser/stanford/impl/NCStanfordNLPTokenParserImpl.scala
+++ b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParser.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.token.parser.stanford.impl
+package org.apache.nlpcraft.nlp.token.parser.stanford
 
 import edu.stanford.nlp.ling.*
 import edu.stanford.nlp.ling.CoreAnnotations.*
@@ -24,9 +24,7 @@ import edu.stanford.nlp.util.*
 import org.apache.nlpcraft.*
 
 import java.io.StringReader
-import java.util
-import java.util.stream.Collectors
-import java.util.{Properties, List as JList}
+import java.util.Objects
 import scala.collection.mutable
 import scala.jdk.CollectionConverters.*
 
@@ -34,18 +32,18 @@ import scala.jdk.CollectionConverters.*
   *
   * @param stanford
   */
-class NCStanfordNLPTokenParserImpl(stanford: StanfordCoreNLP) extends NCTokenParser:
-    require(stanford != null)
+class NCStanfordNLPTokenParser(stanford: StanfordCoreNLP) extends NCTokenParser:
+    Objects.requireNonNull(stanford, "Stanford instance cannot be null.")
 
     private def nvl(v: String, dflt : => String): String = if v != null then v else dflt
 
-    override def tokenize(text: String): JList[NCToken] =
+    override def tokenize(text: String): List[NCToken] =
         val doc = new CoreDocument(text)
         stanford.annotate(doc)
         val ann = doc.annotation().get(classOf[SentencesAnnotation])
         if ann == null then E("Sentence annotation not found.")
 
-        val toks = ann.asScala.flatMap(_.asInstanceOf[ArrayCoreMap].get(classOf[TokensAnnotation]).asScala).
+        ann.asScala.flatMap(_.asInstanceOf[ArrayCoreMap].get(classOf[TokensAnnotation]).asScala).
             zipWithIndex.map { (t, idx) =>
                 val txt = t.originalText()
 
@@ -56,6 +54,4 @@ class NCStanfordNLPTokenParserImpl(stanford: StanfordCoreNLP) extends NCTokenPar
                     override val getIndex: Int = idx
                     override val getStartCharIndex: Int = t.beginPosition()
                     override val getEndCharIndex: Int = t.endPosition()
-            }.toSeq
-
-        toks.asJava
\ No newline at end of file
+            }.toList
\ No newline at end of file
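
The token parser now returns a plain Scala List[NCToken]. A short usage sketch, reusing a stanford instance configured as in the entity parser example above and assuming the standard NCToken.getText accessor:

    import org.apache.nlpcraft.nlp.token.parser.stanford.NCStanfordNLPTokenParser

    val tokParser = new NCStanfordNLPTokenParser(stanford)

    // Each returned token also carries the 'lemma' and 'pos' properties set by the parser.
    val toks = tokParser.tokenize("I had a lunch with brand names 'AAA'")
    toks.foreach(t => println(t.getText))
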
diff --git a/nlpcraft-stanford/src/test/scala/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParserSpec.scala b/nlpcraft-stanford/src/test/scala/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParserSpec.scala
index 13eeabda..13201f29 100644
--- a/nlpcraft-stanford/src/test/scala/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParserSpec.scala
+++ b/nlpcraft-stanford/src/test/scala/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParserSpec.scala
@@ -17,27 +17,23 @@
 
 package org.apache.nlpcraft.nlp.entity.parser.stanford
 
-import org.apache.nlpcraft.nlp.util.NCTestUtils
-import org.apache.nlpcraft.nlp.util.NCTestRequest
+import org.apache.nlpcraft.nlp.util.{CFG, *}
 import org.apache.nlpcraft.nlp.util.stanford.*
 import org.junit.jupiter.api.Test
-
-import scala.jdk.CollectionConverters.*
-
 /**
   *
   */
 class NCStanfordNLPEntityParserSpec:
-    private val parser = NCStanfordNLPEntityParser(STANFORD, Set("city", "date", "number", "email").asJava)
+    private val parser = NCStanfordNLPEntityParser(STANFORD, Set("city", "date", "number", "email"))
 
     @Test
     def test(): Unit =
         val txt = "Los Angeles, 23 August, 23 and sergeykamov@apache.org, tomorrow"
 
         val toks = TOK_STANFORD_PARSER.tokenize(txt)
-        NCTestUtils.printTokens(toks.asScala.toSeq)
+        NCTestUtils.printTokens(toks)
 
         val res = parser.parse(NCTestRequest(txt), CFG, toks)
-        NCTestUtils.printEntities(txt, res.asScala.toSeq)
+        NCTestUtils.printEntities(txt, res)
 
-        require(res.size() == 5)
\ No newline at end of file
+        require(res.size == 5)
\ No newline at end of file
diff --git a/nlpcraft-stanford/src/test/scala/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParserSpec.scala b/nlpcraft-stanford/src/test/scala/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParserSpec.scala
index 157d7d26..954078c2 100644
--- a/nlpcraft-stanford/src/test/scala/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParserSpec.scala
+++ b/nlpcraft-stanford/src/test/scala/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParserSpec.scala
@@ -31,7 +31,7 @@ import scala.jdk.CollectionConverters.*
 class NCStanfordNLPTokenParserSpec:
     @Test
     def test(): Unit =
-        val toks = TOK_STANFORD_PARSER.tokenize("I had a lunch with brand names 'AAA'").asScala.toSeq
+        val toks = TOK_STANFORD_PARSER.tokenize("I had a lunch with brand names 'AAA'")
 
         require(toks.sizeIs > 1)
         NCTestUtils.printTokens(toks)
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/package-info.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCCallbackData.scala
similarity index 76%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/package-info.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/NCCallbackData.scala
index 905402e1..2bc38c0b 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/package-info.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCCallbackData.scala
@@ -6,7 +6,7 @@
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,7 +15,9 @@
  * limitations under the License.
  */
 
-/**
- * Contains built-in entity parsers.
- */
-package org.apache.nlpcraft.nlp.entity.parser;
\ No newline at end of file
+package org.apache.nlpcraft
+
+trait NCCallbackData:
+    def getIntentId: String
+    def getCallbackArguments: List[List[NCEntity]]
+    def getCallback: List[List[NCEntity]] => NCResult
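
NCCallbackData bundles a matched intent ID with its callback and the prepared per-term entity arguments. A sketch of invoking it, assuming a cb value obtained from the client:

    import org.apache.nlpcraft.*

    // Runs the matched intent's callback with the arguments prepared by the matcher.
    def execute(cb: NCCallbackData): NCResult =
        println(s"Matched intent: ${cb.getIntentId}")
        cb.getCallback.apply(cb.getCallbackArguments)
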
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCContext.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCContext.java
deleted file mode 100644
index 858f1c77..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCContext.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.Collection;
-import java.util.List;
-
-/**
- * A context containing a fully parsed data from the input query. 
- * 
- * @see NCModel#onContext(NCContext)
- */
-public interface NCContext {
-    /**
-     * Tests if given entity is part of the query this context is associated with.
-     *
-     * @param ent Entity to check.
-     */
-    boolean isOwnerOf(NCEntity ent);
-
-    /**
-     * Gets configuration of the model this context is associated with.
-     *
-     * @return Model.
-     */
-    NCModelConfig getModelConfig();
-
-    /**
-     * Gets user request container.
-     *
-     * @return User request.
-     */
-    NCRequest getRequest();
-
-    /**
-     * Gets current conversation for this context.
-     *
-     * @return Current conversation.
-     */
-    NCConversation getConversation();
-
-    /**
-     * Gets the list of parsing variants. Returned list always contains as least one parsing variant.
-     * 
-     * @return A non-empty list of parsing variants.
-     * @see NCModel#onVariant(NCVariant)
-     */
-    Collection<NCVariant> getVariants();
-
-    /**
-     * Gets the list of all tokens for the input query this context is associated with.
-     *
-     * @return List of tokens for this context. Can be empty but never {@code null}.
-     */
-    List<NCToken> getTokens();
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCContext.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCContext.scala
new file mode 100644
index 00000000..e2d3e5fb
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCContext.scala
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * A context containing the fully parsed data from the input query.
+  *
+  * @see NCModel#onContext(NCContext) */
+trait NCContext:
+    /**
+      * Tests if given entity is part of the query this context is associated with.
+      *
+      * @param ent Entity to check. */
+    def isOwnerOf(ent: NCEntity): Boolean
+
+    /**
+      * Gets configuration of the model this context is associated with.
+      *
+      * @return Model. */
+    def getModelConfig: NCModelConfig
+
+    /**
+      * Gets user request container.
+      *
+      * @return User request. */
+    def getRequest: NCRequest
+
+    /**
+      * Gets current conversation for this context.
+      *
+      * @return Current conversation. */
+    def getConversation: NCConversation
+
+    /**
+      * Gets the list of parsing variants. The returned list always contains at least one parsing variant.
+      *
+      * @return A non-empty list of parsing variants.
+      * @see NCModel#onVariant(NCVariant) */
+    def getVariants: List[NCVariant]
+
+    /**
+      * Gets the list of all tokens for the input query this context is associated with.
+      *
+      * @return List of tokens for this context. Can be empty but never {@code null}. */
+    def getTokens: List[NCToken]
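
The Scala trait mirrors the removed Java interface one-to-one, with Scala List replacing the Java collections. A small sketch of reading it, assuming a ctx value passed in by the engine:

    import org.apache.nlpcraft.*

    // Prints a short summary of what the engine parsed for the request behind this context.
    def describe(ctx: NCContext): Unit =
        println(s"Request: ${ctx.getRequest.getText}")
        println(s"Tokens: ${ctx.getTokens.size}, variants: ${ctx.getVariants.size}")
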
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCConversation.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCConversation.java
deleted file mode 100644
index ab0fbb36..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCConversation.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.List;
-import java.util.function.Predicate;
-
-/**
- * Conversation container. Conversation is essentially a container for everything that should be implicitly remembered
- * during the active, ongoing conversation and forgotten once the conversation stops. Conversation contains the
- * following elements:
- * <ul>
- *     <li>List of entities defining a "short-term-memory (STM)" of this conversation.</li>
- *     <li>Chronological list of previously matched intents.</li>
- *     <li>Auto-expiring user data.</li>
- * </ul>
- * Note that the conversation is unique for given combination of user and data model.
- * <p>
- * Conversation management is based on idea of a short-term-memory (STM). STM can be viewed as a condensed
- * short-term history of the input for a given user and data model. Every submitted user request that wasn't
- * rejected is added to the conversation STM as a list of tokens. Existing STM tokens belonging to the same
- * group will be overridden by the more recent tokens from the same group. Note also that tokens in STM automatically
- * expire (i.e. context is "forgotten") after a certain period of time and/or based on the depth of the
- * conversation since the last mention.
- * <p>
- * You can also maintain user state-machine between requests using conversation's session. Conversation's {@link #getData() data} is
- * a mutable thread-safe container that can hold any arbitrary user data while supporting the same expiration logic as
- * the rest of the conversation elements (i.e. tokens and previously matched intent IDs).
- * <p>
- * Conversation expiration policy is configured by two configuration properties:
- * <ul>
- *     <li>{@link NCModelConfig#getConversationDepth()}</li>
- *     <li>{@link NCModelConfig#getConversationTimeout()}</li>
- * </ul>
- *
- * @see NCContext#getConversation()
- * @see NCModelConfig#getConversationDepth()
- * @see NCModelConfig#getConversationTimeout()
- */
-public interface NCConversation {
-    /**
-     * Gets user-defined as a mutable thread-safe property container. Note tha this container has the same expiration
-     * policy as the conversation it belongs to. Specifically, this returned container will be cleared when the
-     * conversation gets cleared automatically (by timeout or depth) or manually.
-     *
-     * @return User-defined conversation data container. Can be empty but never {@code null}.
-     */
-    NCPropertyMap getData();
-
-    /**
-     * Gets an ordered list of entities stored in the conversation STM for the current user and data model. Entities in
-     * the returned list are ordered by their conversational depth, i.e. the entities from more recent requests appear
-     * before entities from older requests.
-     * <p>
-     * Note that specific rules by which STM operates are undefined for the purpose of this function (i.e. callers
-     * should not rely on any observed behavior of how STM stores and evicts its content).
-     *
-     * @return List of entities for this conversation's STM. The list can be empty which indicates that conversation
-     * is brand new or expired - but never {@code null}.
-     */
-    List<NCEntity> getStm();
-
-    /**
-     * Gets the chronologically ordered list of previously matched intents sorted from oldest to newest
-     * for the current user.
-     *
-     * @return List of chronologically ordered previously matched dialog flow items.
-     */
-    List<NCDialogFlowItem> getDialogFlow();
-
-    /**
-     * Removes all entities satisfying given predicate from the conversation STM. This is particularly useful when the
-     * logic processing the user input makes an implicit assumption not present in the user input itself. Such
-     * assumption may alter the conversation (without having an explicit entities responsible for it) and therefore
-     * this method can be used to remove "stale" entities from conversation STM.
-     * <p>
-     * For example, in some cases the intent logic can assume the user current location as an implicit geographical
-     * location and therefore all existing geographical-related entities should be removed from the conversation
-     * STM to maintain correct context.
-     *
-     * @param filter Entity remove filter.
-     */
-    void clearStm(Predicate<NCEntity> filter);
-
-    /**
-     * Removes all previously matched intents using given dialog flow item predicate.
-     * <p>
-     * History of matched intents (i.e. the dialog flow) can be used in intent definition as part of its
-     * matching template. NLPCraft maintains the window of previously matched intents based on time, i.e.
-     * after certain period of time the oldest previously matched intents are forgotten and removed from
-     * dialog flow. This method allows explicitly clear previously matched intents from the
-     * dialog flow based on user logic other than time window.
-     *
-     * @param filter Dialog flow filter.
-     */
-    void clearDialog(Predicate<NCDialogFlowItem> filter);
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCConversation.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCConversation.scala
new file mode 100644
index 00000000..37cc0059
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCConversation.scala
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * Conversation container. Conversation is essentially a container for everything that should be implicitly remembered
+  * during the active, ongoing conversation and forgotten once the conversation stops. Conversation contains the
+  * following elements:
+  * <ul>
+  * <li>List of entities defining a "short-term-memory (STM)" of this conversation.</li>
+  * <li>Chronological list of previously matched intents.</li>
+  * <li>Auto-expiring user data.</li>
+  * </ul>
+  * Note that the conversation is unique for given combination of user and data model.
+  * <p>
+  * Conversation management is based on idea of a short-term-memory (STM). STM can be viewed as a condensed
+  * short-term history of the input for a given user and data model. Every submitted user request that wasn't
+  * rejected is added to the conversation STM as a list of tokens. Existing STM tokens belonging to the same
+  * group will be overridden by the more recent tokens from the same group. Note also that tokens in STM automatically
+  * expire (i.e. context is "forgotten") after a certain period of time and/or based on the depth of the
+  * conversation since the last mention.
+  * <p>
+  * You can also maintain a user state machine between requests using the conversation's session. Conversation's {@link #getData() data} is
+  * a mutable thread-safe container that can hold any arbitrary user data while supporting the same expiration logic as
+  * the rest of the conversation elements (i.e. tokens and previously matched intent IDs).
+  * <p>
+  * Conversation expiration policy is configured by two configuration properties:
+  * <ul>
+  * <li>{@link NCModelConfig#getConversationDepth()}</li>
+  * <li>{@link NCModelConfig#getConversationTimeout()}</li>
+  * </ul>
+  *
+  * @see NCContext#getConversation()
+  * @see NCModelConfig#getConversationDepth()
+  * @see NCModelConfig#getConversationTimeout() */
+trait NCConversation:
+    /**
+      * Gets user-defined data as a mutable thread-safe property container. Note that this container has the same expiration
+      * policy as the conversation it belongs to. Specifically, this returned container will be cleared when the
+      * conversation gets cleared automatically (by timeout or depth) or manually.
+      *
+      * @return User-defined conversation data container. Can be empty but never {@code null}. */
+    def getData: NCPropertyMap
+
+    /**
+      * Gets an ordered list of entities stored in the conversation STM for the current user and data model. Entities in
+      * the returned list are ordered by their conversational depth, i.e. the entities from more recent requests appear
+      * before entities from older requests.
+      * <p>
+      * Note that specific rules by which STM operates are undefined for the purpose of this function (i.e. callers
+      * should not rely on any observed behavior of how STM stores and evicts its content).
+      *
+      * @return List of entities for this conversation's STM. The list can be empty which indicates that conversation
+      * is brand new or expired - but never {@code null}. */
+    def getStm: List[NCEntity]
+
+    /**
+      * Gets the chronologically ordered list of previously matched intents sorted from oldest to newest
+      * for the current user.
+      *
+      * @return List of chronologically ordered previously matched dialog flow items. */
+    def getDialogFlow: List[NCDialogFlowItem]
+
+    /**
+      * Removes all entities satisfying given predicate from the conversation STM. This is particularly useful when the
+      * logic processing the user input makes an implicit assumption not present in the user input itself. Such
+      * assumption may alter the conversation (without having explicit entities responsible for it) and therefore
+      * this method can be used to remove "stale" entities from conversation STM.
+      * <p>
+      * For example, in some cases the intent logic can assume the user's current location as an implicit geographical
+      * location and therefore all existing geographical-related entities should be removed from the conversation
+      * STM to maintain correct context.
+      *
+      * @param filter Entity remove filter. */
+    def clearStm(filter: NCEntity => Boolean): Unit
+
+    /**
+      * Removes all previously matched intents using given dialog flow item predicate.
+      * <p>
+      * History of matched intents (i.e. the dialog flow) can be used in intent definition as part of its
+      * matching template. NLPCraft maintains the window of previously matched intents based on time, i.e.
+      * after certain period of time the oldest previously matched intents are forgotten and removed from
+      * dialog flow. This method allows explicitly clearing previously matched intents from the
+      * dialog flow based on user logic other than time window.
+      *
+      * @param filter Dialog flow filter. */
+    def clearDialog(filter: NCDialogFlowItem => Boolean): Unit
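
A minimal sketch of how intent logic might use the conversation API above, mirroring the geographical example from the scaladoc; the group name "geo" is an assumption made only for illustration:

    import org.apache.nlpcraft.*

    // Drop "stale" geographical entities from the STM (group name "geo" is assumed).
    def forgetGeo(conv: NCConversation): Unit =
        conv.clearStm(ent => ent.getGroups.contains("geo"))

    // Forget previously matched intents with a given ID, regardless of the time window.
    def forgetIntent(conv: NCConversation, intentId: String): Unit =
        conv.clearDialog(item => item.getIntentMatch.getIntentId == intentId)
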
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCCuration.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCCuration.java
deleted file mode 100644
index 04acff78..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCCuration.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-/**
- * A type of rejection indicating that human curation is required. Curation is typically an indication that input
- * query is likely valid but needs a human correction like a type fix, slang resolution, etc.
- * <p>
- * Note that NLPCraft does not handle the curation process itself but only allows to indicate the curation
- * request by throwing this exception. Curation is a special type of rejection. User code is responsible for the actual
- * handling of the curation logic.
- */
-public class NCCuration extends NCRejection {
-    /**
-     * Creates new curation exception with given message.
-     *
-     * @param msg Curation message.
-     */
-    public NCCuration(String msg) {
-        super(msg);
-    }
-
-    /**
-     * Creates new curation exception with given message and cause.
-     *
-     * @param msg Curation message.
-     * @param cause Cause of this exception.
-     */
-    public NCCuration(String msg, Throwable cause) {
-        super(msg, cause);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCCuration.scala
similarity index 57%
copy from nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala
copy to nlpcraft/src/main/scala/org/apache/nlpcraft/NCCuration.scala
index e41aaee3..7819b585 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCCuration.scala
@@ -15,16 +15,14 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.internal.intent
-
-import org.apache.nlpcraft.*
-
-import scala.jdk.CollectionConverters.*
+package org.apache.nlpcraft
 
 /**
-  *
-  * @param impl
-  * @param index
+  * A type of rejection indicating that human curation is required. Curation is typically an indication that input
+  * query is likely valid but needs a human correction like a typo fix, slang resolution, etc.
+  * <p>
+  * Note that NLPCraft does not handle the curation process itself but only allows to indicate the curation
+  * request by throwing this exception. Curation is a special type of rejection. User code is responsible for the actual
+  * handling of the curation logic.
   */
-case class NCIDLEntity(impl: NCEntity, index: Int):
-    lazy val text: String = impl.mkText()
+class NCCuration(msg: String, cause: Throwable = null) extends NCRejection(msg, cause)
\ No newline at end of file
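
A hedged sketch of how user code might request curation with the class above; the triggering condition is purely illustrative:

    import org.apache.nlpcraft.*

    // Inside intent logic: signal that the query looks valid but needs human review.
    def requireReview(query: String): Unit =
        if query.contains("???") then // illustrative condition only
            throw new NCCuration(s"Human curation requested for: $query")
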
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCDialogFlowItem.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCDialogFlowItem.java
deleted file mode 100644
index f293fda0..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCDialogFlowItem.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-/**
- * An item of the dialog flow. Dialog flow is a chronologically ordered list of dialog flow items. Each item
- * represents a snapshot of winning intent's match and its associated data. List of dialog flow items
- * is passed into a custom user-defined dialog flow match method.
- *
- * @see NCConversation#getDialogFlow()
- */
-public interface NCDialogFlowItem {
-    /**
-     * Gets the intent match container.
-     *
-     * @return Intent match container.
-     */
-    NCIntentMatch getIntentMatch();
-
-    /**
-     * Gets the input request descriptor.
-     *
-     * @return Input request descriptor.
-     */
-    NCRequest getRequest();
-
-    /**
-     * Gets the winning intent's result.
-     *
-     * @return Winning intent's result.
-     */
-    NCResult getResult();
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticElement.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCDialogFlowItem.scala
similarity index 51%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticElement.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/NCDialogFlowItem.scala
index f8bdedb1..294cc146 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticElement.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCDialogFlowItem.scala
@@ -15,51 +15,33 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.entity.parser.semantic;
-
-import java.util.Collections;
-import java.util.Map;
-import java.util.Set;
+package org.apache.nlpcraft
 
 /**
- *
- */
-public interface NCSemanticElement {
-    /**
-     *
-     * @return
-     */
-    String getId();
-
-    /**
-     *
-     * @return
-     */
-    default Set<String> getGroups() {
-        return Collections.singleton(getId());
-    }
-
+  * An item of the dialog flow. Dialog flow is a chronologically ordered list of dialog flow items. Each item
+  * represents a snapshot of winning intent's match and its associated data. List of dialog flow items
+  * is passed into a custom user-defined dialog flow match method.
+  *
+  * @see NCConversation#getDialogFlow()
+  */
+trait NCDialogFlowItem:
     /**
-     *
-     * @return
-     */
-    default Map<String, Set<String>> getValues() {
-        return Collections.emptyMap();
-    }
+      * Gets the intent match container.
+      *
+      * @return Intent match container.
+      */
+    def getIntentMatch: NCIntentMatch
 
     /**
-     *
-     * @return
-     */
-    default Set<String> getSynonyms() {
-        return Collections.emptySet();
-    }
+      * Gets the input request descriptor.
+      *
+      * @return Input request descriptor.
+      */
+    def getRequest: NCRequest
 
     /**
-     *
-     * @return
-     */
-    default Map<String, Object> getProperties() {
-        return Collections.emptyMap();
-    }
-}
\ No newline at end of file
+      * Gets the winning intent's result.
+      *
+      * @return Winning intent's result.
+      */
+    def getResult: NCResult
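
As an illustrative-only sketch, dialog flow items can be read back from the conversation to branch on what was matched previously:

    import org.apache.nlpcraft.*

    // Returns the ID of the most recently matched intent, if any.
    def lastIntentId(conv: NCConversation): Option[String] =
        // The dialog flow is ordered from oldest to newest.
        conv.getDialogFlow.lastOption.map(_.getIntentMatch.getIntentId)
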
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntity.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntity.java
deleted file mode 100644
index ab0137e9..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntity.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.Collections;
-import java.util.List;
-import java.util.Set;
-import java.util.stream.Collectors;
-
-/**
- * An enenity is a collection if one or more {@link NCToken tokens}. An entity typically has a
- * consistent semantic meaning and usually denotes a real-world object, such as persons, locations, number,
- * date and time, organizations, products, etc. - where such objects can be abstract or have a physical existence.
- * Entities are produced by {@link NCEntityParser}. See {@link NCPipeline} for documentation on the entities in the
- * overall processing pipeline.
- *
- * <span class="hdr">Metadata</span>
- * Note that both {@link NCToken} and {@link NCEntity} interfaces extend {@link NCPropertyMap} interface
- * that allows them to store custom metadata properties. Parser, enrichers and validators for tokens
- * and entities use this capability to store and check their properties in tokens and entities.
- *
- * @see NCEntity
- * @see NCToken
- * @see NCTokenParser
- * @see NCTokenEnricher
- * @see NCTokenValidator
- * @see NCEntityParser
- * @see NCEntityEnricher
- * @see NCEntityValidator
- * @see NCEntityMapper
- */
-public interface NCEntity extends NCPropertyMap {
-    /**
-     * Gets the list of tokens this entity is comprised of. Ruturned list is never empty or {@code null}.
-     *
-     * @return List of tokens that are part of this entity.
-     */
-    List<NCToken> getTokens();
-
-    /**
-     * Joins all tokens' text with trimming using space as a delimiter. This function does not cache the
-     * result and performs text construction on each call. Make sure to cache the result to avoid
-     * unnecessary parasitic workload if and when method {@link #getTokens()} does not change.
-     *
-     * @return Constructuted textual representation of this entity. Note that returned value is not
-     *      cached and created anew every time this method is called.
-     */
-    default String mkText() {
-        return getTokens().stream().map(s -> s.getText().trim()).collect(Collectors.joining(" ")).trim();
-    }
-
-    /**
-     * Gets ID of the request this entity is part of.
-     *
-     * @return ID of the request this entity is part of.
-     */
-    String getRequestId();
-
-    /**
-     * Gets optional set of groups this entity belongs to.
-     *
-     * @return Optional set of groups this entity belongs to. Returned set can be empty but never {@code null}.
-     */
-    default Set<String> getGroups() { return Collections.singleton(getId()); }
-
-    /**
-     * Gets globally unique ID of this entity.
-     *
-     * @return Globally unique ID of this entity.
-     */
-    String getId();
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntity.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntity.scala
new file mode 100644
index 00000000..8360905c
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntity.scala
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * An entity is a collection of one or more {@link NCToken tokens}. An entity typically has a
+  * consistent semantic meaning and usually denotes a real-world object, such as persons, locations, number,
+  * date and time, organizations, products, etc. - where such objects can be abstract or have a physical existence.
+  * Entities are produced by {@link NCEntityParser}. See {@link NCPipeline} for documentation on the entities in the
+  * overall processing pipeline.
+  *
+  * <span class="hdr">Metadata</span>
+  * Note that both {@link NCToken} and {@link NCEntity} interfaces extend {@link NCPropertyMap} interface
+  * that allows them to store custom metadata properties. Parser, enrichers and validators for tokens
+  * and entities use this capability to store and check their properties in tokens and entities.
+  *
+  * @see NCEntity
+  * @see NCToken
+  * @see NCTokenParser
+  * @see NCTokenEnricher
+  * @see NCTokenValidator
+  * @see NCEntityParser
+  * @see NCEntityEnricher
+  * @see NCEntityValidator
+  * @see NCEntityMapper */
+trait NCEntity extends NCPropertyMap:
+
+    /**
+      * Gets the list of tokens this entity is comprised of. The returned list is never empty or {@code null}.
+      *
+      * @return List of tokens that are part of this entity. */
+    def getTokens: List[NCToken]
+
+    /**
+      * Joins all tokens' text with trimming using space as a delimiter. This function does not cache the
+      * result and performs text construction on each call. Make sure to cache the result to avoid
+      * unnecessary parasitic workload if and when the result of {@link #getTokens()} does not change.
+      *
+      * @return Constructed textual representation of this entity. Note that the returned value is not
+      * cached and created anew every time this method is called. */
+    def mkText: String = getTokens.map(_.getText.trim).mkString(" ").trim
+
+    /**
+      * Gets ID of the request this entity is part of.
+      *
+      * @return ID of the request this entity is part of. */
+    def getRequestId: String
+
+    /**
+      * Gets optional set of groups this entity belongs to.
+      *
+      * @return Optional set of groups this entity belongs to. Returned set can be empty but never {@code null}. */
+    def getGroups: Set[String] = Set(getId)
+
+    /**
+      * Gets globally unique ID of this entity.
+      *
+      * @return Globally unique ID of this entity. */
+    def getId: String
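
A small, illustrative helper over the trait above showing the default 'mkText' and 'getGroups' members in use:

    import org.apache.nlpcraft.*

    // Pretty-prints entities of a given group (the group name is caller-supplied).
    def textsOfGroup(ents: List[NCEntity], group: String): List[String] =
        ents.filter(_.getGroups.contains(group)).map(e => s"${e.getId}: ${e.mkText}")
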
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityEnricher.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityEnricher.java
deleted file mode 100644
index 118948f0..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityEnricher.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.List;
-
-/**
- * A pipeline component that enrichers entities by settings their properties.
- * See {@link NCPipeline} for documentation on the overall processing pipeline. Note that this is
- * an optional component in the pipeline.
- *
- * @see NCPipeline#getEntityEnrichers()
- * @see NCEntity
- * @see NCToken
- * @see NCTokenParser
- * @see NCTokenEnricher
- * @see NCTokenValidator
- * @see NCEntityParser
- * @see NCEntityEnricher
- * @see NCEntityValidator
- * @see NCEntityMapper
- */
-public interface NCEntityEnricher extends NCLifecycle {
-    /**
-     * Enriches given list of entities by settings their properties.
-     *
-     * @param req Input request descriptor.
-     * @param cfg Configuration of the model this component is associated with.
-     * @param ents List of entities to enrich.
-     * @throws NCException Thrown in case of any errors.
-     * @see NCPropertyMap
-     */
-    void enrich(NCRequest req, NCModelConfig cfg, List<NCEntity> ents);
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityEnricher.scala
new file mode 100644
index 00000000..87761372
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityEnricher.scala
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * A pipeline component that enriches entities by setting their properties.
+  * See {@link NCPipeline} for documentation on the overall processing pipeline. Note that this is
+  * an optional component in the pipeline.
+  *
+  * @see NCPipeline#getEntityEnrichers()
+  * @see NCEntity
+  * @see NCToken
+  * @see NCTokenParser
+  * @see NCTokenEnricher
+  * @see NCTokenValidator
+  * @see NCEntityParser
+  * @see NCEntityEnricher
+  * @see NCEntityValidator
+  * @see NCEntityMapper
+  */
+trait NCEntityEnricher extends NCLifecycle:
+    /**
+      * Enriches the given list of entities by setting their properties.
+      *
+      * @param req Input request descriptor.
+      * @param cfg Configuration of the model this component is associated with.
+      * @param ents List of entities to enrich.
+      * @throws NCException Thrown in case of any errors.
+      * @see NCPropertyMap */
+    def enrich(req: NCRequest, cfg: NCModelConfig, ents: List[NCEntity]): Unit
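
A sketch of an enricher against the trait above, under the assumption that NCPropertyMap exposes a 'put'-style setter (its exact signature is not shown in this diff), with a made-up property key:

    import org.apache.nlpcraft.*

    // Marks multi-token entities with a custom metadata flag.
    class MultiTokenEnricher extends NCEntityEnricher:
        override def enrich(req: NCRequest, cfg: NCModelConfig, ents: List[NCEntity]): Unit =
            for ent <- ents if ent.getTokens.sizeIs > 1 do
                ent.put("multi:token", true) // assumed NCPropertyMap setter; adjust to the actual API
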
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityMapper.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityMapper.java
deleted file mode 100644
index cbe8e226..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityMapper.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.List;
-
-/**
- * A pipeline component that allows to map one set of entities into another after the entities were parsed
- * and enriched. Entity mapper is an optional component and the pipeline can have zero or more entity mappers. Mappers
- * are typically used for combine several existing entities into a new one without necessarily touching the entity
- * parser or enrichers. See {@link NCPipeline} for documentation on the overall processing pipeline.
- *
- * @see NCPipeline#getEntityMappers()
- * @see NCEntity
- * @see NCToken
- * @see NCTokenParser
- * @see NCTokenEnricher
- * @see NCTokenValidator
- * @see NCEntityParser
- * @see NCEntityEnricher
- * @see NCEntityValidator
- * @see NCEntityMapper
- */
-public interface NCEntityMapper extends NCLifecycle {
-    /**
-     * Maps given of entities into a new list of entities.
-     *
-     * @param req Input request descriptor.
-     * @param cfg Configuration of the model this component is associated with.
-     * @param ents List of entities to map.
-     * @return List of entities (new or existing ones).
-     */
-    List<NCEntity> map(NCRequest req, NCModelConfig cfg, List<NCEntity> ents);
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityMapper.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityMapper.scala
new file mode 100644
index 00000000..7e45ca02
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityMapper.scala
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * A pipeline component that allows mapping one set of entities into another after the entities have been parsed
+  * and enriched. Entity mapper is an optional component and the pipeline can have zero or more entity mappers. Mappers
+  * are typically used to combine several existing entities into a new one without necessarily touching the entity
+  * parser or enrichers. See {@link NCPipeline} for documentation on the overall processing pipeline.
+  *
+  * @see NCPipeline#getEntityMappers()
+  * @see NCEntity
+  * @see NCToken
+  * @see NCTokenParser
+  * @see NCTokenEnricher
+  * @see NCTokenValidator
+  * @see NCEntityParser
+  * @see NCEntityEnricher
+  * @see NCEntityValidator
+  * @see NCEntityMapper
+  */
+trait NCEntityMapper extends NCLifecycle:
+    /**
+      * Maps the given list of entities into a new list of entities.
+      *
+      * @param req Input request descriptor.
+      * @param cfg Configuration of the model this component is associated with.
+      * @param ents List of entities to map.
+      * @return List of entities (new or existing ones).
+      */
+    def map(req: NCRequest, cfg: NCModelConfig, ents: List[NCEntity]): List[NCEntity]
+
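
An illustrative mapper sketch that collapses all entities of one group into a single combined entity, matching the "combine" use case described above. It assumes an NCPropertyMapAdapter-style helper for the NCPropertyMap plumbing; if no such helper exists, any NCPropertyMap implementation would do:

    import org.apache.nlpcraft.*

    // Collapses all entities of one group into one combined entity; leaves the rest untouched.
    class GroupCollapseMapper(group: String, newId: String) extends NCEntityMapper:
        override def map(req: NCRequest, cfg: NCModelConfig, ents: List[NCEntity]): List[NCEntity] =
            val (hits, rest) = ents.partition(_.getGroups.contains(group))
            if hits.sizeIs < 2 then ents
            else
                // NCPropertyMapAdapter is assumed here to supply the NCPropertyMap members.
                val combined = new NCPropertyMapAdapter with NCEntity:
                    override def getTokens: List[NCToken] = hits.flatMap(_.getTokens)
                    override def getRequestId: String = hits.head.getRequestId
                    override def getId: String = newId
                rest :+ combined
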
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityParser.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityParser.java
deleted file mode 100644
index f77cc52d..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityParser.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.List;
-
-/**
- * A pipeline component that converts list of tokens into the list of entities.
- * See {@link NCPipeline} for documentation on the overall processing pipeline. Note that pipeline
- * must have at least one entity parser.
- *
- * @see NCPipeline#getEntityParsers()
- * @see NCEntity
- * @see NCToken
- * @see NCTokenParser
- * @see NCTokenEnricher
- * @see NCTokenValidator
- * @see NCEntityParser
- * @see NCEntityEnricher
- * @see NCEntityValidator
- * @see NCEntityMapper
- */
-public interface NCEntityParser extends NCLifecycle {
-    /**
-     * Converts, parses, given list of tokens into the list of entities.
-     *
-     * @param req Input request descriptor.
-     * @param cfg Configuration of the model this component is associated with.
-     * @param toks List of tokens to convert.
-     * @return List of parsed entities, potentially empty but never {@code null}.
-     * @@throws NCException Thrown in case of any errors.
-     */
-    List<NCEntity> parse(NCRequest req, NCModelConfig cfg, List<NCToken> toks);
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityParser.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityParser.scala
new file mode 100644
index 00000000..d2687de5
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityParser.scala
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * A pipeline component that converts a list of tokens into a list of entities.
+  * See {@link NCPipeline} for documentation on the overall processing pipeline. Note that pipeline
+  * must have at least one entity parser.
+  *
+  * @see NCPipeline#getEntityParsers()
+  * @see NCEntity
+  * @see NCToken
+  * @see NCTokenParser
+  * @see NCTokenEnricher
+  * @see NCTokenValidator
+  * @see NCEntityParser
+  * @see NCEntityEnricher
+  * @see NCEntityValidator
+  * @see NCEntityMapper */
+trait NCEntityParser extends NCLifecycle:
+    /**
+      * Converts (parses) the given list of tokens into a list of entities.
+      *
+      * @param req Input request descriptor.
+      * @param cfg Configuration of the model this component is associated with.
+      * @param toks List of tokens to convert.
+      * @return List of parsed entities, potentially empty but never {@code null}.
+      * @throws NCException Thrown in case of any errors. */
+    def parse(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): List[NCEntity]
+
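
An illustrative parser sketch over the trait above: it promotes every token matching a fixed keyword set into a single-token entity. NCPropertyMapAdapter, 'req.getRequestId' and the element ID are assumptions of this sketch:

    import org.apache.nlpcraft.*

    // Turns each keyword token into its own entity (keyword set is caller-supplied).
    class KeywordEntityParser(keywords: Set[String], elemId: String) extends NCEntityParser:
        override def parse(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): List[NCEntity] =
            def mkEntity(tok: NCToken): NCEntity =
                // NCPropertyMapAdapter and 'req.getRequestId' are assumed; adjust to the actual API.
                new NCPropertyMapAdapter with NCEntity:
                    override def getTokens: List[NCToken] = List(tok)
                    override def getRequestId: String = req.getRequestId
                    override def getId: String = elemId
            toks.filter(t => keywords.contains(t.getText.toLowerCase)).map(mkEntity)
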
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityValidator.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityValidator.java
deleted file mode 100644
index 9e487761..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityValidator.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.List;
-
-/**
- * A pipeline components that validates the final list of parsed and enriched entities.
- * See {@link NCPipeline} for documentation on the overall processing pipeline. Note that this is an
- * optional component.
- *
- * @see NCPipeline#getEntityValidators()
- * @see NCEntity
- * @see NCToken
- * @see NCTokenParser
- * @see NCTokenEnricher
- * @see NCTokenValidator
- * @see NCEntityParser
- * @see NCEntityEnricher
- * @see NCEntityValidator
- * @see NCEntityMapper
- */
-public interface NCEntityValidator extends NCLifecycle {
-    /**
-     *
-     * @param req Input request descriptor.
-     * @param cfg Configuration of the model this component is associated with.
-     * @param ents List of entities to validate.
-     * @@throws NCException Thrown in case of any validation violations.
-     */
-    void validate(NCRequest req, NCModelConfig cfg, List<NCEntity> ents);
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityValidator.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityValidator.scala
new file mode 100644
index 00000000..2ba7e195
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntityValidator.scala
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * A pipeline component that validates the final list of parsed and enriched entities.
+  * See {@link NCPipeline} for documentation on the overall processing pipeline. Note that this is an
+  * optional component.
+  *
+  * @see NCPipeline#getEntityValidators()
+  * @see NCEntity
+  * @see NCToken
+  * @see NCTokenParser
+  * @see NCTokenEnricher
+  * @see NCTokenValidator
+  * @see NCEntityParser
+  * @see NCEntityEnricher
+  * @see NCEntityValidator
+  * @see NCEntityMapper */
+trait NCEntityValidator extends NCLifecycle:
+    /**
+      * Validates the given list of parsed and enriched entities.
+      * @param req Input request descriptor.
+      * @param cfg Configuration of the model this component is associated with.
+      * @param ents List of entities to validate.
+      * @throws NCException Thrown in case of any validation violations. */
+    def validate(req: NCRequest, cfg: NCModelConfig, ents: List[NCEntity]): Unit
+
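
A minimal validator sketch against the trait above; the "banned word" rule is purely illustrative:

    import org.apache.nlpcraft.*

    // Rejects requests whose entities match a banned word (illustrative rule only).
    class BannedWordValidator(banned: Set[String]) extends NCEntityValidator:
        override def validate(req: NCRequest, cfg: NCModelConfig, ents: List[NCEntity]): Unit =
            for ent <- ents if banned.contains(ent.mkText.toLowerCase) do
                throw new NCException(s"Banned entity detected: ${ent.mkText}")
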
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCException.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCException.java
deleted file mode 100644
index 597a3af5..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCException.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-/**
- * Base NLPCraft exception.
- */
-public class NCException extends RuntimeException {
-    /**
-     * Creates new exception with given parameters.
-     *
-     * @param msg Error message.
-     * @param cause Optional cause of this exception.
-     */
-    public NCException(String msg, Throwable cause) {
-        super(msg, cause);
-    }
-
-    /**
-     * Creates new exception with given error message.
-     *
-     * @param msg Error message.
-     */
-    public NCException(String msg) {
-        super(msg);
-    }
-}
\ No newline at end of file
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCResultType.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCException.scala
similarity index 80%
copy from nlpcraft/src/main/scala/org/apache/nlpcraft/NCResultType.java
copy to nlpcraft/src/main/scala/org/apache/nlpcraft/NCException.scala
index 400d4067..419d1f9c 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCResultType.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCException.scala
@@ -15,19 +15,8 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
+package org.apache.nlpcraft
 
 /**
- *
- */
-public enum NCResultType {
-    /**
-     * Final result is ready.
-     */
-    ASK_RESULT,
-
-    /**
-     * Ask user back engaging in dialog.
-     */
-    ASK_DIALOG
-}
+  * Base NLPCraft exception. */
+class NCException(msg: String, cause: Throwable = null) extends RuntimeException(msg, cause)
\ No newline at end of file
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentMatch.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentMatch.java
deleted file mode 100644
index 8794cb29..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentMatch.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.List;
-
-/**
- *
- */
-public interface NCIntentMatch {
-    /**
-     * Gets ID of the matched intent.
-     *
-     * @return ID of the matched intent.
-     */
-    String getIntentId();
-
-    /**
-     * Gets a subset of entities representing matched intent. This subset is grouped by the matched terms
-     * where a {@code null} sub-list defines an optional term. Order and index of sub-lists corresponds
-     * to the order and index of terms in the matching intent. Number of sub-lists will always be the same
-     * as the number of terms in the matched intent.
-     * <p>
-     * Consider using {@link NCIntentTerm} annotation instead for simpler access to the intent entities.
-     *
-     * @return List of lists of entities representing matched intent.
-     * @see #getVariant()
-     * @see NCIntentTerm
-     */
-    List<List<NCEntity>> getIntentEntities();
-
-    /**
-     * Gets entities for given term. This is a companion method for {@link #getIntentEntities()}.
-     * <p>
-     * Consider using {@link NCIntentTerm} annotation instead for simpler access to the intent entities.
-     *
-     * @param idx Index of the term (starting from <code>0</code>).
-     * @return List of entities, potentially {@code null}, for given term.
-     * @see NCIntentTerm
-     * @see #getTermEntities(String)
-     */
-    List<NCEntity> getTermEntities(int idx);
-
-    /**
-     * Gets entities for given term. This is a companion method for {@link #getIntentEntities()}.
-     * <p>
-     * Consider using {@link NCIntentTerm} annotation instead for simpler access to the intent entities.
-     *
-     * @param termId ID of the term for which to get entities.
-     * @return List of entities, potentially {@code null}, for given term.
-     * @see NCIntentTerm
-     * @see #getTermEntities(int)
-     */
-    List<NCEntity> getTermEntities(String termId);
-
-    /**
-     * Gets parsing variant that produced the matching for this intent. Returned variant is one of the
-     * variants provided by {@link NCContext#getVariants()} methods. Note that entities returned by this method are
-     * a superset of the entities returned by {@link #getIntentEntities()}  method, i.e. not all entities
-     * from this variant may have been used in matching of the winning intent.
-     *
-     * @return Parsing variant that produced the matching for this intent.
-     * @see #getIntentEntities()
-     */
-    NCVariant getVariant();
-
-
-    /**
-     * Gets context of the user input query.
-     *
-     * @return Original query context.
-     */
-    NCContext getContext();
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentMatch.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentMatch.scala
new file mode 100644
index 00000000..855de21e
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentMatch.scala
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+trait NCIntentMatch:
+    /**
+      * Gets ID of the matched intent.
+      *
+      * @return ID of the matched intent. */
+    def getIntentId: String
+
+    /**
+      * Gets a subset of entities representing matched intent. This subset is grouped by the matched terms
+      * where a {@code null} sub-list defines an optional term. Order and index of sub-lists corresponds
+      * to the order and index of terms in the matching intent. Number of sub-lists will always be the same
+      * as the number of terms in the matched intent.
+      * <p>
+      * Consider using {@link NCIntentTerm} annotation instead for simpler access to the intent entities.
+      *
+      * @return List of lists of entities representing matched intent.
+      * @see #getVariant()
+      * @see NCIntentTerm */
+    def getIntentEntities: List[List[NCEntity]]
+
+    /**
+      * Gets entities for the given term. This is a companion method for {@link #getIntentEntities()}.
+      * <p>
+      * Consider using {@link NCIntentTerm} annotation instead for simpler access to the intent entities.
+      *
+      * @param idx Index of the term (starting from <code>0</code>).
+      * @return List of entities, potentially {@code null}, for given term.
+      * @see NCIntentTerm
+      * @see #getTermEntities(String) */
+    def getTermEntities(idx: Int): List[NCEntity]
+
+    /**
+      * Gets entities for the given term. This is a companion method for {@link #getIntentEntities()}.
+      * <p>
+      * Consider using {@link NCIntentTerm} annotation instead for simpler access to the intent entities.
+      *
+      * @param termId ID of the term for which to get entities.
+      * @return List of entities, potentially {@code null}, for given term.
+      * @see NCIntentTerm
+      * @see #getTermEntities(int) */
+    def getTermEntities(termId: String): List[NCEntity]
+
+    /**
+      * Gets parsing variant that produced the matching for this intent. Returned variant is one of the
+      * variants provided by the {@link NCContext#getVariants()} method. Note that entities returned by this method are
+      * a superset of the entities returned by the {@link #getIntentEntities()} method, i.e. not all entities
+      * from this variant may have been used in matching of the winning intent.
+      *
+      * @return Parsing variant that produced the matching for this intent.
+      * @see #getIntentEntities() */
+    def getVariant: NCVariant
+
+    /**
+      * Gets context of the user input query.
+      *
+      * @return Original query context. */
+    def getContext: NCContext
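
As an illustrative sketch only, a callback could use the trait above to compare the term-level view with the full match; the helper name is made up and the null check mirrors the optional-term behavior documented for getIntentEntities:

    import org.apache.nlpcraft.*

    // Summarizes how many entities the matched intent actually used.
    def matchStats(im: NCIntentMatch): String =
        // Optional terms appear as null sub-lists, per the scaladoc above.
        val used = im.getIntentEntities.filter(_ != null).map(_.size).sum
        s"intent=${im.getIntentId}, used=$used"
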
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentSkip.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentSkip.java
deleted file mode 100644
index ae6605a1..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentSkip.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-/**
- * Control flow exception to skip current intent. This exception can be thrown by the intent
- * callback to indicate that current intent should be skipped (even though
- * it was matched and its callback was called). If there's more than one intent matched the next best matching intent
- * will be selected and its callback will be called.
- * <p>
- * This exception becomes useful when it is hard or impossible to encode the entire matching logic using just
- * declarative IDL. In these cases the intent definition can be relaxed and the "last mile" of intent
- * matching can happen inside the intent callback's user logic. If it is determined that intent in fact does
- * not match then throwing this exception allows to try next best matching intent, if any.
- *
- * @see NCIntent
- * @see NCIntentTerm
- * @see NCIntentRef
- * @see NCIntentSample
- * @see NCIntentSampleRef
- * @see NCIntentMatch
- * @see NCModel#onMatchedIntent(NCIntentMatch)
- */
-public class NCIntentSkip extends NCException {
-    /**
-     * Creates new intent skip exception.
-     */
-    public NCIntentSkip() {
-        super("Intent skipped.");
-    }
-
-    /**
-     * Creates new intent skip exception with given debug message.
-     *
-     * @param msg Skip message for debug output.
-     */
-    public NCIntentSkip(String msg) {
-        super(msg);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentSkip.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentSkip.scala
new file mode 100644
index 00000000..e9ffd3db
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentSkip.scala
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * Control flow exception to skip the current intent. This exception can be thrown by the intent
+  * callback to indicate that the current intent should be skipped (even though
+  * it was matched and its callback was called). If more than one intent matched, the next best matching intent
+  * will be selected and its callback will be called.
+  * <p>
+  * This exception becomes useful when it is hard or impossible to encode the entire matching logic using just
+  * declarative IDL. In these cases the intent definition can be relaxed and the "last mile" of intent
+  * matching can happen inside the intent callback's user logic. If it is determined that the intent in fact does
+  * not match, then throwing this exception allows trying the next best matching intent, if any.
+  *
+  * @see NCIntent
+  * @see NCIntentTerm
+  * @see NCIntentRef
+  * @see NCIntentSample
+  * @see NCIntentSampleRef
+  * @see NCIntentMatch
+  * @see NCModel#onMatchedIntent(NCIntentMatch) */
+class NCIntentSkip(msg: String, cause: Throwable = null) extends NCException(msg, cause)
\ No newline at end of file
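
For illustration, a minimal Scala 3 sketch of how an intent callback might use the NCIntentSkip class above to defer to the next best matching intent. The demo package and the extraCheckPassed helper are assumptions, not part of this commit:

    package demo

    import org.apache.nlpcraft.*

    object SkipSketch:
        // Illustrative stand-in for a "last mile" check that IDL could not express.
        private def extraCheckPassed(ent: NCEntity): Boolean = false

        // Typically invoked from inside an intent callback body.
        def requireExtraCheck(ent: NCEntity): Unit =
            if !extraCheckPassed(ent) then
                // Control-flow exception: the next best matching intent, if any, will be tried.
                throw new NCIntentSkip("Extra check failed - trying the next best intent.")
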
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCLifecycle.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCLifecycle.java
deleted file mode 100644
index d564c984..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCLifecycle.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-/**
- * Lifecycle callbacks for various pipeline components.
- *
- * @see NCTokenParser
- * @see NCTokenEnricher
- * @see NCTokenValidator
- * @see NCEntityParser
- * @see NCEntityEnricher
- * @see NCEntityValidator
- */
-public interface NCLifecycle {
-    /**
-     * Called when the component starts. Default implementation is no-op.
-     *
-     * @param cfg Configuration of the model this component is associated with.
-     */
-    default void onStart(NCModelConfig cfg) {
-        // No-op.
-    }
-
-    /**
-     * Called when the component stops. Default implementation is no-op.
-     *
-     * @param cfg Configuration of the model this component is associated with.
-     */
-    default void onStop(NCModelConfig cfg) {
-        // No-op.
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelAdapter.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCLifecycle.scala
similarity index 53%
copy from nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelAdapter.java
copy to nlpcraft/src/main/scala/org/apache/nlpcraft/NCLifecycle.scala
index e517ed83..7c063ec0 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelAdapter.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCLifecycle.scala
@@ -15,37 +15,26 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
-
-import java.util.Objects;
+package org.apache.nlpcraft
 
 /**
- *
- */
-public class NCModelAdapter implements NCModel {
-    private final NCModelConfig cfg;
-    private final NCPipeline pipeline;
-
+  * Lifecycle callbacks for various pipeline components.
+  *
+  * @see NCTokenParser
+  * @see NCTokenEnricher
+  * @see NCTokenValidator
+  * @see NCEntityParser
+  * @see NCEntityEnricher
+  * @see NCEntityValidator */
+trait NCLifecycle:
     /**
-     *
-     * @param cfg
-     * @param pipeline
-     */
-    public NCModelAdapter(NCModelConfig cfg, NCPipeline pipeline) {
-        Objects.requireNonNull(cfg, "Model config cannot be null.");
-        Objects.requireNonNull(pipeline, "Model pipeline cannot be null.");
+      * Called when the component starts. Default implementation is no-op.
+      *
+      * @param cfg Configuration of the model this component is associated with. */
+    def onStart(cfg: NCModelConfig): Unit = () // No-op.
 
-        this.cfg = cfg;
-        this.pipeline = pipeline;
-    }
-
-    @Override
-    public NCModelConfig getConfig() {
-        return cfg;
-    }
-
-    @Override
-    public NCPipeline getPipeline() {
-        return pipeline;
-    }
-}
+    /**
+      * Called when the component stops. Default implementation is no-op.
+      *
+      * @param cfg Configuration of the model this component is associated with. */
+    def onStop(cfg: NCModelConfig): Unit = () // No-op.
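
A hedged sketch of the NCLifecycle contract above: a component that tracks a started/stopped state around model start and stop. The class name and the flag are illustrative only:

    package demo

    import org.apache.nlpcraft.*

    class StatefulComponent extends NCLifecycle:
        @volatile private var started = false

        // Called when the model starts; a real component would acquire its resources here.
        override def onStart(cfg: NCModelConfig): Unit = started = true

        // Called when the model stops; release whatever onStart acquired.
        override def onStop(cfg: NCModelConfig): Unit = started = false
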
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModel.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModel.java
deleted file mode 100644
index 79ae63b1..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModel.java
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-/**
- * User data model
- * <p>
- * Data model is a holder for user-define NLP processing logic that provides an interface to your data sources like a
- * database or a SaaS application. NLPCraft employs model-as-a-code approach where entire data model is an
- * implementation of this interface which can be developed using any JVM programming language like Java, Scala,
- * Kotlin, or Groovy. The instance of this interface is passed to {@link NCModelClient} class and contains:
- * <ul>
- *     <li>Model {@link #getConfig() configurfation}.</li>
- *     <li>Model {@link #getPipeline() processing pipeline}.</li>
- *     <li>Life-cycle callbacks.</li>
- * </ul>
- * Note that model-as-a-code approach natively supports any software life cycle tools and frameworks like various
- * build tools, CI/SCM tools, IDEs, etc. You don't need an additional web-based tools to manage
- * some aspects of your data models - your entire model and all of its components are part of your project's source code.
- * <p>
- * In most cases, one would use a convenient {@link NCModelAdapter} adapter to implement this interface. Here's a snippet
- * of the user data model from LightSwitch example:
- * <pre class="brush: java, highlight: [1]">
- * public class LightSwitchJavaModel extends NCModelAdapter {
- *     public LightSwitchJavaModel() {
- *         super(
- *             new NCModelConfig("nlpcraft.lightswitch.java.ex", "LightSwitch Example Model", "1.0"),
- *             new NCPipelineBuilder().withSemantic("en", "lightswitch_model.yaml").build()
- *         );
- *     }
- *
- *     &#64;NCIntent("intent=ls term(act)={has(ent_groups, 'act')} term(loc)={# == 'ls:loc'}*")
- *     NCResult onMatch(
- *          &#64;NCIntentTerm("act") NCEntity actEnt,
- *          &#64;NCIntentTerm("loc") List&lt;NCEntity&gt; locEnts
- *      ) {
- *          String status=actEnt.getId().equals("ls:on")?"on":"off";
- *          String locations=locEnts.isEmpty() ? "entire house":
- *              locEnts.stream().map(NCEntity::mkText).collect(Collectors.joining(", "));
- *
- *          return new NCResult(
- *              "Lights are [" + status + "] in [" + locations.toLowerCase() + "].",
- *              NCResultType.ASK_RESULT
- *          );
- *      }
- * }
- * </pre>
- *
- * @see NCModelClient
- * @see NCModelAdapter
- */
-public interface NCModel {
-    /**
-     * Gets model configuration.
-     *
-     * @return Model configuration.
-     */
-    NCModelConfig getConfig();
-
-    /**
-     * Gets model NLP processing pipeline.
-     *
-     * @return NLP processing pipeline.
-     */
-    NCPipeline getPipeline();
-
-    /**
-     * A callback to accept or reject a parsed variant. This callback is called before any other callbacks at the
-     * beginning of the processing pipeline, and it is called for each parsed variant.
-     * <p>
-     * Note that a given input query can have one or more possible different parsing variants. Depending on model
-     * configuration an input query can produce hundreds or even thousands of parsing variants that can significantly
-     * slow down the overall processing. This method allows to filter out unnecessary parsing variants based on
-     * variety of user-defined factors like number of entities, presence of a particular entity in the variant, etc.
-     * <p>
-     * By default, this method accepts all variants (returns {@code true}).
-     *
-     * @param vrn A parsing variant to accept or reject.
-     * @return {@code True} to accept variant for further processing, {@code false} otherwise.
-     */
-    default boolean onVariant(NCVariant vrn) {
-        return true;
-    }
-
-    /**
-     * A callback that is called when a fully assembled query context is ready. This callback is called after
-     * all {@link #onVariant(NCVariant)} callbacks are called but before any {@link #onMatchedIntent(NCIntentMatch)} are
-     * called, i.e. right before the intent matching is performed. It's called always once per input query processing.
-     * Typical use case for this callback is to perform logging, debugging, statistic or usage collection, explicit
-     * update or initialization of conversation context, security audit or validation, etc.
-     * <p>
-     * Default implementation returns {@code null}.
-     *
-     * @param ctx Input query context.
-     * @return Optional query result to return interrupting the default workflow. Specifically, if this method
-     *      returns a non-{@code null} result, it will be returned to the caller immediately overriding default behavior.
-     *      If the method returns {@code null} - the default processing flow will continue.
-     * @throws NCRejection This callback can throw the rejection exception to abort input query processing.
-     */
-    default NCResult onContext(NCContext ctx) throws NCRejection {
-        return null;
-    }
-
-    /**
-     * A callback that is called when intent was successfully matched but right before its callback is called. This
-     * callback is called after {@link #onContext(NCContext)} is called and may be called multiple times
-     * depending on its return value. If {@code true} is returned than the default workflow will continue and the
-     * matched intent's callback will be called. However, if {@code null} is returned than the entire existing set of
-     * parsing variants will be matched against all declared intents again. Returning {@code false} allows this
-     * method to alter the state of the model (like soft-reset conversation or change metadata) and force the
-     * full re-evaluation of the parsing variants against all declared intents.
-     * <p>
-     * Note that user logic should be careful not to induce infinite loop in this behavior.
-     * Note that this callback may not be called at all based on the return value of {@link #onContext(NCContext)} callback.
-     * Typical use case for this callback is to perform logging, debugging, statistic or usage collection, explicit
-     * update or initialization of conversation context, security audit or validation, etc.
-     * <p>
-     * By default, this method returns {@code true}.
-     *
-     * @param ctx Intent match context - the same instance that's passed to the matched intent callback.
-     * @return If {@code true} is returned than the default workflow will continue and the matched intent's
-     *      callback will be called. However, if {@code false} is returned than the entire existing set of
-     *      parsing variants will be matched against all declared intents again. Returning false allows this
-     *      method to alter the state of the model (like soft-reset conversation or change metadata) and force
-     *      the re-evaluation of the parsing variants against all declared intents. Note that user logic should be
-     *      careful not to induce infinite loop in this behavior.
-     * @throws NCRejection This callback can throw the rejection exception to abort user request processing. In this
-     *      case the {@link #onRejection(NCIntentMatch, NCRejection)} callback will be called next.
-     */
-    default boolean onMatchedIntent(NCIntentMatch ctx) throws NCRejection {
-        return true;
-    }
-
-    /**
-     * A callback that is called when successful result is obtained from the intent callback and right before
-     * sending it back to the caller. This callback is called after {@link #onMatchedIntent(NCIntentMatch)} is called.
-     * Note that this callback may not be called at all, and if called - it's called only once. Typical use case
-     * for this callback is to perform logging, debugging, statistic or usage collection, explicit update or
-     * initialization of conversation context, security audit or validation, etc.
-     * <p>
-     * Default implementation is a no-op returning {@code null}.
-     *
-     * @param ctx Intent match context - the same instance that's passed to the matched intent callback
-     *      that produced this result.
-     * @param res Existing result.
-     * @return Optional query result to return interrupting the default workflow. Specifically, if this
-     *      method returns a non-{@code null} result, it will be returned to the caller immediately overriding
-     *      default behavior and existing query result or error processing, if any. If the method returns {@code null} -
-     *      the default processing flow will continue.
-     */
-    default NCResult onResult(NCIntentMatch ctx, NCResult res) {
-        return null;
-    }
-
-    /**
-     * A callback that is called when intent callback threw NCRejection exception. This callback is called
-     * after {@link #onMatchedIntent(NCIntentMatch)} is called. Note that this callback may not be called at all,
-     * and if called - it's called only once. Typical use case for this callback is to perform logging, debugging,
-     * statistic or usage collection, explicit update or initialization of conversation context, security audit or
-     * validation, etc.
-     * <p>
-     * Default implementation is a no-op returning {@code null}.
-     *
-     * @param ctx Optional intent match context - the same instance that's passed to the matched intent callback
-     *      that produced this rejection. It is {@code null} if rejection was triggered outside the intent callback.
-     * @param e Rejection exception.
-     * @return Optional query result to return interrupting the default workflow. Specifically, if this method
-     *      returns a non-{@code null} result, it will be returned to the caller immediately overriding default behavior
-     *      and existing query result or error processing, if any. If the method returns {@code null} - the default
-     *      processing flow will continue.
-     */
-    default NCResult onRejection(NCIntentMatch ctx, NCRejection e) {
-        return null;
-    }
-
-    /**
-     * A callback that is called when intent callback failed with unexpected exception. Note that this callback may
-     * not be called at all, and if called - it's called only once. Typical use case for this callback is
-     * to perform logging, debugging, statistic or usage collection, explicit update or initialization of conversation
-     * context, security audit or validation, etc.
-     * <p>
-     * Default implementation is a no-op returning {@code null}.
-     *
-     * @param ctx Intent match context - the same instance that's passed to the matched intent that produced this error.
-     * @param e Failure exception.
-     * @return Optional query result to return interrupting the default workflow. Specifically, if this method
-     *      returns a non-{@code null} result, it will be returned to the caller immediately overriding default
-     *      behavior and existing query result or error processing, if any. If the method returns {@code null} - the
-     *      default processing flow will continue.
-     */
-    default NCResult onError(NCContext ctx, Throwable e) {
-        return null;
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModel.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModel.scala
new file mode 100644
index 00000000..e98c8c8e
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModel.scala
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * User data model.
+  * <p>
+  * A data model is a holder for user-defined NLP processing logic that provides an interface to your data sources like a
+  * database or a SaaS application. NLPCraft employs a model-as-a-code approach where the entire data model is an
+  * implementation of this interface which can be developed using any JVM programming language like Java, Scala,
+  * Kotlin, or Groovy. The instance of this interface is passed to the {@link NCModelClient} class and contains:
+  * <ul>
+  * <li>Model {@link #getConfig() configuration}.</li>
+  * <li>Model {@link #getPipeline() processing pipeline}.</li>
+  * <li>Life-cycle callbacks.</li>
+  * </ul>
+  * Note that the model-as-a-code approach natively supports any software life-cycle tools and frameworks like various
+  * build tools, CI/SCM tools, IDEs, etc. You don't need additional web-based tools to manage
+  * some aspects of your data models - your entire model and all of its components are part of your project's source code.
+  * <p>
+  * In most cases, one would use a convenient {@link NCModelAdapter} adapter to implement this interface. Here's a snippet
+  * of the user data model from the LightSwitch example:
+  * <pre class="brush: java, highlight: [1]">
+  * public class LightSwitchJavaModel extends NCModelAdapter {
+  *     public LightSwitchJavaModel() {
+  *         super(
+  *             new NCModelConfig("nlpcraft.lightswitch.java.ex", "LightSwitch Example Model", "1.0"),
+  *             new NCPipelineBuilder().withSemantic("en", "lightswitch_model.yaml").build()
+  *         );
+  *     }
+  *
+  *     &#64;NCIntent("intent=ls term(act)={has(ent_groups, 'act')} term(loc)={# == 'ls:loc'}*")
+  *     NCResult onMatch(
+  *         &#64;NCIntentTerm("act") NCEntity actEnt,
+  *         &#64;NCIntentTerm("loc") List&lt;NCEntity&gt; locEnts
+  *     ) {
+  *         String status = actEnt.getId().equals("ls:on") ? "on" : "off";
+  *         String locations = locEnts.isEmpty() ? "entire house" :
+  *             locEnts.stream().map(NCEntity::mkText).collect(Collectors.joining(", "));
+  *
+  *         return new NCResult(
+  *             "Lights are [" + status + "] in [" + locations.toLowerCase() + "].",
+  *             NCResultType.ASK_RESULT
+  *         );
+  *     }
+  * }
+  * </pre>
+  *
+  * @see NCModelClient
+  * @see NCModelAdapter */
+trait NCModel:
+    /**
+      * Gets model configuration.
+      *
+      * @return Model configuration. */
+    def getConfig: NCModelConfig
+
+    /**
+      * Gets model NLP processing pipeline.
+      *
+      * @return NLP processing pipeline. */
+    def getPipeline: NCPipeline
+
+    /**
+      * A callback to accept or reject a parsed variant. This callback is called before any other callbacks at the
+      * beginning of the processing pipeline, and it is called for each parsed variant.
+      * <p>
+      * Note that a given input query can have one or more possible different parsing variants. Depending on model
+      * configuration, an input query can produce hundreds or even thousands of parsing variants that can significantly
+      * slow down the overall processing. This method allows filtering out unnecessary parsing variants based on a
+      * variety of user-defined factors like the number of entities, presence of a particular entity in the variant, etc.
+      * <p>
+      * By default, this method accepts all variants (returns {@code true}).
+      *
+      * @param vrn A parsing variant to accept or reject.
+      * @return {@code True} to accept variant for further processing, {@code false} otherwise. */
+    def onVariant(vrn: NCVariant) = true
+
+    /**
+      * A callback that is called when a fully assembled query context is ready. This callback is called after
+      * all {@link #onVariant(NCVariant)} callbacks are called but before any {@link #onMatchedIntent(NCIntentMatch)} callbacks are
+      * called, i.e. right before the intent matching is performed. It's always called exactly once per input query.
+      * Typical use cases for this callback are logging, debugging, statistics or usage collection, explicit
+      * update or initialization of conversation context, security audit or validation, etc.
+      * <p>
+      * Default implementation returns {@code null}.
+      *
+      * @param ctx Input query context.
+      * @return Optional query result to return interrupting the default workflow. Specifically, if this method
+      * returns a non-{@code null} result, it will be returned to the caller immediately overriding default behavior.
+      * If the method returns {@code null} - the default processing flow will continue.
+      * @throws NCRejection This callback can throw the rejection exception to abort input query processing. */
+    @throws[NCRejection] def onContext(ctx: NCContext): NCResult = null
+
+    /**
+      * A callback that is called when an intent was successfully matched but right before its callback is called. This
+      * callback is called after {@link #onContext(NCContext)} is called and may be called multiple times
+      * depending on its return value. If {@code true} is returned, then the default workflow will continue and the
+      * matched intent's callback will be called. However, if {@code false} is returned, then the entire existing set of
+      * parsing variants will be matched against all declared intents again. Returning {@code false} allows this
+      * method to alter the state of the model (like soft-resetting the conversation or changing metadata) and force the
+      * full re-evaluation of the parsing variants against all declared intents.
+      * <p>
+      * Note that user logic should be careful not to induce an infinite loop in this behavior.
+      * Note that this callback may not be called at all based on the return value of the {@link #onContext(NCContext)} callback.
+      * Typical use cases for this callback are logging, debugging, statistics or usage collection, explicit
+      * update or initialization of conversation context, security audit or validation, etc.
+      * <p>
+      * By default, this method returns {@code true}.
+      *
+      * @param ctx Intent match context - the same instance that's passed to the matched intent callback.
+      * @return If {@code true} is returned, then the default workflow will continue and the matched intent's
+      * callback will be called. However, if {@code false} is returned, then the entire existing set of
+      * parsing variants will be matched against all declared intents again. Returning {@code false} allows this
+      * method to alter the state of the model (like soft-resetting the conversation or changing metadata) and force
+      * the re-evaluation of the parsing variants against all declared intents. Note that user logic should be
+      * careful not to induce an infinite loop in this behavior.
+      * @throws NCRejection This callback can throw the rejection exception to abort user request processing. In this
+      * case the {@link #onRejection(NCIntentMatch, NCRejection)} callback will be called next. */
+    @throws[NCRejection] def onMatchedIntent(ctx: NCIntentMatch) = true
+
+    /**
+      * A callback that is called when successful result is obtained from the intent callback and right before
+      * sending it back to the caller. This callback is called after {@link #onMatchedIntent(NCIntentMatch)} is called.
+      * Note that this callback may not be called at all, and if called - it's called only once. Typical use cases
+      * for this callback are logging, debugging, statistics or usage collection, explicit update or
+      * initialization of conversation context, security audit or validation, etc.
+      * <p>
+      * Default implementation is a no-op returning {@code null}.
+      *
+      * @param ctx Intent match context - the same instance that's passed to the matched intent callback
+      * that produced this result.
+      * @param res Existing result.
+      * @return Optional query result to return interrupting the default workflow. Specifically, if this
+      * method returns a non-{@code null} result, it will be returned to the caller immediately overriding
+      * default behavior and existing query result or error processing, if any. If the method returns {@code null} -
+      * the default processing flow will continue. */
+    def onResult(ctx: NCIntentMatch, res: NCResult): NCResult = null
+
+    /**
+      * A callback that is called when an intent callback threw an NCRejection exception. This callback is called
+      * after {@link #onMatchedIntent(NCIntentMatch)} is called. Note that this callback may not be called at all,
+      * and if called - it's called only once. Typical use cases for this callback are logging, debugging,
+      * statistics or usage collection, explicit update or initialization of conversation context, security audit or
+      * validation, etc.
+      * <p>
+      * Default implementation is a no-op returning {@code null}.
+      *
+      * @param ctx Optional intent match context - the same instance that's passed to the matched intent callback
+      * that produced this rejection. It is {@code null} if rejection was triggered outside the intent callback.
+      * @param e Rejection exception.
+      * @return Optional query result to return interrupting the default workflow. Specifically, if this method
+      * returns a non-{@code null} result, it will be returned to the caller immediately overriding default behavior
+      * and existing query result or error processing, if any. If the method returns {@code null} - the default
+      * processing flow will continue. */
+    def onRejection(ctx: NCIntentMatch, e: NCRejection): NCResult = null
+
+    /**
+      * A callback that is called when an intent callback failed with an unexpected exception. Note that this callback may
+      * not be called at all, and if called - it's called only once. Typical use cases for this callback are
+      * logging, debugging, statistics or usage collection, explicit update or initialization of conversation
+      * context, security audit or validation, etc.
+      * <p>
+      * Default implementation is a no-op returning {@code null}.
+      *
+      * @param ctx Intent match context - the same instance that's passed to the matched intent that produced this error.
+      * @param e Failure exception.
+      * @return Optional query result to return interrupting the default workflow. Specifically, if this method
+      * returns a non-{@code null} result, it will be returned to the caller immediately overriding default
+      * behavior and existing query result or error processing, if any. If the method returns {@code null} - the
+      * default processing flow will continue. */
+    def onError(ctx: NCContext, e: Throwable): NCResult = null
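
A hedged Scala 3 counterpart to the Java snippet in the scaladoc above, built on the NCModelAdapter that follows below. The 10-entity threshold is purely illustrative, and it assumes NCVariant.getEntities returns a Scala List in this branch:

    package demo

    import org.apache.nlpcraft.*

    class DemoModel(cfg: NCModelConfig, pipeline: NCPipeline) extends NCModelAdapter(cfg, pipeline):
        // Prune overly large parsing variants before intent matching.
        override def onVariant(vrn: NCVariant): Boolean = vrn.getEntities.sizeIs <= 10
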
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticStemmer.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelAdapter.scala
similarity index 80%
copy from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticStemmer.java
copy to nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelAdapter.scala
index 279e4f4d..fa9adf99 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticStemmer.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelAdapter.scala
@@ -15,16 +15,8 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.entity.parser.semantic;
+package org.apache.nlpcraft
 
-/**
- * 
- */
-public interface NCSemanticStemmer {
-    /**
-     *
-     * @param txt
-     * @return
-     */
-    String stem(String txt);
-}
+class NCModelAdapter(cfg: NCModelConfig, pipeline: NCPipeline) extends NCModel:
+    def getConfig: NCModelConfig = cfg
+    def getPipeline: NCPipeline = pipeline
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.java
deleted file mode 100644
index f6252332..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import org.apache.nlpcraft.internal.impl.NCModelClientImpl;
-
-import java.util.Map;
-import java.util.function.Predicate;
-
-/**
- *
- */
-public class NCModelClient implements AutoCloseable {
-    private final NCModelClientImpl impl;
-
-    /**
-     *
-     * @param mdl
-     */
-    public NCModelClient(NCModel mdl) {
-        this.impl = new NCModelClientImpl(mdl);
-    }
-
-    /**
-     *
-     * @param txt
-     * @param data
-     * @param usrId
-     * @return
-     * @throws NCException
-     */
-    public NCResult ask(String txt, Map<String, Object> data, String usrId) {
-        return impl.ask(txt, data, usrId);
-    }
-
-    /**
-     *
-     * @param usrId
-     * @throws NCException
-     */
-    public void clearStm(String usrId) {
-        impl.clearStm(usrId);
-    }
-
-    /**
-     *
-     * @param usrId
-     * @param filter
-     */
-    public void clearStm(String usrId, Predicate<NCEntity> filter) {
-        impl.clearStm(usrId, filter);
-    }
-
-    /**
-     *
-     * @param usrId
-     * @throws NCException
-     */
-    public void clearDialog(String usrId) {
-        impl.clearDialog(usrId);
-    }
-
-    /**
-     *
-     * @param usrId
-     * @param filter
-     */
-    public void clearDialog(String usrId, Predicate<NCDialogFlowItem> filter) {
-        impl.clearDialog(usrId, filter);
-    }
-
-    /**
-     *
-     */
-    @Override
-    public void close() {
-        impl.close();
-    }
-
-    /**
-     *
-     */
-    public void validateSamples() {
-        impl.validateSamples();
-    }
-
-    /**
-     * TODO:
-     * Gets callback information which contains intent ID and callback arguments entities.
-     * Note that
-     *  - Callback is not called in this case.
-     *  - if model `onContext` method overrided - error thrown because we don't find intents in this case.
-     *
-     *  Callback.
-     *   - You can call callback only one time.
-     *   - You can't call callback if it is not last request.
-     *   - if you call callback and 'saveHistory' flag was true - dialog overriden by callback result instead of saved before empty result.
-     *   - if you call callback and 'saveHistory' flag was false - history data is still ignored.
-     *   - No matter of callback execution time - history data based on request timestamp.
-     *
-     * @param txt
-     * @param data
-     * @param usrId
-     * @param saveHistory if true that found intent data added to dialog flow (with empty NCResult, bacause callback wasn't called) and STM.
-     *                    if false that found intent is not saved in STM and dialog flow.
-     * @return
-     */
-    public NCCallbackData debugAsk(String txt, Map<String, Object> data, String usrId, boolean saveHistory) {
-        return impl.debugAsk(txt, data, usrId, saveHistory);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelClientImpl.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.scala
similarity index 74%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelClientImpl.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.scala
index 4615c0ca..423b52a0 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelClientImpl.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.internal.impl
+package org.apache.nlpcraft
 
 import com.typesafe.scalalogging.LazyLogging
 import org.apache.nlpcraft.*
@@ -30,18 +30,14 @@ import org.apache.nlpcraft.internal.util.*
 import java.util
 import java.util.concurrent.*
 import java.util.concurrent.atomic.*
-import java.util.function.*
-import java.util.{Collections as JColls, List as JList, Map as JMap, *}
-import scala.collection.*
+import java.util.{Objects, UUID}
 import scala.concurrent.ExecutionContext
-import scala.jdk.CollectionConverters.*
-import scala.jdk.OptionConverters.*
 
 /**
   *
   * @param mdl
   */
-class NCModelClientImpl(mdl: NCModel) extends LazyLogging:
+class NCModelClient(mdl: NCModel) extends LazyLogging, AutoCloseable:
     verify()
 
     private val intents = NCModelScanner.scan(mdl)
@@ -51,7 +47,6 @@ class NCModelClientImpl(mdl: NCModel) extends LazyLogging:
     private val intentsMgr = NCIntentSolverManager(dlgMgr, convMgr, intents.map(p => p.intent -> p.function).toMap)
 
     init()
-
     /**
       *
       */
@@ -61,9 +56,9 @@ class NCModelClientImpl(mdl: NCModel) extends LazyLogging:
         val cfg = mdl.getConfig
         val pipeline = mdl.getPipeline
 
-        Objects.requireNonNull(cfg.getId, "Model ID cannot be null.")
-        Objects.requireNonNull(cfg.getName, "Model name cannot be null.")
-        Objects.requireNonNull(cfg.getVersion, "Model version cannot be null.")
+        Objects.requireNonNull(cfg.id, "Model ID cannot be null.")
+        Objects.requireNonNull(cfg.name, "Model name cannot be null.")
+        Objects.requireNonNull(cfg.version, "Model version cannot be null.")
         Objects.requireNonNull(pipeline.getTokenParser, "Token parser cannot be null.")
         Objects.requireNonNull(pipeline.getEntityParsers, "List of entity parsers in the pipeline cannot be null.")
 
@@ -83,22 +78,22 @@ class NCModelClientImpl(mdl: NCModel) extends LazyLogging:
       * @param typ
       * @return
       */
-    private def ask0(txt: String, data: JMap[String, AnyRef], usrId: String, typ: NCIntentSolveType): Either[NCResult, NCCallbackData] =
+    private def ask0(txt: String, data: Map[String, Any], usrId: String, typ: NCIntentSolveType): Either[NCResult, NCCallbackData] =
         val plData = plMgr.prepare(txt, data, usrId)
 
         val userId = plData.request.getUserId
         val convHldr = convMgr.getConversation(userId)
-        val allEnts = plData.variants.flatMap(_.getEntities.asScala)
+        val allEnts = plData.variants.flatMap(_.getEntities)
 
         convHldr.updateEntities()
 
         val conv: NCConversation =
             new NCConversation:
                 override val getData: NCPropertyMap = convHldr.getUserData
-                override val getStm: JList[NCEntity] = convHldr.getEntities.asJava
-                override val getDialogFlow: JList[NCDialogFlowItem] = dlgMgr.getDialogFlow(userId).asJava
-                override def clearStm(filter: Predicate[NCEntity]): Unit = convHldr.clear(filter)
-                override def clearDialog(filter: Predicate[NCDialogFlowItem]): Unit = dlgMgr.clear(userId, (s: NCDialogFlowItem) => filter.test(s))
+                override val getStm: List[NCEntity] = convHldr.getEntities
+                override val getDialogFlow: List[NCDialogFlowItem] = dlgMgr.getDialogFlow(userId)
+                override def clearStm(filter: NCEntity => Boolean): Unit = convHldr.clear(filter)
+                override def clearDialog(filter: NCDialogFlowItem => Boolean): Unit = dlgMgr.clear(userId, (s: NCDialogFlowItem) => filter(s))
 
         val ctx: NCContext =
             new NCContext:
@@ -106,8 +101,8 @@ class NCModelClientImpl(mdl: NCModel) extends LazyLogging:
                 override val getModelConfig: NCModelConfig = mdl.getConfig
                 override val getRequest: NCRequest = plData.request
                 override val getConversation: NCConversation = conv
-                override val getVariants: util.Collection[NCVariant] = plData.variants.asJava
-                override val getTokens: JList[NCToken] = plData.tokens
+                override val getVariants: List[NCVariant] = plData.variants
+                override val getTokens: List[NCToken] = plData.tokens
 
         intentsMgr.solve(mdl, ctx, typ)
 
@@ -117,7 +112,7 @@ class NCModelClientImpl(mdl: NCModel) extends LazyLogging:
       * @param usrId
       * @return
       */
-    def ask(txt: String, data: JMap[String, AnyRef], usrId: String): NCResult =
+    def ask(txt: String, data: Map[String, AnyRef], usrId: String): NCResult =
         ask0(txt, data, usrId, NCIntentSolveType.REGULAR).swap.toOption.get
 
     /**
@@ -131,7 +126,7 @@ class NCModelClientImpl(mdl: NCModel) extends LazyLogging:
       * @param usrId
       * @param filter
       */
-    def clearStm(usrId: String, filter: Predicate[NCEntity]): Unit = convMgr.getConversation(usrId).clear(filter)
+    def clearStm(usrId: String, filter: NCEntity => Boolean): Unit = convMgr.getConversation(usrId).clear(filter)
 
     /**
       *
@@ -143,7 +138,7 @@ class NCModelClientImpl(mdl: NCModel) extends LazyLogging:
       *
       * @param usrId
       */
-    def clearDialog(usrId: String, filter: Predicate[NCDialogFlowItem]): Unit = dlgMgr.clear(usrId, (i: NCDialogFlowItem) => filter.test(i))
+    def clearDialog(usrId: String, filter: NCDialogFlowItem => Boolean): Unit = dlgMgr.clear(usrId, (i: NCDialogFlowItem) => filter(i))
 
     /**
       *
@@ -152,7 +147,7 @@ class NCModelClientImpl(mdl: NCModel) extends LazyLogging:
         case class Result(intentId: String, text: String, error: Option[String], time: Long)
 
         val userId = UUID.randomUUID().toString
-        val res = mutable.ArrayBuffer.empty[Result]
+        val res = scala.collection.mutable.ArrayBuffer.empty[Result]
 
         def now: Long = System.currentTimeMillis()
 
@@ -164,7 +159,7 @@ class NCModelClientImpl(mdl: NCModel) extends LazyLogging:
                     try
                         val r = ask(sample, null, userId)
 
-                        Option.when(r.getIntentId != i.intent.id)(s"Unexpected intent ID: '${r.getIntentId}'")
+                        Option.when(r.intentId != i.intent.id)(s"Unexpected intent ID: '${r.intentId}'")
                     catch case e: Throwable =>
                         logger.warn("Unexpected error.", e) 
                         Option(e.getLocalizedMessage)
@@ -210,6 +205,6 @@ class NCModelClientImpl(mdl: NCModel) extends LazyLogging:
       * @param saveHist
       * @return
       */
-    def debugAsk(txt: String, data: JMap[String, AnyRef], usrId: String, saveHist: Boolean): NCCallbackData =
+    def debugAsk(txt: String, data: Map[String, AnyRef], usrId: String, saveHist: Boolean): NCCallbackData =
         import NCIntentSolveType.*
         ask0(txt, data, usrId, if saveHist then SEARCH else SEARCH_NO_HISTORY).toOption.get
\ No newline at end of file
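
A hedged usage sketch for the renamed NCModelClient above: wrap it in scala.util.Using since it is AutoCloseable, then ask a question. The question text and user ID are placeholders; passing null data mirrors the sample-validation code in this class:

    package demo

    import org.apache.nlpcraft.*
    import scala.util.Using

    object AskSketch:
        def run(mdl: NCModel): Unit =
            Using.resource(new NCModelClient(mdl)) { client =>
                // Returns the winning intent's result or throws a rejection/error.
                val res = client.ask("turn the lights off in the kitchen", null, "user-1")
                println(res)
            }
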
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelConfig.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelConfig.java
deleted file mode 100644
index e8726298..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelConfig.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.time.Duration;
-import java.util.Objects;
-
-/**
- *
- */
-public class NCModelConfig extends NCPropertyMapAdapter {
-    public final long DFLT_CONV_TIMEOUT = Duration.ofMinutes(60).toMillis();
-    public final int DFLT_CONV_DEPTH = 3;
-
-    private final String id, name, ver, desc, origin;
-    private long convTimeout = DFLT_CONV_TIMEOUT;
-    private int convDepth = DFLT_CONV_DEPTH;
-
-    /**
-     * @param id
-     * @param name
-     * @param ver
-     */
-    public NCModelConfig(String id, String name, String ver) {
-        this(id, name, ver, null, null);
-    }
-
-    /**
-     *
-     * @param id
-     * @param name
-     * @param ver
-     * @param desc
-     * @param origin
-     */
-    public NCModelConfig(String id, String name, String ver, String desc, String origin) {
-        Objects.requireNonNull(id, "Model ID cannot be null.");
-        Objects.requireNonNull(name, "Model name cannot be null.");
-        Objects.requireNonNull(ver, "Model version cannot be null.");
-
-        this.id = id;
-        this.name = name;
-        this.ver = ver;
-        this.desc = desc;
-        this.origin = origin != null ? origin : getClass().getCanonicalName();
-    }
-
-    /**
-     * Gets unique, <i>immutable</i> ID of this model.
-     *
-     * @return Unique, <i>immutable</i> ID of this model.
-     */
-    public String getId() {
-        return id;
-    }
-
-    /**
-     * Gets descriptive name of this model.
-     *
-     * @return Descriptive name for this model.
-     */
-    public String getName() {
-        return name;
-    }
-
-    /**
-     * Gets the version of this model using semantic versioning.
-     *
-     * @return A version compatible with (<a href="http://www.semver.org">www.semver.org</a>) specification.
-     */
-    public String getVersion() {
-        return ver;
-    }
-
-    /**
-     * Gets optional short model description. This can be displayed by the management tools.
-     * Default implementation retusrns <code>null</code>.
-     *
-     * @return Optional short model description. Can return <code>null</code>.
-     */
-    public String getDescription() {
-        return desc;
-    }
-
-    /**
-     * Gets the origin of this model like name of the class, file path or URL.
-     * Default implementation return current class name.
-     *
-     * @return Origin of this model like name of the class, file path or URL.
-     */
-    public String getOrigin() {
-        return origin;
-    }
-
-    /**
-     *
-     * @return
-     */
-    public long getConversationTimeout() {
-        return convTimeout;
-    }
-
-    /**
-     *
-     * @param convTimeout
-     */
-    public void setConversationTimeout(long convTimeout) {
-        this.convTimeout = convTimeout;
-    }
-
-    /**
-     *
-     * @return
-     */
-    public int getConversationDepth() {
-        return convDepth;
-    }
-
-    /**
-     *
-     * @param convDepth
-     */
-    public void setConversationDepth(int convDepth) {
-        this.convDepth = convDepth;
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelAdapter.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelConfig.scala
similarity index 51%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelAdapter.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelConfig.scala
index e517ed83..28f77d43 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelAdapter.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelConfig.scala
@@ -15,37 +15,25 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
+package org.apache.nlpcraft
 
-import java.util.Objects;
+import java.time.Duration
 
-/**
- *
- */
-public class NCModelAdapter implements NCModel {
-    private final NCModelConfig cfg;
-    private final NCPipeline pipeline;
-
-    /**
-     *
-     * @param cfg
-     * @param pipeline
-     */
-    public NCModelAdapter(NCModelConfig cfg, NCPipeline pipeline) {
-        Objects.requireNonNull(cfg, "Model config cannot be null.");
-        Objects.requireNonNull(pipeline, "Model pipeline cannot be null.");
 
-        this.cfg = cfg;
-        this.pipeline = pipeline;
-    }
+object NCModelConfig:
+    val DFLT_CONV_TIMEOUT: Long = Duration.ofMinutes(60).toMillis
+    val DFLT_CONV_DEPTH = 3
 
-    @Override
-    public NCModelConfig getConfig() {
-        return cfg;
-    }
+    def apply(id: String, name: String, version: String) = new NCModelConfig(id, name, version)
+    def apply(id: String, name: String, version: String, description: String, origin: String) = new NCModelConfig(id, name, version, description, origin)
+import org.apache.nlpcraft.NCModelConfig.*
 
-    @Override
-    public NCPipeline getPipeline() {
-        return pipeline;
-    }
-}
+class NCModelConfig(
+    val id: String,
+    val name: String,
+    val version: String,
+    val description: String = null,
+    val origin: String = null,
+    var conversationTimeout: Long = DFLT_CONV_TIMEOUT,
+    var conversationDepth: Int = DFLT_CONV_DEPTH
+) extends NCPropertyMapAdapter
\ No newline at end of file
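
A small hedged sketch of the constructor-based configuration above; the ID, name and version are placeholders, and the conversation depth simply shows overriding the default of 3:

    package demo

    import org.apache.nlpcraft.*

    // Top-level definition (Scala 3); values are illustrative only.
    val demoCfg = new NCModelConfig(
        id = "demo.model.id",
        name = "Demo Model",
        version = "1.0",
        conversationDepth = 5
    )
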
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipeline.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipeline.java
deleted file mode 100644
index be45a2f2..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipeline.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.Collections;
-import java.util.List;
-import java.util.Optional;
-
-/**
- * NLP processing pipeline for the input request. Pipeline is associated with the model.
- * <p>
- * An NLP pipeline is a container for various processing components that take the input text at the beginning of the
- * pipeline and produce the list of {@link NCEntity entities} at the end of the pipeline.
- * Schematically the pipeline looks like this:
- * <pre>
- *                                   +----------+        +-----------+
- * *=========*    +---------+    +---+-------+  |    +---+-------+   |
- * :  Text   : -> |  Token  | -> | Token     |  | -> | Token      |  | ----.
- * :  Input  :    |  Parser |    | Enrichers |--+    | Validators |--+      \
- * *=========*    +---------+    +-----------+       +------------+          \
- *                                                                            }
- *                    +-----------+        +----------+        +--------+    /
- * *=========*    +---+--------+  |    +---+-------+  |    +---+-----+  |   /
- * :  Entity : <- | Entity     |  | <- | Entity    |  | <- | Entity  |  | <-
- * :  List   :    | Validators |--+    | Enrichers |--+    | Parsers |--+
- * *=========*    +------------+       +-----------+       +---------+
- * </pre>
- * <p>
- * Pipeline has the following components:
- * <ul>
- *     <li>
- *         {@link NCTokenParser} is responsible for taking the input text and tokenize it into a list of
- *         {@link NCToken
- *         }. This process is called tokenization, i.e. the process of demarcating and
- *         classifying sections of a string of input characters. There's only one token parser for the pipeline.
- *     </li>
- *     <li>
- *         After the initial list of token is
- *     </li>
- * </ul>
- *
- *
- */
-public interface NCPipeline {
-    /**
-     *
-     * @return
-     */
-    NCTokenParser getTokenParser();
-
-    /**
-     * Gets the list of entity parser. At least one entity parser is required.
-     *
-     * @return
-     */
-    List<NCEntityParser> getEntityParsers();
-
-    /**
-     *
-     * @return
-     */
-    default List<NCTokenEnricher> getTokenEnrichers() {
-        return Collections.emptyList();
-    }
-
-    /**
-     *
-     * @return
-     */
-    default List<NCEntityEnricher> getEntityEnrichers() {
-        return Collections.emptyList();
-    }
-
-    /**
-     *
-     * @return
-     */
-    default List<NCTokenValidator> getTokenValidators() {
-        return Collections.emptyList();
-    }
-
-    /**
-     *
-     * @return
-     */
-    default List<NCEntityValidator> getEntityValidators() {
-        return Collections.emptyList();
-    }
-
-    /**
-     *
-     * @return
-     */
-    default Optional<NCVariantFilter> getVariantFilter() {
-        return Optional.empty();
-    }
-
-    /**
-     * Gets optional list of entity mappers.
-     *
-     * @return Optional list of entity mappers. Can be empty but never {@code null}.
-     */
-    default List<NCEntityMapper> getEntityMappers() {
-        return Collections.emptyList();
-    }
-}
\ No newline at end of file
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipeline.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipeline.scala
new file mode 100644
index 00000000..055fb654
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipeline.scala
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * NLP processing pipeline for the input request. Pipeline is associated with the model.
+  * <p>
+  * An NLP pipeline is a container for various processing components that take the input text at the beginning of the
+  * pipeline and produce the list of {@link NCEntity entities} at the end of the pipeline.
+  * Schematically the pipeline looks like this:
+  * <pre>
+  *                                   +----------+        +-----------+
+  * *=========*    +---------+    +---+-------+  |    +---+-------+   |
+  * :  Text   : -> |  Token  | -> | Token     |  | -> | Token      |  | ----.
+  * :  Input  :    |  Parser |    | Enrichers |--+    | Validators |--+      \
+  * *=========*    +---------+    +-----------+       +------------+          \
+  *                                                                            }
+  *                    +-----------+        +----------+        +--------+    /
+  * *=========*    +---+--------+  |    +---+-------+  |    +---+-----+  |   /
+  * :  Entity : <- | Entity     |  | <- | Entity    |  | <- | Entity  |  | <-
+  * :  List   :    | Validators |--+    | Enrichers |--+    | Parsers |--+
+  * *=========*    +------------+       +-----------+       +---------+
+  * </pre>
+  * <p>
+  * Pipeline has the following components:
+  * <ul>
+  * <li>
+  * {@link NCTokenParser} is responsible for taking the input text and tokenizing it into a list of
+  * {@link NCToken} objects. This process is called tokenization, i.e. the process of demarcating and
+  * classifying sections of a string of input characters. There's only one token parser per pipeline.
+  * </li>
+  * <li>
+  * After the initial list of tokens is produced, it is passed through the configured token enrichers and
+  * token validators; the entity parsers, enrichers and validators then turn it into the final list of entities.
+  * </li>
+  * </ul>
+  *
+  */
+trait NCPipeline:
+    /**
+      *
+      * @return */
+    def getTokenParser: NCTokenParser
+
+    /**
+      * Gets the list of entity parsers. At least one entity parser is required.
+      *
+      * @return */
+    def getEntityParsers: List[NCEntityParser]
+
+    def getTokenEnrichers: List[NCTokenEnricher] = List.empty
+
+    def getEntityEnrichers: List[NCEntityEnricher] = List.empty
+
+    def getTokenValidators: List[NCTokenValidator] = List.empty
+
+    def getEntityValidators: List[NCEntityValidator] = List.empty
+
+    def getVariantFilter: Option[NCVariantFilter] = None
+
+    /**
+      * Gets optional list of entity mappers.
+      *
+      * @return Optional list of entity mappers. Can be empty but never {@code null}. */
+    def getEntityMappers: List[NCEntityMapper] = List.empty
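
A minimal hedged sketch of the NCPipeline trait above: only the two required members are implemented and the concrete parser instances are supplied by the caller, so no particular parser implementation is assumed:

    package demo

    import org.apache.nlpcraft.*

    class TwoStagePipeline(tp: NCTokenParser, ep: NCEntityParser) extends NCPipeline:
        override def getTokenParser: NCTokenParser = tp
        // At least one entity parser is required by the contract above.
        override def getEntityParsers: List[NCEntityParser] = List(ep)
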
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipelineBuilder.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipelineBuilder.java
deleted file mode 100644
index 2e0ab5f1..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipelineBuilder.java
+++ /dev/null
@@ -1,368 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import opennlp.tools.stemmer.PorterStemmer;
-import org.apache.nlpcraft.internal.util.NCResourceReader;
-import org.apache.nlpcraft.nlp.entity.parser.semantic.NCSemanticElement;
-import org.apache.nlpcraft.nlp.entity.parser.semantic.NCSemanticEntityParser;
-import org.apache.nlpcraft.nlp.entity.parser.semantic.NCSemanticStemmer;
-import org.apache.nlpcraft.nlp.token.enricher.NCEnBracketsTokenEnricher;
-import org.apache.nlpcraft.nlp.token.enricher.NCEnDictionaryTokenEnricher;
-import org.apache.nlpcraft.nlp.token.enricher.NCEnQuotesTokenEnricher;
-import org.apache.nlpcraft.nlp.token.enricher.NCEnStopWordsTokenEnricher;
-import org.apache.nlpcraft.nlp.token.enricher.NCOpenNLPLemmaPosTokenEnricher;
-import org.apache.nlpcraft.nlp.token.enricher.NCEnSwearWordsTokenEnricher;
-import org.apache.nlpcraft.nlp.token.parser.NCOpenNLPTokenParser;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Optional;
-
-/**
- *
- */
-public class NCPipelineBuilder {
-    private NCTokenParser tokParser;
-    private final List<NCTokenEnricher> tokEnrichers = new ArrayList<>();
-    private final List<NCEntityEnricher> entEnrichers = new ArrayList<>();
-    private final List<NCEntityParser> entParsers = new ArrayList<>();
-    private final List<NCTokenValidator> tokVals = new ArrayList<>();
-    private final List<NCEntityValidator> entVals = new ArrayList<>();
-    private final List<NCEntityMapper> entMappers = new ArrayList<>();
-    private Optional<NCVariantFilter> varFilter = Optional.empty();
-
-    /**
-     *
-     * @return
-     */
-    private static NCSemanticStemmer mkEnStemmer() {
-        return new NCSemanticStemmer() {
-            private final PorterStemmer ps = new PorterStemmer();
-
-            @Override
-            public synchronized String stem(String txt) {
-                return ps.stem(txt);
-            }
-        };
-    }
-
-    /**
-     *
-     * @return
-     */
-    private NCOpenNLPTokenParser mkEnOpenNLPTokenParser() {
-        return new NCOpenNLPTokenParser(NCResourceReader.getPath("opennlp/en-token.bin"));
-    }
-
-
-    /**
-     * @param tokEnrichers
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withTokenEnrichers(List<NCTokenEnricher> tokEnrichers) {
-        Objects.requireNonNull(tokEnrichers, "List of token enrichers cannot be null.");
-        tokEnrichers.forEach(p -> Objects.requireNonNull(p, "Token enricher cannot be null."));
-
-        this.tokEnrichers.addAll(tokEnrichers);
-
-        return this;
-    }
-
-    /**
-     * @param tokEnricher
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withTokenEnricher(NCTokenEnricher tokEnricher) {
-        Objects.requireNonNull(tokEnricher, "Token enricher cannot be null.");
-
-        this.tokEnrichers.add(tokEnricher);
-
-        return this;
-    }
-
-    /**
-     * @param entEnrichers
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withEntityEnrichers(List<NCEntityEnricher> entEnrichers) {
-        Objects.requireNonNull(entEnrichers, "List of entity enrichers cannot be null.");
-        entEnrichers.forEach(p -> Objects.requireNonNull(p, "Entity enrichers cannot be null."));
-
-        this.entEnrichers.addAll(entEnrichers);
-
-        return this;
-    }
-
-    /**
-     * @param entEnricher
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withEntityEnricher(NCEntityEnricher entEnricher) {
-        Objects.requireNonNull(entEnricher, "Entity enricher cannot be null.");
-
-        this.entEnrichers.add(entEnricher);
-
-        return this;
-    }
-
-    /**
-     * @param entParsers
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withEntityParsers(List<NCEntityParser> entParsers) {
-        Objects.requireNonNull(entParsers, "List of entity parsers cannot be null.");
-        entParsers.forEach(p -> Objects.requireNonNull(p, "Entity parser cannot be null."));
-
-        this.entParsers.addAll(entParsers);
-
-        return this;
-    }
-
-    /**
-     * @param entParser
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withEntityParser(NCEntityParser entParser) {
-        Objects.requireNonNull(entParser, "Entity parser cannot be null.");
-
-        this.entParsers.add(entParser);
-
-        return this;
-    }
-
-    /**
-     * @param tokVals
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withTokenValidators(List<NCTokenValidator> tokVals) {
-        Objects.requireNonNull(tokVals, "List of token validators cannot be null.");
-        tokVals.forEach(p -> Objects.requireNonNull(p, "Token validator cannot be null."));
-
-        this.tokVals.addAll(tokVals);
-
-        return this;
-    }
-
-    /**
-     * @param tokVal
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withTokenValidator(NCTokenValidator tokVal) {
-        Objects.requireNonNull(tokVal, "Token validator cannot be null.");
-
-        this.tokVals.add(tokVal);
-
-        return this;
-    }
-
-    /**
-     * @param entVals
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withEntityValidators(List<NCEntityValidator> entVals) {
-        Objects.requireNonNull(entVals, "List of entity validators cannot be null.");
-        entVals.forEach(p -> Objects.requireNonNull(p, "Entity validators cannot be null."));
-
-        this.entVals.addAll(entVals);
-
-        return this;
-    }
-
-    /**
-     * @param entVal
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withEntityValidator(NCEntityValidator entVal) {
-        Objects.requireNonNull(entVal, "Entity validator cannot be null.");
-
-        this.entVals.add(entVal);
-
-        return this;
-    }
-
-    /**
-     * @param varFilter
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withVariantFilter(NCVariantFilter varFilter) {
-        this.varFilter = Optional.of(varFilter);
-
-        return this;
-    }
-
-    /**
-     *
-     * @param tokParser
-     * @return
-     */
-    public NCPipelineBuilder withTokenParser(NCTokenParser tokParser) {
-        Objects.requireNonNull(tokParser, "Token parser cannot be null.");
-
-        this.tokParser = tokParser;
-
-        return this;
-    }
-
-    /**
-     *
-     * @param entMappers
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withEntityMappers(List<NCEntityMapper> entMappers) {
-        Objects.requireNonNull(entMappers, "List of entity mappers cannot be null.");
-        entMappers.forEach(p -> Objects.requireNonNull(p, "Entity mapper cannot be null."));
-
-        this.entMappers.addAll(entMappers);
-
-        return this;
-    }
-
-    /**
-     * @param entMapper
-     * @return This instance for call chaining.
-     */
-    public NCPipelineBuilder withEntityMapper(NCEntityMapper entMapper) {
-        Objects.requireNonNull(entMapper, "Entity mapper cannot be null.");
-
-        this.entMappers.add(entMapper);
-
-        return this;
-    }
-    /**
-     *
-     */
-    private void setEnComponents() {
-        tokParser = mkEnOpenNLPTokenParser();
-
-        tokEnrichers.add(new NCOpenNLPLemmaPosTokenEnricher(
-            NCResourceReader.getPath("opennlp/en-pos-maxent.bin"),
-            NCResourceReader.getPath("opennlp/en-lemmatizer.dict")
-        ));
-        tokEnrichers.add(new NCEnStopWordsTokenEnricher());
-
-        tokEnrichers.add(new NCEnSwearWordsTokenEnricher(NCResourceReader.getPath("badfilter/swear_words.txt")));
-        tokEnrichers.add(new NCEnQuotesTokenEnricher());
-        tokEnrichers.add(new NCEnDictionaryTokenEnricher());
-        tokEnrichers.add(new NCEnBracketsTokenEnricher());
-    }
-
-    /**
-     *
-     * @param lang
-     * @param macros
-     * @param elms
-     * @return
-     */
-    public NCPipelineBuilder withSemantic(String lang, Map<String, String> macros, List<NCSemanticElement> elms) {
-        Objects.requireNonNull(lang, "Language cannot be null.");
-        Objects.requireNonNull(elms, "Model elements cannot be null.");
-        if (elms.isEmpty()) throw new IllegalArgumentException("Model elements cannot be empty.");
-
-        switch (lang.toUpperCase()) {
-            case "EN":
-                setEnComponents();
-
-                this.entParsers.add(new NCSemanticEntityParser(mkEnStemmer(), mkEnOpenNLPTokenParser(), macros, elms));
-
-                break;
-
-            default:
-                throw new IllegalArgumentException("Unsupported language: " + lang);
-        }
-
-        return this;
-    }
-
-    /**
-     *
-     * @param lang
-     * @param elms
-     * @return
-     */
-    public NCPipelineBuilder withSemantic(String lang, List<NCSemanticElement> elms) {
-        return withSemantic(lang, null, elms);
-    }
-
-    /**
-     *
-     * @param lang
-     * @param src
-     * @return
-     */
-    public NCPipelineBuilder withSemantic(String lang, String src) {
-        Objects.requireNonNull(lang, "Language cannot be null.");
-        Objects.requireNonNull(src, "Model source cannot be null.");
-
-        switch (lang.toUpperCase()) {
-            case "EN":
-                setEnComponents();
-
-                this.entParsers.add(new NCSemanticEntityParser(mkEnStemmer(), mkEnOpenNLPTokenParser(), src));
-
-                break;
-
-            default:
-                throw new IllegalArgumentException("Unsupported language: " + lang);
-        }
-
-        return this;
-    }
-
-
-    /**
-     * @return
-     */
-    public NCPipeline build() {
-        Objects.requireNonNull(tokParser, "Token parser cannot be null.");
-
-        // TODO: Text.
-        if (entParsers.isEmpty()) throw new IllegalStateException("At least oe entity parser should be defined.");
-
-        return new NCPipeline() {
-            @Override public NCTokenParser getTokenParser() {
-                return tokParser;
-            }
-            @Override public List<NCTokenEnricher> getTokenEnrichers() {
-                return tokEnrichers;
-            }
-            @Override public List<NCEntityEnricher> getEntityEnrichers() {
-                return entEnrichers;
-            }
-            @Override public List<NCEntityParser> getEntityParsers() {
-                return entParsers;
-            }
-            @Override public List<NCTokenValidator> getTokenValidators() {
-                return tokVals;
-            }
-            @Override public List<NCEntityValidator> getEntityValidators() {
-                return entVals;
-            }
-            @Override public Optional<NCVariantFilter> getVariantFilter() {
-                return varFilter;
-            }
-
-            @Override
-            public List<NCEntityMapper> getEntityMappers() {
-                return entMappers;
-            }
-        };
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipelineBuilder.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipelineBuilder.scala
new file mode 100644
index 00000000..2026ae07
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipelineBuilder.scala
@@ -0,0 +1,238 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+import opennlp.tools.stemmer.PorterStemmer
+import org.apache.nlpcraft.internal.util.NCResourceReader
+import org.apache.nlpcraft.nlp.entity.parser.semantic.*
+import org.apache.nlpcraft.nlp.token.enricher.*
+import org.apache.nlpcraft.nlp.token.parser.NCOpenNLPTokenParser
+
+import java.util.Objects
+
+class NCPipelineBuilder:
+    import scala.collection.mutable.ArrayBuffer as Buf
+
+    private var tokParser: Option[NCTokenParser] = None
+    private val tokEnrichers: Buf[NCTokenEnricher] = Buf.empty
+    private val entEnrichers: Buf[NCEntityEnricher] = Buf.empty
+    private val entParsers: Buf[NCEntityParser] = Buf.empty
+    private val tokVals: Buf[NCTokenValidator] = Buf.empty
+    private val entVals: Buf[NCEntityValidator] = Buf.empty
+    private val entMappers: Buf[NCEntityMapper] = Buf.empty
+    private var varFilter: Option[NCVariantFilter] = None
+
+    /**
+      *
+      * @return */
+    private def mkEnStemmer: NCSemanticStemmer =
+        new NCSemanticStemmer:
+            final private val ps: PorterStemmer = new PorterStemmer
+            override def stem(txt: String): String = ps.stem(txt)
+
+    private def mkEnOpenNLPTokenParser: NCOpenNLPTokenParser =
+        new NCOpenNLPTokenParser(NCResourceReader.getPath("opennlp/en-token.bin"))
+
+    /**
+      * @param tokEnrichers
+      * @return This instance for call chaining. */
+    def withTokenEnrichers(tokEnrichers: List[NCTokenEnricher]): NCPipelineBuilder =
+        Objects.requireNonNull(tokEnrichers, "List of token enrichers cannot be null.")
+        tokEnrichers.foreach((p: NCTokenEnricher) => Objects.requireNonNull(p, "Token enricher cannot be null."))
+        this.tokEnrichers ++= tokEnrichers
+        this
+
+    /**
+      * @param tokEnricher
+      * @return This instance for call chaining. */
+    def withTokenEnricher(tokEnricher: NCTokenEnricher): NCPipelineBuilder =
+        Objects.requireNonNull(tokEnricher, "Token enricher cannot be null.")
+        this.tokEnrichers += tokEnricher
+        this
+
+    /**
+      * @param entEnrichers
+      * @return This instance for call chaining. */
+    def withEntityEnrichers(entEnrichers: List[NCEntityEnricher]): NCPipelineBuilder =
+        Objects.requireNonNull(entEnrichers, "List of entity enrichers cannot be null.")
+        entEnrichers.foreach((p: NCEntityEnricher) => Objects.requireNonNull(p, "Entity enrichers cannot be null."))
+        this.entEnrichers ++= entEnrichers
+        this
+
+    /**
+      * @param entEnricher
+      * @return This instance for call chaining. */
+    def withEntityEnricher(entEnricher: NCEntityEnricher): NCPipelineBuilder = 
+        Objects.requireNonNull(entEnricher, "Entity enricher cannot be null.")
+        this.entEnrichers += entEnricher
+        this
+
+    /**
+      * @param entParsers
+      * @return This instance for call chaining. */
+    def withEntityParsers(entParsers: List[NCEntityParser]): NCPipelineBuilder =
+        Objects.requireNonNull(entParsers, "List of entity parsers cannot be null.")
+        entParsers.foreach((p: NCEntityParser) => Objects.requireNonNull(p, "Entity parser cannot be null."))
+        this.entParsers ++= entParsers
+        this
+
+    /**
+      * @param entParser
+      * @return This instance for call chaining. */
+    def withEntityParser(entParser: NCEntityParser): NCPipelineBuilder =
+        Objects.requireNonNull(entParser, "Entity parser cannot be null.")
+        this.entParsers += entParser
+        this
+
+    /**
+      * @param tokVals
+      * @return This instance for call chaining. */
+    def withTokenValidators(tokVals: List[NCTokenValidator]): NCPipelineBuilder =
+        Objects.requireNonNull(tokVals, "List of token validators cannot be null.")
+        tokVals.foreach((p: NCTokenValidator) => Objects.requireNonNull(p, "Token validator cannot be null."))
+        this.tokVals ++= tokVals
+        this
+
+
+    /**
+      * @param tokVal
+      * @return This instance for call chaining. */
+    def withTokenValidator(tokVal: NCTokenValidator): NCPipelineBuilder =
+        Objects.requireNonNull(tokVal, "Token validator cannot be null.")
+        this.tokVals += tokVal
+        this
+
+    /**
+      * @param entVals
+      * @return This instance for call chaining. */
+    def withEntityValidators(entVals: List[NCEntityValidator]): NCPipelineBuilder =
+        Objects.requireNonNull(entVals, "List of entity validators cannot be null.")
+        entVals.foreach((p: NCEntityValidator) => Objects.requireNonNull(p, "Entity validators cannot be null."))
+        this.entVals ++= entVals
+        this
+
+    /**
+      * @param entVal
+      * @return This instance for call chaining. */
+    def withEntityValidator(entVal: NCEntityValidator): NCPipelineBuilder =
+        Objects.requireNonNull(entVal, "Entity validator cannot be null.")
+        this.entVals += entVal
+        this
+
+    /**
+      * @param varFilter
+      * @return This instance for call chaining. */
+    def withVariantFilter(varFilter: NCVariantFilter): NCPipelineBuilder =
+        this.varFilter = Some(varFilter)
+        this
+
+    /**
+      *
+      * @param tokParser
+      * @return */
+    def withTokenParser(tokParser: NCTokenParser): NCPipelineBuilder =
+        Objects.requireNonNull(tokParser, "Token parser cannot be null.")
+        this.tokParser = Some(tokParser)
+        this
+
+    /**
+      *
+      * @param entMappers
+      * @return This instance for call chaining. */
+    def withEntityMappers(entMappers: List[NCEntityMapper]): NCPipelineBuilder =
+        Objects.requireNonNull(entMappers, "List of entity mappers cannot be null.")
+        entMappers.foreach((p: NCEntityMapper) => Objects.requireNonNull(p, "Entity mapper cannot be null."))
+        this.entMappers ++= entMappers
+        this
+
+    /**
+      * @param entMapper
+      * @return This instance for call chaining. */
+    def withEntityMapper(entMapper: NCEntityMapper): NCPipelineBuilder =
+        Objects.requireNonNull(entMapper, "Entity mapper cannot be null.")
+        this.entMappers += entMapper
+        this
+
+    /**
+      * */
+    private def setEnComponents(): Unit =
+        tokParser = Some(mkEnOpenNLPTokenParser)
+        tokEnrichers += new NCOpenNLPLemmaPosTokenEnricher(NCResourceReader.getPath("opennlp/en-pos-maxent.bin"), NCResourceReader.getPath("opennlp/en-lemmatizer.dict"))
+        tokEnrichers += new NCEnStopWordsTokenEnricher
+        tokEnrichers += new NCEnSwearWordsTokenEnricher(NCResourceReader.getPath("badfilter/swear_words.txt"))
+        tokEnrichers += new NCEnQuotesTokenEnricher
+        tokEnrichers += new NCEnDictionaryTokenEnricher
+        tokEnrichers += new NCEnBracketsTokenEnricher
+
+    /**
+      *
+      * @param lang
+      * @param macros
+      * @param elms
+      * @return */
+    def withSemantic(lang: String, macros: Map[String, String], elms: List[NCSemanticElement]): NCPipelineBuilder =
+        Objects.requireNonNull(lang, "Language cannot be null.")
+        Objects.requireNonNull(elms, "Model elements cannot be null.")
+        if elms.isEmpty then throw new IllegalArgumentException("Model elements cannot be empty.")
+
+        lang.toUpperCase match
+            case "EN" =>
+                setEnComponents()
+                this.entParsers += new NCSemanticEntityParser(mkEnStemmer, mkEnOpenNLPTokenParser, macros, elms)
+            case _ => throw new IllegalArgumentException("Unsupported language: " + lang)
+        this
+
+    /**
+      *
+      * @param lang
+      * @param elms
+      * @return */
+    def withSemantic(lang: String, elms: List[NCSemanticElement]): NCPipelineBuilder = withSemantic(lang, null, elms)
+
+    /**
+      *
+      * @param lang
+      * @param src
+      * @return */
+    def withSemantic(lang: String, src: String): NCPipelineBuilder =
+        Objects.requireNonNull(lang, "Language cannot be null.")
+        Objects.requireNonNull(src, "Model source cannot be null.")
+        lang.toUpperCase match
+            case "EN" =>
+                setEnComponents()
+                this.entParsers += new NCSemanticEntityParser(mkEnStemmer, mkEnOpenNLPTokenParser, mdlSrc = src)
+            case _ => throw new IllegalArgumentException("Unsupported language: " + lang)
+        this
+
+
+    /**
+      * @return */
+    def build: NCPipeline =
+        // TODO: Text.
+        if tokParser.isEmpty then throw new IllegalArgumentException("Token parser must be defined.")
+        if entParsers.isEmpty then throw new IllegalStateException("At least one entity parser must be defined.")
+
+        new NCPipeline():
+            override def getTokenParser: NCTokenParser = tokParser.get
+            override def getTokenEnrichers: List[NCTokenEnricher] = tokEnrichers.toList
+            override def getEntityEnrichers: List[NCEntityEnricher] = entEnrichers.toList
+            override def getEntityParsers: List[NCEntityParser] = entParsers.toList
+            override def getTokenValidators: List[NCTokenValidator] = tokVals.toList
+            override def getEntityValidators: List[NCEntityValidator] = entVals.toList
+            override def getVariantFilter: Option[NCVariantFilter] = varFilter
+            override def getEntityMappers: List[NCEntityMapper] = entMappers.toList
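
As a usage sketch of the builder above: the one-liner below assembles the default English pipeline (OpenNLP token parser plus the standard English token enrichers) with a semantic entity parser loaded from a model source; the file name "my_model.yaml" is a hypothetical placeholder.

    import org.apache.nlpcraft.*

    // Default English components plus a semantic entity parser from a model source file.
    val pipeline: NCPipeline = new NCPipelineBuilder()
        .withSemantic("EN", "my_model.yaml") // Hypothetical model source used for illustration.
        .build

    // Components can also be supplied one by one, e.g.:
    // new NCPipelineBuilder().withTokenParser(myTokParser).withEntityParser(myEntParser).build
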
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMap.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMap.java
deleted file mode 100644
index f13fa171..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMap.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.Optional;
-import java.util.Set;
-
-/**
- * Map-like container that provides support for mutable runtime-only propertes or metadata.
- *
- * @see NCPropertyMapAdapter
- * @see NCToken
- * @see NCEntity
- */
-public interface NCPropertyMap {
-    /**
-     * Returns the value to which the specified key is mapped, or {@code null} if this map contains no mapping for the key.
-     *
-     * @param key The key whose associated value is to be returned.
-     * @param <T> Type of the returned value.
-     * @return The value to which the specified key is mapped, or {@code null} if this map contains no mapping for the key.
-     */
-    <T> T get(String key);
-
-    /**
-     * Returns the value to which the specified key is mapped as an optional. This method is equivalent to:
-     * <pre class="brush: java">
-     *     return Optional.ofNullable((T)map.get(key));
-     * </pre>
-     *
-     * @param key The key whose associated value is to be returned.
-     * @param <T> Type of the returned value.
-     * @return The value to which the specified key is mapped as an optional.
-     */
-    <T> Optional<T> getOpt(String key);
-
-    /**
-     * Associates the specified value with the specified key in this map. If the map previously contained a mapping
-     * for the key, the old value is replaced by the specified value.
-     *
-     * @param key Key with which the specified value is to be associated.
-     * @param obj Value to be associated with the specified key.
-     * @param <T> Type of the value.
-     * @return The previous value associated with key, or {@code null} if there was no mapping for key.
-     */
-    <T> T put(String key, Object obj);
-
-    /**
-     * If the specified key is not already associated with a value (or is mapped to {@code null}) associates it with
-     * the given value and returns {@code null}, else returns the current value.
-     *
-     * @param key Key with which the specified value is to be associate
-     * @param obj Value to be associated with the specified key
-     * @param <T> Type of the value.
-     * @return The previous value associated with the specified key, or {@code null} if there was no mapping for the key.
-     */
-    <T> T putIfAbsent(String key, T obj);
-
-    /**
-     * Returns {@code true} if this map contains a mapping for the specified key.
-     *
-     * @return {@code true} if this map contains a mapping for the specified key.
-     */
-    boolean contains(String key);
-
-    /**
-     * Removes the mapping for a key from this map if it is present.
-     *
-     * @param key Key whose mapping is to be removed from the map.
-     * @param <T> Type of the value.
-     * @return The previous value associated with key, or {@code null} if there was no mapping for key.
-     */
-    <T> T remove(String key);
-
-    /**
-     * Removes the entry for the specified key only if it is currently mapped to the specified value.
-     *
-     * @param key Key with which the specified value is associated value.
-     * @param obj Value expected to be associated with the specified key.
-     * @return {@code true} if the value was removed
-     */
-    boolean remove(String key, Object obj);
-
-    /**
-     * Returns a set view of the keys contained in this map.
-     *
-     * @return A set view of the keys contained in this map
-     */
-    Set<String> keysSet();
-
-    /**
-     * Removes all of the mappings from this map. The map will be empty after this call returns.
-     */
-    void clear();
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMap.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMap.scala
new file mode 100644
index 00000000..ee1ab5f3
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMap.scala
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * Map-like container that provides support for mutable runtime-only properties or metadata.
+  *
+  * @see NCPropertyMapAdapter
+  * @see NCToken
+  * @see NCEntity */
+trait NCPropertyMap:
+    /**
+      * Returns the value to which the specified key is mapped, or {@code null} if this map contains no mapping for the key.
+      *
+      * @param key The key whose associated value is to be returned.
+      * @tparam T Type of the returned value.
+      * @return The value to which the specified key is mapped, or {@code null} if this map contains no mapping for the key. */
+    def get[T](key: String): T
+
+    /**
+      * Returns the value to which the specified key is mapped as an option. This method is equivalent to:
+      * <pre>
+      * Option(get(key))
+      * </pre>
+      *
+      * @param key The key whose associated value is to be returned.
+      * @tparam T Type of the returned value.
+      * @return The value to which the specified key is mapped as an option. */
+    def getOpt[T](key: String): Option[T]
+
+    /**
+      * Associates the specified value with the specified key in this map. If the map previously contained a mapping
+      * for the key, the old value is replaced by the specified value.
+      *
+      * @param key Key with which the specified value is to be associated.
+      * @param obj Value to be associated with the specified key.
+      * @tparam T Type of the value.
+      * @return The previous value associated with key, or {@code null} if there was no mapping for key. */
+    def put[T](key: String, obj: Any): T
+
+    /**
+      * If the specified key is not already associated with a value (or is mapped to {@code null}) associates it with
+      * the given value and returns {@code null}, else returns the current value.
+      *
+      * @param key Key with which the specified value is to be associated.
+      * @param obj Value to be associated with the specified key.
+      * @tparam T Type of the value.
+      * @return The previous value associated with the specified key, or {@code null} if there was no mapping for the key. */
+    def putIfAbsent[T](key: String, obj: T): T
+
+    /**
+      * Returns {@code true} if this map contains a mapping for the specified key.
+      *
+      * @return {@code true} if this map contains a mapping for the specified key. */
+    def contains(key: String): Boolean
+
+    /**
+      * Removes the mapping for a key from this map if it is present.
+      *
+      * @param key Key whose mapping is to be removed from the map.
+      * @tparam T Type of the value.
+      * @return The previous value associated with key, or {@code null} if there was no mapping for key. */
+    def remove[T](key: String): T
+
+    /**
+      * Removes the entry for the specified key only if it is currently mapped to the specified value.
+      *
+      * @param key Key with which the specified value is associated.
+      * @param obj Value expected to be associated with the specified key.
+      * @return {@code true} if the value was removed. */
+    def remove(key: String, obj: Any): Boolean
+
+    /**
+      * Returns a set view of the keys contained in this map.
+      *
+      * @return A set view of the keys contained in this map */
+    def keysSet: Set[String]
+
+    /**
+      * Removes all of the mappings from this map. The map will be empty after this call returns. */
+    def clear(): Unit
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMapAdapter.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMapAdapter.java
deleted file mode 100644
index a9f6f6c3..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMapAdapter.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-
-/**
- * Convenient adapter for {@link NCPropertyMap} interface.
- */
-@SuppressWarnings("unchecked")
-public class NCPropertyMapAdapter implements NCPropertyMap {
-    private final Map<String, Object> map = new ConcurrentHashMap<>();
-
-    @Override
-    public <T> T get(String key) {
-        return (T)map.get(key);
-    }
-
-    @Override
-    public <T> Optional<T> getOpt(String key) {
-        return Optional.ofNullable((T)map.get(key));
-    }
-
-    @Override
-    public <T> T put(String key, Object obj) {
-        return (T)map.put(key, obj);
-    }
-
-    @Override
-    public <T> T putIfAbsent(String key, T obj) {
-        return (T)map.putIfAbsent(key, obj);
-    }
-
-    @Override
-    public boolean contains(String key) {
-        return map.containsKey(key);
-    }
-
-    @Override
-    public <T> T remove(String key) {
-        return (T)map.remove(key);
-    }
-
-    @Override
-    public boolean remove(String key, Object obj) {
-        return map.remove(key, obj);
-    }
-
-    @Override
-    public Set<String> keysSet() {
-        return map.keySet();
-    }
-
-    @Override
-    public void clear() {
-        map.clear();
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMapAdapter.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMapAdapter.scala
new file mode 100644
index 00000000..3a05cec8
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMapAdapter.scala
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+import scala.jdk.CollectionConverters.*
+import java.util.concurrent.ConcurrentHashMap
+
+/**
+  * Convenient adapter for {@link NCPropertyMap} interface. */
+class NCPropertyMapAdapter extends NCPropertyMap:
+    private val map = new ConcurrentHashMap[String, Any]
+
+    // TODO: or error?
+    def get[T](key: String): T = getOpt(key).orNull.asInstanceOf[T]
+
+    def getOpt[T](key: String): Option[T] =
+        map.get(key) match
+            case null => None
+            case x => Some(x.asInstanceOf[T])
+
+    def put[T](key: String, obj: Any): T = map.put(key, obj).asInstanceOf[T]
+
+    def putIfAbsent[T](key: String, obj: T): T = map.putIfAbsent(key, obj).asInstanceOf[T]
+
+    def contains(key: String): Boolean = map.containsKey(key)
+
+    def remove[T](key: String): T = map.remove(key).asInstanceOf[T]
+
+    def remove(key: String, obj: Any): Boolean = map.remove(key, obj)
+
+    def keysSet = map.keys().asScala.toSet
+
+    def clear(): Unit = map.clear()
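
A short usage sketch of this adapter (the key names and values are arbitrary, for illustration only):

    import org.apache.nlpcraft.*

    @main def propertyMapDemo(): Unit =
        val props = new NCPropertyMapAdapter
        props.put[Any]("score", 0.75)                    // No previous mapping -> returns null.
        val score: Double = props.get("score")           // 0.75
        val missing: Option[String] = props.getOpt("id") // None - no such key.
        val has = props.contains("score")                // true
        println(s"score=$score, has=$has, missing=$missing")
        props.remove[Any]("score")
        props.clear()
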
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCRejection.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCRejection.java
deleted file mode 100644
index f0638956..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCRejection.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-/**
- * Exception to indicate that user input cannot be processed as is. This exception can be thrown from
- * intent callbacks.
- * <p>
- * This exception typically indicates that user has not provided enough information in the input string
- * to have it processed automatically. In most cases this means that the user's input is either too short
- * or too simple, too long or too complex, missing required context, or unrelated to requested data model.
- */
-public class NCRejection extends NCException {
-    /**
-     * Creates new rejection exception with given message.
-     *
-     * @param msg Rejection message.
-     */
-    public NCRejection(String msg) {
-        super(msg);
-    }
-
-    /**
-     * Creates new rejection exception with given message and cause.
-     *
-     * @param msg Rejection message.
-     * @param cause Cause of this exception.
-     */
-    public NCRejection(String msg, Throwable cause) {
-        super(msg, cause);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCResultType.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCRejection.scala
similarity index 58%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/NCResultType.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/NCRejection.scala
index 400d4067..6a08daff 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCResultType.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCRejection.scala
@@ -15,19 +15,13 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
+package org.apache.nlpcraft
 
 /**
- *
- */
-public enum NCResultType {
-    /**
-     * Final result is ready.
-     */
-    ASK_RESULT,
-
-    /**
-     * Ask user back engaging in dialog.
-     */
-    ASK_DIALOG
-}
+  * Exception to indicate that user input cannot be processed as is. This exception can be thrown from
+  * intent callbacks.
+  * <p>
+  * This exception typically indicates that the user has not provided enough information in the input string
+  * to have it processed automatically. In most cases this means that the user's input is either too short
+  * or too simple, too long or too complex, missing required context, or unrelated to the requested data model. */
+class NCRejection(msg: String, cause: Throwable = null) extends RuntimeException(msg, cause)
\ No newline at end of file
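
For illustration, a model callback might reject an under-specified request along these lines (the helper and the rejection message are hypothetical):

    import org.apache.nlpcraft.*

    // Rejects the request when the user did not provide a required detail,
    // instead of guessing; the rejection message is sent back to the user.
    def requireRoom(roomOpt: Option[String]): String = roomOpt match
        case Some(room) => room
        case None => throw new NCRejection("Please specify which room you mean.")
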
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCRequest.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCRequest.java
deleted file mode 100644
index 9dc55876..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCRequest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.Map;
-
-/**
- * Descriptor for the input user request.
- *
- * @see NCContext#getRequest()
- */
-public interface NCRequest {
-    /**
-     * Gets ID of the user on behalf of which this request was submitted. User ID is used by
-     * NLPCraft to manage the conversation state. It can be any value as long as it is constant
-     * and globally unique for the given user.
-     *
-     * @return User ID.
-     */
-    String getUserId();
-
-    /**
-     * Gets globally unique ID of the current request.
-     * <p>
-     * A request is defined as a processing of a one user input request.
-     * Note that the model can be accessed multiple times during processing of a single user request
-     * and therefore multiple instances of this interface can return the same request ID. In fact, users
-     * of this interfaces can use this fact by using this ID, for example, as a map key for a session
-     * scoped storage.
-     *
-     * @return Request ID.
-     */
-    String getRequestId();
-
-    /**
-     *
-     * @return
-     */
-    String getText();
-
-    /**
-     * Gets UTC/GMT timestamp in millis when user input was received.
-     *
-     * @return UTC/GMT timestamp in ms when user input was received.
-     */
-    long getReceiveTimestamp();
-
-    /**
-     * Gets optional user request data.
-     *
-     * @return Optional user request data, can be empty but never {@code null}.
-     */
-    Map<String, Object> getRequestData();
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCRequest.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCRequest.scala
new file mode 100644
index 00000000..25136210
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCRequest.scala
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * Descriptor for the input user request.
+  *
+  * @see NCContext#getRequest() */
+trait NCRequest:
+    /**
+      * Gets ID of the user on behalf of which this request was submitted. User ID is used by
+      * NLPCraft to manage the conversation state. It can be any value as long as it is constant
+      * and globally unique for the given user.
+      *
+      * @return User ID. */
+    def getUserId: String
+
+    /**
+      * Gets globally unique ID of the current request.
+      * <p>
+      * A request is defined as the processing of one user input.
+      * Note that the model can be accessed multiple times during the processing of a single user request
+      * and therefore multiple instances of this interface can return the same request ID. Users
+      * of this interface can rely on this fact and use this ID, for example, as a map key for
+      * session-scoped storage.
+      *
+      * @return Request ID. */
+    def getRequestId: String
+
+    /**
+      * Gets the original text of the user input.
+      *
+      * @return Original request text. */
+    def getText: String
+
+    /**
+      * Gets UTC/GMT timestamp in millis when user input was received.
+      *
+      * @return UTC/GMT timestamp in ms when user input was received. */
+    def getReceiveTimestamp: Long
+
+    /**
+      * Gets optional user request data.
+      *
+      * @return Optional user request data, can be empty but never {@code null}. */
+    def getRequestData: Map[String, Any]
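
A small sketch showing how this descriptor might be inspected, e.g. for logging (the formatting is arbitrary):

    import org.apache.nlpcraft.*
    import java.time.Instant

    // Builds a one-line, human-readable summary of an incoming request.
    def summarize(req: NCRequest): String =
        val ts = Instant.ofEpochMilli(req.getReceiveTimestamp)
        s"[${req.getRequestId}] user=${req.getUserId} at $ts: '${req.getText}' data keys: ${req.getRequestData.keySet}"
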
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCResult.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCResult.java
deleted file mode 100644
index 5c5dfc31..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCResult.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.io.Serializable;
-
-/**
- *
- */
-public class NCResult implements Serializable {
-    /** Rresult text. */
-    private Object body;
-
-    /** Result type. */
-    private NCResultType type;
-
-    /** ID of the intent. */
-    private String intentId;
-
-    /**
-     * Creates new result with given body and type.
-     *
-     * @param body Result body.
-     * @param type Result type.
-     */
-    public NCResult(Object body, NCResultType type) {
-        assert body != null;
-        assert type != null;
-
-        this.body = body;
-        this.type = type;
-    }
-
-    /**
-     * No-arg constructor.
-     */
-    public NCResult() {
-        // No-op.
-    }
-
-    /**
-     * Sets result body.
-     *
-     * @param body Result body.
-     */
-    public void setBody(Object body) {
-        this.body = body;
-    }
-
-    /**
-     * Set result type.
-     *
-     * @param type Result type.
-     */
-    public void setType(NCResultType type) {
-        this.type = type;
-    }
-
-    /**
-     * Gets result type.
-     *
-     * @return Result type.
-     */
-    public NCResultType getType() {
-        return type;
-    }
-
-    /**
-     * Gets result body.
-     *
-     * @return Result body.
-     */
-    public Object getBody() {
-        return body;
-    }
-
-    /**
-     * Get optional intent ID.
-     *
-     * @return Intent ID or {@code null} if intent ID is not available.
-     */
-    public String getIntentId() {
-        return intentId;
-    }
-
-    /**
-     * Sets optional intent ID.
-     *
-     * @param intentId Intent ID to set for this result.
-     */
-    public void setIntentId(String intentId) {
-        this.intentId = intentId;
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCCallbackData.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCResult.scala
similarity index 62%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/NCCallbackData.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/NCResult.scala
index d6a3db20..adde4903 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCCallbackData.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCResult.scala
@@ -15,30 +15,13 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
+package org.apache.nlpcraft
 
-import java.util.List;
-import java.util.function.Function;
+import org.apache.nlpcraft.NCResultType.*
 
-/**
- *
- */
-public interface NCCallbackData {
-    /**
-     *
-     * @return
-     */
-    String getIntentId();
-
-    /**
-     *
-     * @return
-     */
-    List<List<NCEntity>> getCallbackArguments();
+object NCResult:
+    def apply(): NCResult = new NCResult()
+    def apply(body: Any, resultType: NCResultType): NCResult = new NCResult(body = body, resultType = resultType)
+    def apply(body: Any, resultType: NCResultType, intentId: String): NCResult = new NCResult(body = body, resultType = resultType, intentId = intentId)
 
-    /**
-     *
-     * @return
-     */
-    Function<List<List<NCEntity>>, NCResult> getCallback();
-}
+class NCResult(var body: Any = null, var resultType: NCResultType = null, var intentId: String = null)
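
Typical construction goes through the companion object; a brief sketch (the result texts are arbitrary):

    import org.apache.nlpcraft.*
    import org.apache.nlpcraft.NCResultType.*

    // Final answer for the user.
    val done: NCResult = NCResult("Lights are turned off.", ASK_RESULT)
    // Ask the user back for clarification, engaging in a dialog.
    val ask: NCResult = NCResult("Which room do you mean?", ASK_DIALOG)
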
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticStemmer.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCResultType.scala
similarity index 81%
copy from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticStemmer.java
copy to nlpcraft/src/main/scala/org/apache/nlpcraft/NCResultType.scala
index 279e4f4d..f1b7b486 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticStemmer.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCResultType.scala
@@ -15,16 +15,9 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.entity.parser.semantic;
+package org.apache.nlpcraft
 
-/**
- * 
- */
-public interface NCSemanticStemmer {
+enum NCResultType:
     /**
-     *
-     * @param txt
-     * @return
-     */
-    String stem(String txt);
-}
+      * {@code ASK_RESULT} - the final result is ready; {@code ASK_DIALOG} - ask the user back, engaging in a dialog. */
+    case ASK_RESULT, ASK_DIALOG
\ No newline at end of file
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCToken.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCToken.java
deleted file mode 100644
index d1953d6a..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCToken.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-/**
- * Represents a contiguous substring of the original input text produced by {@link NCTokenParser}.
- * A token is the result of {@link NCTokenParser tokenization} - the process of demarcating and classifying sections of a string of
- * input characters. See {@link NCPipeline} for documentation on the tokens place in the overall processing pipeline.
- *
- * <span class="hdr">Metadata</span>
- * Note that both {@link NCToken} and {@link NCEntity} interfaces extend {@link NCPropertyMap} interface
- * that allows them to store custom metadata properties. Parser, enrichers and validators for tokens
- * and entities use this capability to store and check their properties in tokens and entities.
- *
- * @see NCEntity
- * @see NCToken
- * @see NCTokenParser
- * @see NCTokenEnricher
- * @see NCTokenValidator
- * @see NCEntityParser
- * @see NCEntityEnricher
- * @see NCEntityValidator
- * @see NCEntityMapper
- */
-public interface NCToken extends NCPropertyMap {
-    /**
-     * Gets the text of this token.
-     *
-     * @return Text of this token.
-     */
-    String getText();
-
-    /**
-     * Gets the index of this token in the list returned bu {@link NCTokenParser#tokenize(String)} method.
-     *
-     * @return Zero-based index of this token in the list returned bu {@link NCTokenParser#tokenize(String)} method.
-     */
-    int getIndex();
-
-    /**
-     * Gets the inclusive start position of this token's text in the original input text supplied to
-     * {@link NCTokenParser#tokenize(String)} method.
-     *
-     * @return Start position (inlcusive) of this token's text in the original input text.
-     */
-    int getStartCharIndex();
-
-    /**
-     * Gets the inclusive end position of this token's text in the original input text supplied to
-     * {@link NCTokenParser#tokenize(String)} method.
-     *
-     * @return End position (invlusvie) of this token's text in the original input text.
-     * @return
-     */
-    int getEndCharIndex();
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCToken.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCToken.scala
new file mode 100644
index 00000000..46ba149b
--- /dev/null
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCToken.scala
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft
+
+/**
+  * Represents a contiguous substring of the original input text produced by {@link NCTokenParser}.
+  * A token is the result of {@link NCTokenParser tokenization} - the process of demarcating and classifying sections of a string of
+  * input characters. See {@link NCPipeline} for documentation on the token's place in the overall processing pipeline.
+  *
+  * <span class="hdr">Metadata</span>
+  * Note that both {@link NCToken} and {@link NCEntity} interfaces extend the {@link NCPropertyMap} interface
+  * that allows them to store custom metadata properties. Parsers, enrichers and validators for tokens
+  * and entities use this capability to store and check their properties in tokens and entities.
+  *
+  * @see NCEntity
+  * @see NCToken
+  * @see NCTokenParser
+  * @see NCTokenEnricher
+  * @see NCTokenValidator
+  * @see NCEntityParser
+  * @see NCEntityEnricher
+  * @see NCEntityValidator
+  * @see NCEntityMapper */
+trait NCToken extends NCPropertyMap:
+    /**
+      * Gets the text of this token.
+      *
+      * @return Text of this token. */
+    def getText: String
+
+    /**
+      * Gets the index of this token in the list returned by the {@link NCTokenParser#tokenize(String)} method.
+      *
+      * @return Zero-based index of this token in the list returned by the {@link NCTokenParser#tokenize(String)} method. */
+    def getIndex: Int
+
+    /**
+      * Gets the inclusive start position of this token's text in the original input text supplied to
+      * the {@link NCTokenParser#tokenize(String)} method.
+      *
+      * @return Start position (inclusive) of this token's text in the original input text. */
+    def getStartCharIndex: Int
+
+    /**
+      * Gets the inclusive end position of this token's text in the original input text supplied to
+      * the {@link NCTokenParser#tokenize(String)} method.
+      *
+      * @return End position (inclusive) of this token's text in the original input text. */
+    def getEndCharIndex: Int
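
A short sketch of inspecting tokens produced by the OpenNLP token parser added in this commit, assuming tokenize() returns the list of tokens as documented (the sample sentence is arbitrary):

    import org.apache.nlpcraft.*
    import org.apache.nlpcraft.internal.util.NCResourceReader
    import org.apache.nlpcraft.nlp.token.parser.NCOpenNLPTokenParser

    @main def tokenDemo(): Unit =
        val parser = new NCOpenNLPTokenParser(NCResourceReader.getPath("opennlp/en-token.bin"))
        // Print each token with its index and character span in the original text.
        for tok <- parser.tokenize("Turn off the lights in the kitchen.") do
            println(s"#${tok.getIndex}: '${tok.getText}' [${tok.getStartCharIndex}..${tok.getEndCharIndex}]")
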
diff --git a/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestModelAdapter.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenEnricher.scala
similarity index 69%
copy from nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestModelAdapter.scala
copy to nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenEnricher.scala
index a5412b0f..de5fcfa3 100644
--- a/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestModelAdapter.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenEnricher.scala
@@ -15,14 +15,18 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.util
-
-import org.apache.nlpcraft.*
-import org.apache.nlpcraft.nlp.util.*
+package org.apache.nlpcraft
 
 /**
   *
-  */
-abstract class NCTestModelAdapter extends NCModel:
-    override val getConfig: NCModelConfig = CFG
-    override val getPipeline: NCPipeline = mkEnPipeline
\ No newline at end of file
+  * @see NCEntity
+  * @see NCToken
+  * @see NCTokenParser
+  * @see NCTokenEnricher
+  * @see NCTokenValidator
+  * @see NCEntityParser
+  * @see NCEntityEnricher
+  * @see NCEntityValidator
+  * @see NCEntityMapper */
+trait NCTokenEnricher extends NCLifecycle:
+    def enrich(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit
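
A custom token enricher only has to implement the single `enrich` callback above. A hedged sketch that flags all-uppercase tokens via token metadata; it assumes NCPropertyMap exposes a `put` mutator (implied by the metadata notes in this commit but not shown here) and that NCLifecycle callbacks have default no-op implementations:

    import org.apache.nlpcraft.*

    // Sketch only: marks all-uppercase tokens with a custom metadata flag.
    // Assumes NCPropertyMap provides a put(key, value) mutator and that
    // NCLifecycle's start/stop callbacks have default no-op implementations.
    class UppercaseTokenEnricher extends NCTokenEnricher:
        override def enrich(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit =
            for tok <- toks if tok.getText.nonEmpty && tok.getText.forall(_.isUpper) do
                tok.put("example:allcaps", true)
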
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenParser.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenParser.java
deleted file mode 100644
index de33198d..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenParser.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.List;
-
-/**
- * A tokenizer that splits a text into the list of {@link NCToken tokens}.
- * <p>
- * See {@link NCPipeline} for documentation on the token parser place
- * in the overall processing pipeline.
- *
- * @see NCEntity
- * @see NCToken
- * @see NCTokenParser
- * @see NCTokenEnricher
- * @see NCTokenValidator
- * @see NCEntityParser
- * @see NCEntityEnricher
- * @see NCEntityValidator
- * @see NCEntityMapper
- */
-public interface NCTokenParser {
-    /**
-     * Splits given text into list of tokens. Can return an empty list but never {@code null}.
-     *
-     * @param text A text to split into tokens. Can be empty but never {@code null}.
-     * @return List of tokens. Can be empty but never {@code null}.
-     */
-    List<NCToken> tokenize(String text);
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenValidator.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenParser.scala
similarity index 61%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenValidator.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenParser.scala
index 94b7ffe9..1679da5d 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenValidator.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenParser.scala
@@ -15,28 +15,22 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
-
-import java.util.List;
+package org.apache.nlpcraft
 
 /**
- *
- * @see NCEntity
- * @see NCToken
- * @see NCTokenParser
- * @see NCTokenEnricher
- * @see NCTokenValidator
- * @see NCEntityParser
- * @see NCEntityEnricher
- * @see NCEntityValidator
- * @see NCEntityMapper
- */
-public interface NCTokenValidator extends NCLifecycle {
-    /**
-     *
-     * @param req
-     * @param cfg
-     * @param toks
-     */
-    void validate(NCRequest req, NCModelConfig cfg, List<NCToken> toks);
-}
+  * A tokenizer that splits the input text into a list of {@link NCToken tokens}.
+  * <p>
+  * See {@link NCPipeline} for documentation on the token parser place
+  * in the overall processing pipeline.
+  *
+  * @see NCEntity
+  * @see NCToken
+  * @see NCTokenParser
+  * @see NCTokenEnricher
+  * @see NCTokenValidator
+  * @see NCEntityParser
+  * @see NCEntityEnricher
+  * @see NCEntityValidator
+  * @see NCEntityMapper */
+trait NCTokenParser:
+    def tokenize(text: String): List[NCToken]
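
To make the relocated trait concrete, a naive whitespace-based NCTokenParser could look like the sketch below. It assumes an NCPropertyMapAdapter helper is available to back the NCPropertyMap side of NCToken; that class is not part of this diff, so treat it as an assumption:

    import org.apache.nlpcraft.*

    // Sketch only: splits input on whitespace and exposes each word as an NCToken.
    // NCPropertyMapAdapter is assumed to supply the NCPropertyMap plumbing.
    class WhitespaceTokenParser extends NCTokenParser:
        override def tokenize(text: String): List[NCToken] =
            val matches = """\S+""".r.findAllMatchIn(text).toList
            for (m, idx) <- matches.zipWithIndex yield
                new NCPropertyMapAdapter with NCToken:
                    override def getText: String = m.matched
                    override def getIndex: Int = idx
                    override def getStartCharIndex: Int = m.start
                    override def getEndCharIndex: Int = m.end - 1 // End index is inclusive.
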
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenValidator.scala
similarity index 69%
copy from nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala
copy to nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenValidator.scala
index e41aaee3..49185a0d 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenValidator.scala
@@ -15,16 +15,20 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.internal.intent
-
-import org.apache.nlpcraft.*
-
-import scala.jdk.CollectionConverters.*
+package org.apache.nlpcraft
 
 /**
   *
-  * @param impl
-  * @param index
-  */
-case class NCIDLEntity(impl: NCEntity, index: Int):
-    lazy val text: String = impl.mkText()
+  * @see NCEntity
+  * @see NCToken
+  * @see NCTokenParser
+  * @see NCTokenEnricher
+  * @see NCTokenValidator
+  * @see NCEntityParser
+  * @see NCEntityEnricher
+  * @see NCEntityValidator
+  * @see NCEntityMapper */
+trait NCTokenValidator extends NCLifecycle:
+    def validate(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit
+
+
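
A hedged sketch of a token validator against the trait above, rejecting overly long requests. It assumes NCException (referenced elsewhere in this commit) is the standard way to signal a failed validation, and that NCLifecycle callbacks default to no-ops:

    import org.apache.nlpcraft.*

    // Sketch only: fails the request if it produces more than 'maxToks' tokens.
    // NCException is assumed to be the standard rejection mechanism.
    class MaxTokensValidator(maxToks: Int = 50) extends NCTokenValidator:
        override def validate(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit =
            if toks.sizeIs > maxToks then
                throw new NCException(s"Too many tokens in request: ${toks.size} > $maxToks")
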
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCVariant.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCVariant.java
deleted file mode 100644
index 5e77f2ac..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCVariant.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft;
-
-import java.util.List;
-
-/**
- * A parsing variant is a list of entities defining one possible parsing of the ipnut query. Note that a given input 
- * query can have one or more possible different parsing variants. Depending on model configuration a user input
- * can produce hundreds or even thousands of parsing variants.
- * 
- * @see NCModel#onVariant(NCVariant)
- */
-public interface NCVariant {
-    /**
-     * Gets the list of entities for this variant.
-     *
-     * @return List of entities for this variant. Can be empty but never {@code null}.
-     */
-    List<NCEntity> getEntities();
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenEnricher.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCVariant.scala
similarity index 58%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenEnricher.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/NCVariant.scala
index 6d8571ca..3fa27cc2 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCTokenEnricher.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCVariant.scala
@@ -15,29 +15,17 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
-
-import java.util.List;
+package org.apache.nlpcraft
 
 /**
- *
- * @see NCEntity
- * @see NCToken
- * @see NCTokenParser
- * @see NCTokenEnricher
- * @see NCTokenValidator
- * @see NCEntityParser
- * @see NCEntityEnricher
- * @see NCEntityValidator
- * @see NCEntityMapper
- */
-public interface NCTokenEnricher extends NCLifecycle {
+  * A parsing variant is a list of entities defining one possible parsing of the input query. Note that a given
+  * input query can have more than one parsing variant. Depending on the model configuration, a user input
+  * can produce hundreds or even thousands of parsing variants.
+  *
+  * @see NCModel#onVariant(NCVariant) */
+trait NCVariant:
     /**
-     *
-     * @param req
-     * @param cfg
-     * @param toks
-     * @throws NCException
-     */
-    void enrich(NCRequest req, NCModelConfig cfg, List<NCToken> toks);
-}
+      * Gets the list of entities for this variant.
+      *
+      * @return List of entities for this variant. Can be empty but never {@code null}. */
+    def getEntities: List[NCEntity]
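
Since a variant is just an ordered list of entities, inspecting variants is straightforward. A small usage sketch built only on accessors that appear in this commit (NCVariant.getEntities, NCEntity.getId, NCEntity.mkText):

    // Prints a one-line summary of every parsing variant.
    def dumpVariants(variants: List[NCVariant]): Unit =
        for (v, i) <- variants.zipWithIndex do
            val summary = v.getEntities.map(e => s"${e.getId}('${e.mkText}')").mkString(", ")
            println(s"Variant #$i: $summary")
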
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticStemmer.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCVariantFilter.scala
similarity index 78%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticStemmer.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/NCVariantFilter.scala
index 279e4f4d..755b9524 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticStemmer.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCVariantFilter.scala
@@ -15,16 +15,11 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.entity.parser.semantic;
+package org.apache.nlpcraft
 
-/**
- * 
- */
-public interface NCSemanticStemmer {
+trait NCVariantFilter extends NCLifecycle:
     /**
-     *
-     * @param txt
-     * @return
-     */
-    String stem(String txt);
-}
+      * @param req Original request descriptor.
+      * @param cfg Model configuration.
+      * @param vars Variants to filter. */
+    def filter(req: NCRequest, cfg: NCModelConfig, vars: List[NCVariant]): List[NCVariant]
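
For illustration, a variant filter implementing the trait above can prune the potentially large variant list with a simple heuristic. The sketch below keeps only the variants carrying the maximum number of entities; it is purely illustrative and assumes NCLifecycle callbacks default to no-ops:

    import org.apache.nlpcraft.*

    // Sketch only: keeps just the variants with the maximum number of entities.
    class DensestVariantFilter extends NCVariantFilter:
        override def filter(req: NCRequest, cfg: NCModelConfig, vars: List[NCVariant]): List[NCVariant] =
            if vars.isEmpty then vars
            else
                val maxEnts = vars.map(_.getEntities.size).max
                vars.filter(_.getEntities.size == maxEnts)
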
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntent.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntent.java
similarity index 93%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntent.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntent.java
index 5df1f0d6..c98817ae 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntent.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntent.java
@@ -15,7 +15,11 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
+package org.apache.nlpcraft.annotations;
+
+import org.apache.nlpcraft.NCIntentMatch;
+import org.apache.nlpcraft.NCIntentSkip;
+import org.apache.nlpcraft.NCModel;
 
 import java.lang.annotation.Documented;
 import java.lang.annotation.Repeatable;
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentObject.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentObject.java
similarity index 96%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentObject.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentObject.java
index ae8c6cd7..b11ebc1e 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentObject.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentObject.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
+package org.apache.nlpcraft.annotations;
 
 import java.lang.annotation.Documented;
 import java.lang.annotation.Retention;
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentRef.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentRef.java
similarity index 93%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentRef.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentRef.java
index 68ee55b4..5f6af9c7 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentRef.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentRef.java
@@ -15,7 +15,11 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
+package org.apache.nlpcraft.annotations;
+
+import org.apache.nlpcraft.NCIntentMatch;
+import org.apache.nlpcraft.NCIntentSkip;
+import org.apache.nlpcraft.NCModel;
 
 import java.lang.annotation.Documented;
 import java.lang.annotation.Repeatable;
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentSample.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentSample.java
similarity index 93%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentSample.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentSample.java
index 25b55bc0..760e4e41 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentSample.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentSample.java
@@ -15,7 +15,11 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
+package org.apache.nlpcraft.annotations;
+
+import org.apache.nlpcraft.NCIntentMatch;
+import org.apache.nlpcraft.NCIntentSkip;
+import org.apache.nlpcraft.NCModel;
 
 import java.lang.annotation.Documented;
 import java.lang.annotation.Repeatable;
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentSampleRef.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentSampleRef.java
similarity index 94%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentSampleRef.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentSampleRef.java
index 3f5d27f7..f326dd4e 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentSampleRef.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentSampleRef.java
@@ -15,7 +15,11 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
+package org.apache.nlpcraft.annotations;
+
+import org.apache.nlpcraft.NCIntentMatch;
+import org.apache.nlpcraft.NCIntentSkip;
+import org.apache.nlpcraft.NCModel;
 
 import java.lang.annotation.Documented;
 import java.lang.annotation.Repeatable;
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentTerm.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentTerm.java
similarity index 90%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentTerm.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentTerm.java
index fd4a6fd8..12c34481 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentTerm.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/annotations/NCIntentTerm.java
@@ -15,7 +15,11 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft;
+package org.apache.nlpcraft.annotations;
+
+import org.apache.nlpcraft.NCIntentMatch;
+import org.apache.nlpcraft.NCIntentSkip;
+import org.apache.nlpcraft.NCModel;
 
 import java.lang.annotation.Documented;
 import java.lang.annotation.Retention;
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/ascii/NCAsciiTable.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/ascii/NCAsciiTable.scala
index 2b5747a9..d318db2b 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/ascii/NCAsciiTable.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/ascii/NCAsciiTable.scala
@@ -23,9 +23,7 @@ import org.apache.nlpcraft.internal.ascii.NCAsciiTable.*
 import org.apache.nlpcraft.internal.util.NCUtils
 
 import java.io.*
-import java.util.List as JList
 import scala.collection.mutable
-import scala.jdk.CollectionConverters.CollectionHasAsScala
 import scala.util.Using
 
 /**
@@ -206,9 +204,9 @@ class NCAsciiTable:
       *
       * @param cells Row cells.
       */
-    def addRow(cells: JList[Any]): NCAsciiTable =
+    def addRow(cells: List[Any]): NCAsciiTable =
         startRow()
-        cells.asScala.foreach(p => addRowCell(p))
+        cells.foreach(p => addRowCell(p))
         endRow()
         this
 
@@ -249,8 +247,8 @@ class NCAsciiTable:
       *
       * @param cells Header cells.
       */
-    def addHeaders(cells: JList[Any]): NCAsciiTable =
-        cells.asScala.foreach(addHeaderCell(_))
+    def addHeaders(cells: List[Any]): NCAsciiTable =
+        cells.foreach(addHeaderCell(_))
         this
 
     /**
@@ -259,8 +257,8 @@ class NCAsciiTable:
      * @param style Style to use.
       * @param cells Header cells.
       */
-    def addStyledHeaders(style: String, cells: JList[Any]): NCAsciiTable =
-        cells.asScala.foreach(addHeaderCell(style, _))
+    def addStyledHeaders(style: String, cells: List[Any]): NCAsciiTable =
+        cells.foreach(addHeaderCell(style, _))
         this
 
     // Handles the 'null' strings.
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationData.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationData.scala
index 3092964c..cca726a3 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationData.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationData.scala
@@ -24,9 +24,7 @@ import org.apache.nlpcraft.internal.util.*
 
 import java.util
 import java.util.concurrent.ConcurrentHashMap
-import java.util.function.Predicate
 import scala.collection.mutable
-import scala.jdk.CollectionConverters.*
 
 /**
   * An active conversation is an ordered set of utterances for the specific user and data model.
@@ -116,11 +114,11 @@ case class NCConversationData(
       *
       * @param p Java-side predicate.
       */
-    def clear(p: Predicate[NCEntity]): Unit =
+    def clear(p: NCEntity => Boolean): Unit =
         stm.synchronized {
-            for (item <- stm) item.holders --= item.holders.filter(h => p.test(h.entity))
+            for (item <- stm) item.holders --= item.holders.filter(h => p(h.entity))
             squeezeEntities()
-            replaceContext(ctx.filter(ent => !p.test(ent)))
+            replaceContext(ctx.filter(ent => !p(ent)))
         }
 
         logger.trace(s"STM is cleared [usrId=$usrId, mdlId=$mdlId]")
@@ -138,7 +136,7 @@ case class NCConversationData(
       * @param reqId Server request ID.
       * @param ents Entities to add to the conversation STM.
       */
-    def addEntities(reqId: String, ents: Seq[NCEntity]): Unit =
+    def addEntities(reqId: String, ents: List[NCEntity]): Unit =
         stm.synchronized {
             depth = 0
             lastEnts += ents // Last used entities processing.
@@ -159,7 +157,7 @@ case class NCConversationData(
                 stepLogEntity(ents)
 
                 val registered = mutable.HashSet.empty[Seq[String]]
-                for (item <- stm.reverse; (gs, hs) <- item.holders.groupBy(t => if (t.entity.getGroups != null) t.entity.getGroups.asScala else Seq.empty))
+                for (item <- stm.reverse; (gs, hs) <- item.holders.groupBy(t => if t.entity.getGroups != null then t.entity.getGroups else Seq.empty))
                     val grps = gs.toSeq.sorted
 
                     // Reversed iteration.
@@ -192,7 +190,7 @@ case class NCConversationData(
             val tbl = NCAsciiTable("Entity ID", "Groups", "Request ID")
             ctx.foreach(ent => tbl += (
                 ent.getId,
-                ent.getGroups.asScala.mkString(", "),
+                ent.getGroups.mkString(", "),
                 ent.getRequestId
             ))
             logger.info(s"Current STM for [$z]:\n${tbl.toString()}")
@@ -201,9 +199,9 @@ case class NCConversationData(
       *
       * @return
       */
-    def getEntities: Seq[NCEntity] = stm.synchronized {
+    def getEntities: List[NCEntity] = stm.synchronized {
         val reqIds = ctx.map(_.getRequestId).distinct.zipWithIndex.toMap
-        ctx.groupBy(_.getRequestId).toSeq.sortBy(p => reqIds(p._1)).reverse.flatMap(_._2)
+        ctx.groupBy(_.getRequestId).toList.sortBy(p => reqIds(p._1)).reverse.flatMap(_._2)
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationManager.scala
index ce9d66c8..1af4e448 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationManager.scala
@@ -22,7 +22,6 @@ import org.apache.nlpcraft.*
 import org.apache.nlpcraft.internal.util.NCUtils
 
 import scala.collection.*
-import scala.jdk.CollectionConverters.*
 
 /**
   * Conversation manager.
@@ -42,7 +41,7 @@ class NCConversationManager(cfg: NCModelConfig) extends LazyLogging:
         convs.synchronized {
             val v = convs.getOrElseUpdate(
                 usrId,
-                Value(NCConversationData(usrId, cfg.getId, cfg.getConversationTimeout, cfg.getConversationDepth))
+                Value(NCConversationData(usrId, cfg.id, cfg.conversationTimeout, cfg.conversationDepth))
             )
 
             v.tstamp = NCUtils.nowUtcMs()
@@ -60,7 +59,7 @@ class NCConversationManager(cfg: NCModelConfig) extends LazyLogging:
         val delKeys = mutable.HashSet.empty[String]
 
         for ((key, value) <- convs)
-            if value.tstamp < now - cfg.getConversationTimeout then
+            if value.tstamp < now - cfg.conversationTimeout then
                 value.conv.clear()
                 delKeys += key
 
@@ -74,7 +73,7 @@ class NCConversationManager(cfg: NCModelConfig) extends LazyLogging:
       * @return
       */
     def start(): Unit =
-        gc = NCUtils.mkThread("conv-mgr-gc", cfg.getId) { t =>
+        gc = NCUtils.mkThread("conv-mgr-gc", cfg.id) { t =>
             while (!t.isInterrupted)
                 try
                     convs.synchronized {
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/dialogflow/NCDialogFlowManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/dialogflow/NCDialogFlowManager.scala
index e93b1fce..f9342bd7 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/dialogflow/NCDialogFlowManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/dialogflow/NCDialogFlowManager.scala
@@ -24,8 +24,6 @@ import org.apache.nlpcraft.internal.util.NCUtils
 
 import java.text.DateFormat
 import java.time.format.DateTimeFormatter
-import java.util
-import java.util.*
 import scala.collection.*
 
 /**
@@ -42,7 +40,7 @@ class NCDialogFlowManager(cfg: NCModelConfig) extends LazyLogging:
     private def clearForTimeout(): Long =
         require(Thread.holdsLock(flow))
 
-        val timeout = cfg.getConversationTimeout
+        val timeout = cfg.conversationTimeout
         val bound = NCUtils.now() - timeout
         var next = Long.MaxValue
 
@@ -79,7 +77,7 @@ class NCDialogFlowManager(cfg: NCModelConfig) extends LazyLogging:
       * @return
       */
     def start(): Unit =
-        gc = NCUtils.mkThread("dialog-mgr-gc", cfg.getId) { t =>
+        gc = NCUtils.mkThread("dialog-mgr-gc", cfg.id) { t =>
             while (!t.isInterrupted)
                 try
                     flow.synchronized {
@@ -144,10 +142,10 @@ class NCDialogFlowManager(cfg: NCModelConfig) extends LazyLogging:
       * @param usrId User ID.
       * @return Dialog flow.
       */
-    def getDialogFlow(usrId: String): Seq[NCDialogFlowItem] =
+    def getDialogFlow(usrId: String): List[NCDialogFlowItem] =
         flow.synchronized { flow.get(usrId) } match
-            case Some(buf) => buf.toSeq
-            case None => Seq.empty
+            case Some(buf) => buf.toList
+            case None => List.empty
 
     /**
       * Prints out ASCII table for current dialog flow.
@@ -169,11 +167,11 @@ class NCDialogFlowManager(cfg: NCModelConfig) extends LazyLogging:
                 itm.getIntentMatch.getIntentId,
                 itm.getRequest.getRequestId,
                 itm.getRequest.getText,
-                DateFormat.getDateTimeInstance.format(new Date(itm.getRequest.getReceiveTimestamp))
+                DateFormat.getDateTimeInstance.format(new java.util.Date(itm.getRequest.getReceiveTimestamp))
             )
         }
 
-        logger.info(s"""Current dialog flow (oldest first) for [mdlId=${cfg.getId}, usrId=$usrId]\n${tbl.toString()}""")
+        logger.info(s"""Current dialog flow (oldest first) for [mdlId=${cfg.id}, usrId=$usrId]\n${tbl.toString()}""")
 
     /**
       * Clears dialog history for given user ID.
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineManager.scala
index 74c3f592..78d9e5df 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineManager.scala
@@ -30,11 +30,9 @@ import java.util
 import java.util.concurrent.*
 import java.util.concurrent.atomic.*
 import java.util.function.Predicate
-import java.util.{ArrayList, Objects, UUID, Collections as JColls, List as JList, Map as JMap}
-import scala.collection.{immutable, mutable}
+import java.util.{Objects, UUID}
 import scala.concurrent.ExecutionContext
 import scala.jdk.CollectionConverters.*
-import scala.jdk.OptionConverters.*
 
 /**
   *
@@ -42,7 +40,7 @@ import scala.jdk.OptionConverters.*
   * @param variants
   * @param tokens
   */
-case class NCPipelineData(request: NCRequest, variants: Seq[NCVariant], tokens: JList[NCToken])
+case class NCPipelineData(request: NCRequest, variants: List[NCVariant], tokens: List[NCToken])
 
 /**
   *
@@ -58,7 +56,7 @@ class NCModelPipelineManager(cfg: NCModelConfig, pipeline: NCPipeline) extends L
     private val tokVals = nvl(pipeline.getTokenValidators)
     private val entVals = nvl(pipeline.getEntityValidators)
     private val entMappers = nvl(pipeline.getEntityMappers)
-    private val varFilterOpt = pipeline.getVariantFilter.toScala
+    private val varFilterOpt = pipeline.getVariantFilter
 
     private val allComps: Seq[NCLifecycle] =
         tokEnrichers ++ entEnrichers ++ entParsers ++ tokVals ++ entVals ++ entMappers ++ varFilterOpt.toSeq
@@ -75,7 +73,7 @@ class NCModelPipelineManager(cfg: NCModelConfig, pipeline: NCPipeline) extends L
                 act(p)
                 logger.info(s"Component $actVerb: '${p.getClass.getName}'")
             }
-        )*)(ExecutionContext.Implicits.global)
+        ))(ExecutionContext.Implicits.global)
 
     /**
       *
@@ -83,7 +81,7 @@ class NCModelPipelineManager(cfg: NCModelConfig, pipeline: NCPipeline) extends L
       * @tparam T
       * @return
       */
-    private def nvl[T](list: JList[T]): Seq[T] = if list == null then Seq.empty else list.asScala.toSeq
+    private def nvl[T](list: List[T]): Seq[T] = if list == null then List.empty else list
 
     /**
       *
@@ -91,8 +89,8 @@ class NCModelPipelineManager(cfg: NCModelConfig, pipeline: NCPipeline) extends L
       * @return
       */
     private def mkProps(m: NCPropertyMap): String =
-        if m.keysSet().isEmpty then ""
-        else m.keysSet().asScala.toSeq.sorted.map(p => s"$p=${m.get[Any](p)}").mkString("{", ", ", "}")
+        if m.keysSet.isEmpty then ""
+        else m.keysSet.toSeq.sorted.map(p => s"$p=${m.get[Any](p)}").mkString("{", ", ", "}")
 
     /**
       *
@@ -101,7 +99,7 @@ class NCModelPipelineManager(cfg: NCModelConfig, pipeline: NCPipeline) extends L
       * @param usrId
       * @return
       */
-    def prepare(txt: String, data: JMap[String, AnyRef], usrId: String): NCPipelineData =
+    def prepare(txt: String, data: Map[String, Any], usrId: String): NCPipelineData =
         require(txt != null && usrId != null)
 
         /**
@@ -109,25 +107,25 @@ class NCModelPipelineManager(cfg: NCModelConfig, pipeline: NCPipeline) extends L
           * @param ents
           * @return
           */
-        def newVariant(ents: Seq[NCEntity]): NCVariant =
+        def newVariant(ents: List[NCEntity]): NCVariant =
             new NCVariant:
-                override val getEntities: JList[NCEntity] = ents.asJava
+                override val getEntities: List[NCEntity] = ents
 
         val req: NCRequest = new NCRequest:
             override val getUserId: String = usrId
             override val getRequestId: String = UUID.randomUUID().toString
             override val getText: String = txt
             override val getReceiveTimestamp: Long = System.currentTimeMillis()
-            override val getRequestData: JMap[String, AnyRef] = data
+            override val getRequestData: Map[String, Any] = data
 
         val toks = tokParser.tokenize(txt)
 
-        if toks.size() > 0 then
+        if toks.nonEmpty then
             for (e <- tokEnrichers) e.enrich(req, cfg, toks)
 
         val tbl = NCAsciiTable("Text", "Start index", "End index", "Properties")
 
-        for (t <- toks.asScala)
+        for (t <- toks)
             tbl += (
                 t.getText,
                 t.getStartCharIndex,
@@ -139,50 +137,48 @@ class NCModelPipelineManager(cfg: NCModelConfig, pipeline: NCPipeline) extends L
         // NOTE: we run validators regardless of whether token list is empty.
         for (v <- tokVals) v.validate(req, cfg, toks)
 
-        var entsList: util.List[NCEntity] = new util.ArrayList[NCEntity]()
+        var entities: List[NCEntity] = List.empty
 
-        for (p <- entParsers) entsList.addAll(p.parse(req, cfg, toks))
+        for (p <- entParsers) entities ++= p.parse(req, cfg, toks)
 
-        if entsList.size() > 0 then
-            for (e <- entEnrichers) e.enrich(req, cfg, entsList)
+        if entities.nonEmpty then
+            for (e <- entEnrichers) e.enrich(req, cfg, entities)
 
         // NOTE: we run validators regardless of whether entity list is empty.
-        for (v <- entVals) v.validate(req, cfg, entsList)
+        for (v <- entVals) v.validate(req, cfg, entities)
 
         for (m <- entMappers)
-            entsList = m.map(req, cfg, entsList)
-            if entsList == null then E("Entity mapper cannot return null values.")
-
-        val entities = entsList.asScala.toSeq
+            entities = m.map(req, cfg, entities)
+            if entities == null then E("Entity mapper cannot return null values.")
 
         val overlapEnts: Seq[Set[NCEntity]] =
-            toks.asScala.
+            toks.
             // Looks at each token.
             map(t => t.getIndex -> entities.filter(_.getTokens.contains(t))).
             // Collects all overlapped entities.
-            map { case (_, ents) => if (ents.sizeIs > 1) ents.toSet else Set.empty }.filter(_.nonEmpty).toSeq
+            map { case (_, ents) => if ents.sizeIs > 1 then ents.toSet else Set.empty }.filter(_.nonEmpty)
 
-        var variants: JList[NCVariant] =
+        var variants: List[NCVariant] =
             if overlapEnts.nonEmpty then
                 NCModelPipelineHelper.
-                    findCombinations(overlapEnts.map(_.asJava).asJava, pool).
-                    asScala.map(_.asScala).map(delComb =>
+                    findCombinations(overlapEnts.map(_.asJava).asJava, pool).asScala.
+                    map(_.asScala).map(delComb =>
                         val delSet = delComb.toSet
                         newVariant(entities.filter(!delSet.contains(_)))
-                    ).asJava
+                    ).toList
             else
-                Seq(newVariant(entities)).asJava
+                List(newVariant(entities))
 
         if varFilterOpt.isDefined then variants = varFilterOpt.get.filter(req, cfg, variants)
 
         // Skips empty variants.
-        val vrns = variants.asScala.toSeq.filter(!_.getEntities.isEmpty)
+        val vrns = variants.filter(_.getEntities.nonEmpty)
 
         for ((v, i) <- vrns.zipWithIndex)
             val tbl = NCAsciiTable("EntityId", "Tokens", "Tokens Position", "Properties")
 
-            for (e <- v.getEntities.asScala)
-                val toks = e.getTokens.asScala
+            for (e <- v.getEntities)
+                val toks = e.getTokens
                 tbl += (
                     e.getId,
                     toks.map(_.getText).mkString("|"),
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelScanner.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelScanner.scala
index 5cde621a..d1eb49ca 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelScanner.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelScanner.scala
@@ -19,6 +19,7 @@ package org.apache.nlpcraft.internal.impl
 
 import com.typesafe.scalalogging.LazyLogging
 import org.apache.nlpcraft.*
+import org.apache.nlpcraft.annotations.*
 import org.apache.nlpcraft.internal.intent.*
 import org.apache.nlpcraft.internal.intent.compiler.*
 import org.apache.nlpcraft.internal.makro.NCMacroParser
@@ -43,9 +44,6 @@ import scala.util.Using
   */
 case class NCModelIntent(intent: NCIDLIntent, function: NCIntentMatch => NCResult, samples: Seq[Seq[String]])
 
-/**
-  *
-  */
 object NCModelScanner extends LazyLogging:
     private final val CLS_INTENT = classOf[NCIntent]
     private final val CLS_INTENT_REF = classOf[NCIntentRef]
@@ -147,38 +145,38 @@ object NCModelScanner extends LazyLogging:
       * @param ctxFirstPrm
       * @return
       */
-    private def prepareParams(cfg: NCModelConfig, mtd: Method, prmClss: Seq[Class[_]], argsList: Seq[util.List[NCEntity]], ctxFirstPrm: Boolean): Seq[AnyRef] =
+    private def prepareParams(cfg: NCModelConfig, mtd: Method, prmClss: List[Class[_]], argsList: List[List[NCEntity]], ctxFirstPrm: Boolean): Seq[AnyRef] =
         prmClss.zip(argsList).zipWithIndex.map { case ((paramCls, argList), i) =>
             def mkArg(): String = arg2Str(mtd, i, ctxFirstPrm)
 
-            lazy val z = s"mdlId=${cfg.getId}, type=$paramCls, arg=${mkArg()}"
-            val entsCnt = argList.size()
+            lazy val z = s"mdlId=${cfg.id}, type=$paramCls, arg=${mkArg()}"
+            val entsCnt = argList.size
 
             // Single entity.
             if paramCls == CLS_ENTITY then
                 if entsCnt != 1 then E(s"Expected single entity (found $entsCnt) in $IT annotated argument [$z]")
 
-                argList.get(0)
+                argList.head
             // Array of entities.
             else if paramCls.isArray then
-                argList.asScala.toArray
+                argList.toArray
             // Scala and Java list of entities.
             else if paramCls == CLS_SCALA_SEQ then
-                argList.asScala.toSeq
+                argList
             else if paramCls == CLS_SCALA_LST then
-                argList.asScala.toList
+                argList
             else if paramCls == CLS_JAVA_LST then
                 argList
             // Scala and java optional entity.
             else if paramCls == CLS_SCALA_OPT then
                 entsCnt match
                     case 0 => None
-                    case 1 => Option(argList.get(0))
+                    case 1 => Option(argList.head)
                     case _ => E(s"Too many entities ($entsCnt) for 'scala.Option[_]' $IT annotated argument [$z]")
             else if paramCls == CLS_JAVA_OPT then
                 entsCnt match
                     case 0 => util.Optional.empty()
-                    case 1 => util.Optional.of(argList.get(0))
+                    case 1 => util.Optional.of(argList.head)
                     case _ => E(s"Too many entities ($entsCnt) for 'java.util.Optional' $IT annotated argument [$z]")
             else
                 // All allowed arguments types already checked.
@@ -196,7 +194,7 @@ object NCModelScanner extends LazyLogging:
     private def invoke(cfg: NCModelConfig, mtd: Method, obj: AnyRef, args: scala.Array[AnyRef]): NCResult =
         val methodObj = if Modifier.isStatic(mtd.getModifiers) then null else obj
         var flag = mtd.canAccess(methodObj)
-        lazy val z = s"mdlId=${cfg.getId}, callback=${method2Str(mtd)}"
+        lazy val z = s"mdlId=${cfg.id}, callback=${method2Str(mtd)}"
         try
             if !flag then
                 mtd.setAccessible(true)
@@ -228,7 +226,7 @@ object NCModelScanner extends LazyLogging:
         lazy val fStr = field2Str(field)
         val fieldObj = if Modifier.isStatic(field.getModifiers) then null else obj
         var flag = field.canAccess(fieldObj)
-        lazy val z = s"mdlId=${cfg.getId}, field=$fStr"
+        lazy val z = s"mdlId=${cfg.id}, field=$fStr"
         val res =
             try
                 if !flag then
@@ -335,7 +333,7 @@ object NCModelScanner extends LazyLogging:
         argClasses.zip(paramGenTypes).zipWithIndex.foreach { case ((argClass, paramGenType), i) =>
             def mkArg(): String = arg2Str(mtd, i, ctxFirstParam)
 
-            lazy val z = s"mdlId=${cfg.getId}, type=${class2Str(argClass)}, arg=${mkArg()}"
+            lazy val z = s"mdlId=${cfg.id}, type=${class2Str(argClass)}, arg=${mkArg()}"
 
             // Entity.
             if argClass == CLS_ENTITY then () // No-op.
@@ -395,7 +393,7 @@ object NCModelScanner extends LazyLogging:
             def mkArg(): String = arg2Str(mtd, i, ctxFirstParam)
 
             val p1 = "its $IT annotated argument"
-            val p2 = s"mdlId=${cfg.getId}, arg=${mkArg()}"
+            val p2 = s"mdlId=${cfg.id}, arg=${mkArg()}"
 
             // Argument is single entity but defined as not single entity.
             if cls == CLS_ENTITY && (min != 1 || max != 1) then
@@ -420,29 +418,29 @@ object NCModelScanner extends LazyLogging:
       * @return
       */
     private def prepareCallback(cfg: NCModelConfig, method: Method, obj: AnyRef, intent: NCIDLIntent): NCIntentMatch => NCResult =
-        lazy val z = s"mdlId=${cfg.getId}, intentId=${intent.id}, type=${class2Str(method.getReturnType)}, callback=${method2Str(method)}"
+        lazy val z = s"mdlId=${cfg.id}, intentId=${intent.id}, type=${class2Str(method.getReturnType)}, callback=${method2Str(method)}"
 
         // Checks method result type.
         if method.getReturnType != CLS_QRY_RES then E(s"Unexpected result type for @NCIntent annotated method [$z]")
 
-        val allParamTypes = method.getParameterTypes.toSeq
+        val allParamTypes = method.getParameterTypes.toList
         val ctxFirstParam = allParamTypes.nonEmpty && allParamTypes.head == CLS_INTENT_MATCH
 
-        def getSeq[T](data: Seq[T]): Seq[T] =
-            if data == null then Seq.empty
+        def getList[T](data: List[T]): List[T] =
+            if data == null then List.empty
             else if ctxFirstParam then data.drop(1)
             else data
 
         val allAnns = method.getParameterAnnotations
-        val tokParamAnns = getSeq(allAnns.toIndexedSeq).filter(_ != null)
-        val tokParamTypes = getSeq(allParamTypes)
+        val tokParamAnns = getList(allAnns.toList).filter(_ != null)
+        val tokParamTypes = getList(allParamTypes)
 
         // Checks entities parameters annotations count.
         if tokParamAnns.sizeIs != tokParamTypes.length then
             E(s"Unexpected annotations count for $I annotated method [count=${tokParamAnns.size}, $z]")
 
         // Gets terms IDs.
-        val termIds = tokParamAnns.toList.zipWithIndex.map {
+        val termIds = tokParamAnns.zipWithIndex.map {
             case (annArr, idx) =>
                 def mkArg(): String = arg2Str(method, idx, ctxFirstParam)
 
@@ -472,7 +470,7 @@ object NCModelScanner extends LazyLogging:
             E(s"Unknown term ID in $IT annotation [termId=${invalidIds.head}, $z]")
 
         // Checks parameters.
-        val paramGenTypes = getSeq(method.getGenericParameterTypes.toIndexedSeq)
+        val paramGenTypes = getList(method.getGenericParameterTypes.toList)
         checkTypes(cfg, method, tokParamTypes, paramGenTypes, ctxFirstParam)
 
         // Checks limits.
@@ -482,7 +480,7 @@ object NCModelScanner extends LazyLogging:
         (ctx: NCIntentMatch) =>
             val args = mutable.Buffer.empty[AnyRef]
             if ctxFirstParam then args += ctx
-            args ++= prepareParams(cfg, method, tokParamTypes, termIds.map(ctx.getTermEntities), ctxFirstParam)
+            args ++= prepareParams(cfg, method, tokParamTypes, termIds.map(id => ctx.getTermEntities(id)), ctxFirstParam)
             invoke(cfg, method, obj, args.toArray)
 
     /**
@@ -493,7 +491,7 @@ object NCModelScanner extends LazyLogging:
         require(mdl != null)
 
         val cfg = mdl.getConfig
-        lazy val z = s"mdlId=${cfg.getId}"
+        lazy val z = s"mdlId=${cfg.id}"
         val intentsMtds = mutable.HashMap.empty[Method, IntentHolder]
         val intentDecls = mutable.HashMap.empty[String, NCIDLIntent]
         val objs = mutable.Buffer.empty[AnyRef]
@@ -512,7 +510,7 @@ object NCModelScanner extends LazyLogging:
 
         def processClassAnnotations(cls: Class[_]): Unit =
             if cls != null && processed.add(cls) then
-                for (ann <- cls.getAnnotationsByType(CLS_INTENT); intent <- NCIDLCompiler.compile(ann.value, cfg, class2Str(cls)))
+                for (ann <- cls.getAnnotationsByType(CLS_INTENT).reverse; intent <- NCIDLCompiler.compile(ann.value, cfg, class2Str(cls)))
                     addDecl(intent)
 
                 processClassAnnotations(cls.getSuperclass)
@@ -570,6 +568,6 @@ object NCModelScanner extends LazyLogging:
                 case ids if ids.nonEmpty => E(s"Duplicate intent IDs [$z, ids=${col2Str(ids)}]")
                 case _ => // No-op.
         else
-            logger.warn(s"Model has no intent: ${cfg.getId}")
+            logger.warn(s"Model has no intent: ${cfg.id}")
 
         intents.map(i => NCModelIntent(i.intent, i.function, samples.getOrElse(i.method, Map.empty).getOrElse(i.intent.id, Seq.empty))).toSeq
\ No newline at end of file
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala
index e41aaee3..0ac72876 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala
@@ -19,12 +19,10 @@ package org.apache.nlpcraft.internal.intent
 
 import org.apache.nlpcraft.*
 
-import scala.jdk.CollectionConverters.*
-
 /**
   *
   * @param impl
   * @param index
   */
 case class NCIDLEntity(impl: NCEntity, index: Int):
-    lazy val text: String = impl.mkText()
+    lazy val text: String = impl.mkText
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/NCIDLCodeGenerator.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/NCIDLCodeGenerator.scala
index aeafbb04..17456e5e 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/NCIDLCodeGenerator.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/NCIDLCodeGenerator.scala
@@ -17,21 +17,19 @@
 
 package org.apache.nlpcraft.internal.intent.compiler
 
+import org.antlr.v4.runtime.ParserRuleContext as PRC
+import org.antlr.v4.runtime.tree.TerminalNode as TN
+import org.apache.commons.collections4.CollectionUtils
 import org.apache.commons.lang3.StringUtils
 import org.apache.nlpcraft.*
+import org.apache.nlpcraft.internal.intent.{NCIDLStackItem as Z, *}
 import org.apache.nlpcraft.internal.util.*
-import org.antlr.v4.runtime.{ParserRuleContext => PRC}
-import org.antlr.v4.runtime.tree.{TerminalNode => TN}
-import org.apache.commons.collections4.CollectionUtils
-import org.apache.nlpcraft.internal.intent.*
-import org.apache.nlpcraft.internal.intent.{NCIDLStackItem => Z}
 
-import java.lang.{Byte => JByte, Double => JDouble, Float => JFloat, Integer => JInt, Long => JLong, Short => JShort}
-import java.util.{Calendar, Collections, Collection => JColl, List => JList, Map => JMap}
+import java.lang.{Byte as JByte, Double as JDouble, Float as JFloat, Integer as JInt, Long as JLong, Short as JShort}
 import java.time.temporal.IsoFields
-import java.time.{LocalDate, LocalTime}
+import java.time.*
 import java.util
-
+import java.util.{Calendar, Collections, Collection as JColl, List as JList, Map as JMap}
 import scala.jdk.CollectionConverters.*
 
 trait NCIDLCodeGenerator:
@@ -913,17 +911,17 @@ trait NCIDLCodeGenerator:
                 case "or_else" => doOrElse()
 
                 // Model configuration.
-                case "mdl_id" => z0(() => Z(idlCtx.mdlCfg.getId, 0))
-                case "mdl_name" => z0(() => Z(idlCtx.mdlCfg.getName, 0))
-                case "mdl_ver" => z0(() => Z(idlCtx.mdlCfg.getVersion, 0))
-                case "mdl_origin" => z0(() => Z(idlCtx.mdlCfg.getOrigin, 0))
+                case "mdl_id" => z0(() => Z(idlCtx.mdlCfg.id, 0))
+                case "mdl_name" => z0(() => Z(idlCtx.mdlCfg.name, 0))
+                case "mdl_ver" => z0(() => Z(idlCtx.mdlCfg.version, 0))
+                case "mdl_origin" => z0(() => Z(idlCtx.mdlCfg.origin, 0))
 
                 // Entity functions.
                 case "ent_id" => arg1Tok() match { case x => stack.push(() => Z(toEntity(x().value).impl.getId, 1)) }
                 case "ent_index" => arg1Tok() match { case x => stack.push(() => Z(toEntity(x().value).index, 1)) }
                 case "ent_text" => arg1Tok() match { case x => stack.push(() => Z(toEntity(x().value).text, 1)) }
                 case "ent_count" => checkAvail(); z0(() => Z(idlCtx.entities.size, 0))
-                case "ent_groups" => arg1Tok() match { case x => stack.push(() => Z(JList.copyOf(toEntity(x().value).impl.getGroups), 1)) }
+                case "ent_groups" => arg1Tok() match { case x => stack.push(() => Z(JList.copyOf(toEntity(x().value).impl.getGroups.asJava), 1)) }
                 case "ent_all" => checkAvail(); z0(() => Z(idlCtx.entities.asJava, 0))
                 case "ent_all_for_id" => checkAvail(); doForAll((e, id) => e.impl.getId == id)
                 case "ent_all_for_group" => checkAvail(); doForAll((e, grp) => e.impl.getGroups.contains(grp))
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/NCIDLCompiler.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/NCIDLCompiler.scala
index f7671ec9..fcd121bf 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/NCIDLCompiler.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/NCIDLCompiler.scala
@@ -118,7 +118,7 @@ object NCIDLCompiler extends LazyLogging:
                     case b: java.lang.Boolean if b != null => b
                     case _ => SE(s"Expecting boolean value for intent option: $k")(ctx)
 
-            import NCIDLIntentOptions._
+            import NCIDLIntentOptions.*
 
             for ((k, v) <- json)
                 if k == JSON_ORDERED then opts.ordered = boolVal(k, v)
@@ -181,12 +181,12 @@ object NCIDLCompiler extends LazyLogging:
 
         override def exitFragId(ctx: IDP.FragIdContext): Unit =
             fragId = ctx.id().getText
-            if NCIDLGlobal.getFragment(mdlCfg.getId, fragId).isDefined then SE(s"Duplicate fragment ID: $fragId")(ctx.id())
+            if NCIDLGlobal.getFragment(mdlCfg.id, fragId).isDefined then SE(s"Duplicate fragment ID: $fragId")(ctx.id())
 
         override def exitFragRef(ctx: IDP.FragRefContext): Unit =
             val id = ctx.id().getText
 
-            NCIDLGlobal.getFragment(mdlCfg.getId, id) match
+            NCIDLGlobal.getFragment(mdlCfg.id, id) match
                 case Some(frag) =>
                     val meta = if fragMeta == null then Map.empty[String, Any] else fragMeta
                     for (fragTerm <- frag.terms)
@@ -199,7 +199,7 @@ object NCIDLCompiler extends LazyLogging:
         override def exitFlowDecl(ctx: IDP.FlowDeclContext): Unit =
             val regex = NCUtils.trimQuotes(ctx.qstring().getText)
 
-            if regex != null && regex.length > 2 then flowRegex = if (regex.nonEmpty) Option(regex) else None
+            if regex != null && regex.length > 2 then flowRegex = if regex.nonEmpty then Option(regex) else None
             if flowRegex.isDefined then // Pre-check.
                 try Pattern.compile(flowRegex.get)
                 catch case e: PatternSyntaxException => SE(s"${e.getDescription} in intent flow regex '${e.getPattern}' near index ${e.getIndex}.")(ctx.qstring())
@@ -256,7 +256,7 @@ object NCIDLCompiler extends LazyLogging:
             }
 
         override def exitFrag(ctx: IDP.FragContext): Unit =
-            NCIDLGlobal.addFragment(mdlCfg.getId, NCIDLFragment(fragId, terms.toList))
+            NCIDLGlobal.addFragment(mdlCfg.id, NCIDLFragment(fragId, terms.toList))
             terms.clear()
             fragId = null
 
@@ -393,8 +393,8 @@ object NCIDLCompiler extends LazyLogging:
             case s: String => s"$s."
 
         s"""IDL $kind error in '$srcName' at line $line - $aMsg
-          |-- Model ID: ${mdlCfg.getId}
-          |-- Model origin: ${mdlCfg.getOrigin}
+          |-- Model ID: ${mdlCfg.id}
+          |-- Model origin: ${mdlCfg.origin}
           |-- Intent origin: $origin
           |--
           |-- Line:  ${hold.origStr}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/antlr4/NCIDLLexer.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/antlr4/NCIDLLexer.java
index c6e7146c..2ef3f125 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/antlr4/NCIDLLexer.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/antlr4/NCIDLLexer.java
@@ -1,13 +1,16 @@
 // Generated from C:/Users/Nikita Ivanov/Documents/GitHub/incubator-nlpcraft/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/antlr4\NCIDL.g4 by ANTLR 4.9.2
 package org.apache.nlpcraft.internal.intent.compiler.antlr4;
-import org.antlr.v4.runtime.Lexer;
+
 import org.antlr.v4.runtime.CharStream;
-import org.antlr.v4.runtime.Token;
-import org.antlr.v4.runtime.TokenStream;
-import org.antlr.v4.runtime.*;
-import org.antlr.v4.runtime.atn.*;
+import org.antlr.v4.runtime.Lexer;
+import org.antlr.v4.runtime.RuntimeMetaData;
+import org.antlr.v4.runtime.Vocabulary;
+import org.antlr.v4.runtime.VocabularyImpl;
+import org.antlr.v4.runtime.atn.ATN;
+import org.antlr.v4.runtime.atn.ATNDeserializer;
+import org.antlr.v4.runtime.atn.LexerATNSimulator;
+import org.antlr.v4.runtime.atn.PredictionContextCache;
 import org.antlr.v4.runtime.dfa.DFA;
-import org.antlr.v4.runtime.misc.*;
 
 @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
 public class NCIDLLexer extends Lexer {
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/antlr4/NCIDLParser.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/antlr4/NCIDLParser.java
index 1cedea16..065c526d 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/antlr4/NCIDLParser.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/antlr4/NCIDLParser.java
@@ -1,13 +1,26 @@
 // Generated from C:/Users/Nikita Ivanov/Documents/GitHub/incubator-nlpcraft/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/compiler/antlr4\NCIDL.g4 by ANTLR 4.9.2
 package org.apache.nlpcraft.internal.intent.compiler.antlr4;
-import org.antlr.v4.runtime.atn.*;
+
+import org.antlr.v4.runtime.FailedPredicateException;
+import org.antlr.v4.runtime.NoViableAltException;
+import org.antlr.v4.runtime.Parser;
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.antlr.v4.runtime.RecognitionException;
+import org.antlr.v4.runtime.RuleContext;
+import org.antlr.v4.runtime.RuntimeMetaData;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.runtime.TokenStream;
+import org.antlr.v4.runtime.Vocabulary;
+import org.antlr.v4.runtime.VocabularyImpl;
+import org.antlr.v4.runtime.atn.ATN;
+import org.antlr.v4.runtime.atn.ATNDeserializer;
+import org.antlr.v4.runtime.atn.ParserATNSimulator;
+import org.antlr.v4.runtime.atn.PredictionContextCache;
 import org.antlr.v4.runtime.dfa.DFA;
-import org.antlr.v4.runtime.*;
-import org.antlr.v4.runtime.misc.*;
-import org.antlr.v4.runtime.tree.*;
+import org.antlr.v4.runtime.tree.ParseTreeListener;
+import org.antlr.v4.runtime.tree.TerminalNode;
+
 import java.util.List;
-import java.util.Iterator;
-import java.util.ArrayList;
 
 @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
 public class NCIDLParser extends Parser {
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverManager.scala
index c2c3f067..8355b07b 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverManager.scala
@@ -25,11 +25,9 @@ import org.apache.nlpcraft.internal.dialogflow.NCDialogFlowManager
 import org.apache.nlpcraft.internal.intent.*
 
 import java.util.function.Function
-import java.util.{Collections, List as JList}
 import scala.annotation.targetName
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
-import scala.jdk.CollectionConverters.*
 import scala.language.postfixOps
 
 /**
@@ -47,7 +45,7 @@ object NCIntentSolverManager:
       *
       * @param entities
       */
-    private case class IntentSolverVariant(entities: Seq[NCEntity]) extends Ordered[IntentSolverVariant]:
+    private case class IntentSolverVariant(entities: List[NCEntity]) extends Ordered[IntentSolverVariant]:
         private lazy val weights = calcWeight()
 
         private def calcSparsity(toks: Seq[NCToken]): Int =
@@ -55,7 +53,7 @@ object NCIntentSolverManager:
             idxs.zipWithIndex.tail.map { (v, i) => Math.abs(v - idxs(i - 1)) }.sum - idxs.length + 1
 
         private def calcWeight(): Seq[Int] =
-            val toks: Seq[Seq[NCToken]] = entities.map(_.getTokens.asScala.toSeq)
+            val toks: Seq[Seq[NCToken]] = entities.map(_.getTokens.toSeq)
 
             val toksCnt = toks.map(_.size).sum
             val avgToksPerEntity = if toksCnt > 0 then Math.round((entities.size.toFloat / toksCnt) * 100) else 0
@@ -78,7 +76,7 @@ object NCIntentSolverManager:
       * @param termId
       * @param entities
       */
-    private case class IntentTermEntities(termId: Option[String], entities: Seq[NCEntity])
+    private case class IntentTermEntities(termId: Option[String], entities: List[NCEntity])
 
     /**
       *
@@ -88,8 +86,8 @@ object NCIntentSolverManager:
       */
     private case class CallbackDataImpl(
         getIntentId: String,
-        getCallbackArguments: JList[JList[NCEntity]],
-        getCallback: Function[JList[JList[NCEntity]], NCResult]
+        getCallbackArguments: List[List[NCEntity]],
+        getCallback: List[List[NCEntity]] => NCResult
     ) extends NCCallbackData
 
     /**
@@ -100,7 +98,7 @@ object NCIntentSolverManager:
       * @param variant
       * @param variantIdx
       */
-    private case class IntentSolverResult(intentId: String, fn: NCIntentMatch => NCResult, groups: Seq[IntentTermEntities], variant: IntentSolverVariant, variantIdx: Int)
+    private case class IntentSolverResult(intentId: String, fn: NCIntentMatch => NCResult, groups: List[IntentTermEntities], variant: IntentSolverVariant, variantIdx: Int)
 
     /**
       * NOTE: not thread-safe.
@@ -198,8 +196,8 @@ object NCIntentSolverManager:
       * @param usedEntities
       * @param weight
       */
-    private case class TermMatch(termId: Option[String], usedEntities: Seq[IntentEntity], weight: Weight):
-        private lazy val maxIndex: Int = usedEntities.map(_.entity.getTokens.asScala.map(_.getIndex).max).max
+    private case class TermMatch(termId: Option[String], usedEntities: List[IntentEntity], weight: Weight):
+        private lazy val maxIndex: Int = usedEntities.map(_.entity.getTokens.map(_.getIndex).max).max
 
         def after(tm: TermMatch): Boolean = maxIndex > tm.maxIndex
 
@@ -207,7 +205,7 @@ object NCIntentSolverManager:
       *
       * @param entities
       */
-    private case class PredicateMatch(entities: Seq[IntentEntity], weight: Weight)
+    private case class PredicateMatch(entities: List[IntentEntity], weight: Weight)
 
     /**
       *
@@ -216,7 +214,7 @@ object NCIntentSolverManager:
       */
     private case class TermEntitiesGroup(
         term: NCIDLTerm,
-        usedEntities: Seq[IntentEntity]
+        usedEntities: List[IntentEntity]
     )
 
     /**
@@ -279,24 +277,24 @@ class NCIntentSolverManager(
 
         // Find all matches across all intents and sentence variants.
         for (
-            (vrn, vrnIdx) <- ctx.getVariants.asScala.zipWithIndex if mdl.onVariant(vrn);
-            ents = vrn.getEntities.asScala;
-            varEntsGroups = ents.filter(t => t.getGroups != null && !t.getGroups.isEmpty).map(_.getGroups.asScala);
+            (vrn, vrnIdx) <- ctx.getVariants.zipWithIndex if mdl.onVariant(vrn);
+            ents = vrn.getEntities;
+            varEntsGroups = ents.filter(t => t.getGroups != null && t.getGroups.nonEmpty).map(_.getGroups);
             (intent, callback) <- intents
         )
             val convEnts: Seq[IntentEntity] =
                 if intent.terms.exists(_.conv) then
                     // We do not mix tokens with same group from the conversation and given sentence.
-                    ctx.getConversation.getStm.asScala.toSeq.
-                        map(ent => ent -> (if ent.getGroups == null then Set.empty[String] else ent.getGroups.asScala)).
-                        filter { (ent, entGroups)  => !varEntsGroups.exists(_.subsetOf(entGroups)) }.
+                    ctx.getConversation.getStm.
+                        map(ent => ent -> (if ent.getGroups == null then Set.empty else ent.getGroups)).
+                        filter { (_, entGroups)  => !varEntsGroups.exists(_.subsetOf(entGroups)) }.
                         map { (e, _) => IntentEntity(used = false, conv = true, e) }
                 else
                     Seq.empty
 
             // Solve intent in isolation.
-            solveIntent(ctx, intent, ents.map(IntentEntity(false, false, _)).toSeq, convEnts, vrnIdx) match
-                case Some(intentMatch) => matches += MatchHolder(intentMatch, callback, IntentSolverVariant(vrn.getEntities.asScala.toSeq), vrnIdx)
+            solveIntent(ctx, intent, ents.map(IntentEntity(false, false, _)), convEnts, vrnIdx) match
+                case Some(intentMatch) => matches += MatchHolder(intentMatch, callback, IntentSolverVariant(vrn.getEntities), vrnIdx)
                 case None => // No-op.
 
         val sorted = matches.sortWith((m1: MatchHolder, m2: MatchHolder) =>
@@ -320,7 +318,7 @@ class NCIntentSolverManager(
                                 val variantPart =
                                     m.variant.
                                         entities.
-                                        map(t => s"${t.getId}${t.getGroups}${t.mkText()}").
+                                        map(t => s"${t.getId}${t.getGroups}${t.mkText}").
                                         mkString("")
 
                                 val intentPart = m.intentMatch.intent.toString
@@ -369,7 +367,7 @@ class NCIntentSolverManager(
                         var entIdx = 0
                         for (e <- grp.usedEntities)
                             val conv = if e.conv then "(conv) " else ""
-                            ents += s"    #$entIdx: $conv${e.entity.getId}(${e.entity.mkText()})"
+                            ents += s"    #$entIdx: $conv${e.entity.getId}(${e.entity.mkText})"
                             entIdx += 1
                     else
                         ents += "    <empty>"
@@ -449,7 +447,7 @@ class NCIntentSolverManager(
             var abort = false
             var lastTermMatch: TermMatch = null
             val sess = ctx.getConversation.getData // Conversation metadata (shared across all terms).
-            val convMeta = sess.keysSet().asScala.map(k => k -> sess.get(k).asInstanceOf[Object]).toMap
+            val convMeta = sess.keysSet.map(k => k -> sess.get(k).asInstanceOf[Object]).toMap
             val ents = senEnts.map(_.entity)
 
             // Check terms.
@@ -487,8 +485,8 @@ class NCIntentSolverManager(
             else
                 val usedSenEnts = senEnts.filter(_.used)
                 val usedConvEnts = convEnts.filter(_.used)
-                val usedToks = usedSenEnts.flatMap(_.entity.getTokens.asScala)
-                val unusedToks = ctx.getTokens.asScala.filter(p => !usedToks.contains(p))
+                val usedToks = usedSenEnts.flatMap(_.entity.getTokens)
+                val unusedToks = ctx.getTokens.filter(p => !usedToks.contains(p))
 
                 if !opts.allowStmEntityOnly && usedSenEnts.isEmpty && usedConvEnts.nonEmpty then
                     logger.info(
@@ -514,7 +512,7 @@ class NCIntentSolverManager(
                     // Number of remaining (unused) non-free words in the sentence is a measure of exactness of the match.
                     // The match is exact when all non-free words are used in that match.
                     // Negate to make sure the bigger (smaller negative number) is better.
-                    val nonFreeWordNum = -(ctx.getTokens.size() - senEnts.map(_.entity.getTokens.size()).sum)
+                    val nonFreeWordNum = -(ctx.getTokens.size - senEnts.map(_.entity.getTokens.size).sum)
 
                     intentW.prepend(nonFreeWordNum)
 
@@ -538,8 +536,8 @@ class NCIntentSolverManager(
         tbl += (
             "Matched Entities",
             termMatch.usedEntities.map(t =>
-                val txt = t.entity.mkText()
-                val idx = t.entity.getTokens.asScala.map(_.getIndex).mkString("{", ",", "}")
+                val txt = t.entity.mkText
+                val idx = t.entity.getTokens.map(_.getIndex).mkString("{", ",", "}")
 
                 s"$txt${s"[$idx]"}").mkString(" ")
         )
@@ -651,7 +649,7 @@ class NCIntentSolverManager(
             // Mark found entities as used.
             for (e <- usedEnts) e.used = true
 
-            Option(PredicateMatch(usedEnts.toSeq, new Weight(senTokNum, convDepthsSum, usesSum)))
+            Option(PredicateMatch(usedEnts.toList, new Weight(senTokNum, convDepthsSum, usesSum)))
 
     /**
       *
@@ -683,21 +681,21 @@ class NCIntentSolverManager(
                 data
 
         for (intentRes <- intentResults.filter(_ != null) if Loop.hasNext)
-            def mkIntentMatch(arg: JList[JList[NCEntity]]): NCIntentMatch =
+            def mkIntentMatch(arg: List[List[NCEntity]]): NCIntentMatch =
                 new NCIntentMatch:
                     override val getContext: NCContext = ctx
                     override val getIntentId: String = intentRes.intentId
-                    override val getIntentEntities: JList[JList[NCEntity]] = intentRes.groups.map(_.entities).map(_.asJava).asJava
-                    override def getTermEntities(idx: Int): JList[NCEntity] = intentRes.groups(idx).entities.asJava
-                    override def getTermEntities(termId: String): JList[NCEntity] =
+                    override val getIntentEntities: List[List[NCEntity]] = intentRes.groups.map(_.entities)
+                    override def getTermEntities(idx: Int): List[NCEntity] = intentRes.groups(idx).entities
+                    override def getTermEntities(termId: String): List[NCEntity] =
                         intentRes.groups.find(_.termId === termId) match
-                            case Some(g) => g.entities.asJava
-                            case None => Collections.emptyList()
+                            case Some(g) => g.entities
+                            case None => List.empty
                     override val getVariant: NCVariant =
                         new NCVariant:
-                            override def getEntities: JList[NCEntity] = intentRes.variant.entities.asJava
+                            override def getEntities: List[NCEntity] = intentRes.variant.entities
 
-            val im = mkIntentMatch(intentRes.groups.map(_.entities).map(_.asJava).asJava)
+            val im = mkIntentMatch(intentRes.groups.map(_.entities))
             try
                 if mdl.onMatchedIntent(im) then
                     // This can throw NCIntentSkip exception.
@@ -706,38 +704,38 @@ class NCIntentSolverManager(
                     def saveHistory(res: NCResult, im: NCIntentMatch): Unit =
                         dialog.addMatchedIntent(im, res, ctx)
                         conv.getConversation(req.getUserId).addEntities(
-                            req.getRequestId, im.getIntentEntities.asScala.flatMap(_.asScala).toSeq.distinct
+                            req.getRequestId, im.getIntentEntities.flatten.distinct
                         )
                         logger.info(s"Intent '${intentRes.intentId}' for variant #${intentRes.variantIdx + 1} selected as the <|best match|>")
 
                     def executeCallback(im: NCIntentMatch): NCResult =
                         val cbRes = intentRes.fn(im)
                         // Store winning intent match in the input.
-                        if cbRes.getIntentId == null then cbRes.setIntentId(intentRes.intentId)
+                        if cbRes.intentId == null then cbRes.intentId = intentRes.intentId
                         cbRes
 
                     def finishSearch(): Unit =
-                        val cb = new Function[JList[JList[NCEntity]], NCResult]:
-                            @volatile private var called = false
-                            override def apply(args: JList[JList[NCEntity]]): NCResult =
-                                if called then E("Callback was already called.")
-                                called = true
+                        @volatile var called = false
 
-                                val reqId = reqIds.synchronized { reqIds.getOrElse(key, null) }
+                        def f(args: List[List[NCEntity]]): NCResult =
+                            if called then E("Callback was already called.")
+                            called = true
 
-                                // TODO: text.
-                                if reqId != ctx.getRequest.getRequestId then E("Callback is out of date.")
+                            val reqId = reqIds.synchronized { reqIds.getOrElse(key, null) }
 
-                                typ match
-                                    case SEARCH =>
-                                        val imNew = mkIntentMatch(args)
-                                        val cbRes = executeCallback(imNew)
-                                        dialog.replaceLastItem(imNew, cbRes, ctx)
-                                        cbRes
-                                    case SEARCH_NO_HISTORY => executeCallback(mkIntentMatch(args))
-                                    case _ => throw new AssertionError(s"Unexpected state: $typ")
+                            // TODO: text.
+                            if reqId != ctx.getRequest.getRequestId then E("Callback is out of date.")
 
-                        Loop.finish(IterationResult(Right(CallbackDataImpl(im.getIntentId, im.getIntentEntities, cb)), im))
+                            typ match
+                                case SEARCH =>
+                                    val imNew = mkIntentMatch(args)
+                                    val cbRes = executeCallback(imNew)
+                                    dialog.replaceLastItem(imNew, cbRes, ctx)
+                                    cbRes
+                                case SEARCH_NO_HISTORY => executeCallback(mkIntentMatch(args))
+                                case _ => throw new AssertionError(s"Unexpected state: $typ")
+
+                        Loop.finish(IterationResult(Right(CallbackDataImpl(im.getIntentId, im.getIntentEntities, f)), im))
 
                     typ match
                         case REGULAR =>
@@ -750,7 +748,7 @@ class NCIntentSolverManager(
                         case SEARCH_NO_HISTORY =>
                             finishSearch()
                 else
-                    logger.info(s"Model '${ctx.getModelConfig.getId}' triggered rematching of intents by intent '${intentRes.intentId}' on variant #${intentRes.variantIdx + 1}.")
+                    logger.info(s"Model '${ctx.getModelConfig.id}' triggered rematching of intents by intent '${intentRes.intentId}' on variant #${intentRes.variantIdx + 1}.")
                     Loop.finish()
                 catch
                     case e: NCIntentSkip =>
@@ -771,7 +769,7 @@ class NCIntentSolverManager(
     def solve(mdl: NCModel, ctx: NCContext, typ: NCIntentSolveType): ResultData =
         import NCIntentSolveType.REGULAR
 
-        val key = UserModelKey(ctx.getRequest.getUserId, mdl.getConfig.getId)
+        val key = UserModelKey(ctx.getRequest.getUserId, mdl.getConfig.id)
         reqIds.synchronized { reqIds.put(key, ctx.getRequest.getRequestId)}
 
         val mdlCtxRes = mdl.onContext(ctx)
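
The hunks above replace the Java-facing JList and java.util.function.Function signatures with native Scala types. A hypothetical consumer sketch (not part of the commit); it assumes NCCallbackData declares the getIntentId, getCallbackArguments and getCallback members that CallbackDataImpl implements above:

    def fireCallback(cb: NCCallbackData): NCResult =
        // The callback now takes a plain List[List[NCEntity]] and returns NCResult directly.
        cb.getCallback(cb.getCallbackArguments)

Note that the callback produced in finishSearch() is single-shot and tied to the current request: a second invocation or a stale request id fails with an error, as the hunk above shows.
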
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroCompiler.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroCompiler.scala
index a385f904..fe3194b2 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroCompiler.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroCompiler.scala
@@ -17,17 +17,15 @@
 
 package org.apache.nlpcraft.internal.makro
 
-import org.apache.nlpcraft.*
 import com.typesafe.scalalogging.LazyLogging
-import org.antlr.v4.runtime.tree.ParseTreeWalker
 import org.antlr.v4.runtime.*
+import org.antlr.v4.runtime.tree.ParseTreeWalker
+import org.apache.nlpcraft.*
 import org.apache.nlpcraft.internal.*
 import org.apache.nlpcraft.internal.antlr4.*
-import NCMacroCompiler.FiniteStateMachine
+import org.apache.nlpcraft.internal.makro.NCMacroCompiler.FiniteStateMachine
 import org.apache.nlpcraft.internal.makro.antlr4.*
 import org.apache.nlpcraft.internal.util.*
-import org.apache.nlpcraft.NCException
-import org.apache.nlpcraft.internal.makro.antlr4.NCMacroDslLexer
 
 import scala.collection.mutable
 
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroJavaParser.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroJavaParser.java
deleted file mode 100644
index 0b9ce644..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroJavaParser.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.internal.makro;
-
-import java.util.Set;
-
-/**
- * Java adapter for macro parser (so that Java reflection could work).
- */
-public class NCMacroJavaParser implements NCMacroJavaParserTrait {
-    private final NCMacroParser impl = new NCMacroParser();
-
-    /**
-     * Expands given macro DSL string.
-     *
-     * @param s Macro DSL string to expand.
-     * @return Set of macro expansions for a given macro DSL string.
-     */
-    public Set<String> expand(String s) {
-        return impl.expandJava(s);
-    }
-
-    /**
-     * Adds or overrides given macro.
-     *
-     * @param name Macro name (typically an upper case string).
-     *     It must start with '&lt;' and end with '&gt;' symbol.
-     * @param macro Value of the macro (any arbitrary string).
-     * @return {@code true} if an existing macro was overridden, {@code false} otherwise.
-     */
-    public boolean addMacro(String name, String macro) {
-        boolean f = impl.hasMacro(name);
-
-        impl.addMacro(name, macro);
-
-        return f;
-    }
-
-    /**
-     * Removes macro.
-     *
-     * @param name Name of the macro to remove.
-     * @return {@code true} if given macro was indeed found and removed, {@code false} otherwise.
-     */
-    public boolean removeMacro(String name) {
-        boolean f = impl.hasMacro(name);
-
-        impl.removeMacro(name);
-
-        return f;
-    }
-
-    /**
-     * Tests whether this processor has given macro.
-     *
-     * @param name Name of the macro to test.
-     * @return {@code true} if macro was found, {@code false} otherwise.
-     */
-    public boolean hasMacro(String name) {
-        return impl.hasMacro(name);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroJavaParserTrait.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroJavaParserTrait.java
deleted file mode 100644
index 9177a54e..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroJavaParserTrait.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.internal.makro;
-
-import java.util.Set;
-
-/**
- * Necessary plug for Javadoc to work on mixed Java/Scala project.
- */
-public interface NCMacroJavaParserTrait {
-    /**
-     * Expands given macro DSL string.
-     *
-     * @param s Macro DSL string to expand.
-     * @return Set of macro expansions for a given macro DSL string.
-     */
-    Set<String> expand(String s);
-
-    /**
-     * Adds or overrides given macro.
-     *
-     * @param name Macro name (typically an upper case string).
-     *     It must start with '&lt;' and end with '&gt;' symbol.
-     * @param macro Value of the macro (any arbitrary string).
-     * @return {@code true} if an existing macro was overridden, {@code false} otherwise.
-     */
-    boolean addMacro(String name, String macro);
-
-    /**
-     * Removes macro.
-     *
-     * @param name Name of the macro to remove.
-     * @return {@code true} if given macro was indeed found and removed, {@code false} otherwise.
-     */
-    boolean removeMacro(String name);
-
-    /**
-     * Tests whether this processor has given macro.
-     *
-     * @param name Name of the macro to test.
-     * @return {@code true} if macro was found, {@code false} otherwise.
-     */
-    boolean hasMacro(String name);
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroParser.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroParser.scala
index 5ef05685..040e7a69 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroParser.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroParser.scala
@@ -20,7 +20,6 @@ package org.apache.nlpcraft.internal.makro
 import org.apache.nlpcraft.*
 import org.apache.nlpcraft.internal.util.NCUtils
 
-import scala.jdk.CollectionConverters.*
 
 object NCMacroParser:
     private final val ESC_CHARS = """{}\<>_[]|,"""
@@ -78,7 +77,7 @@ object NCMacroParser:
 class NCMacroParser:
     import NCMacroParser.*
 
-    private val macros = new java.util.concurrent.ConcurrentHashMap[String, String]().asScala
+    private val macros = scala.collection.mutable.HashMap.empty[String, String]
 
     /**
       * Trims all duplicate spaces.
@@ -138,13 +137,6 @@ class NCMacroParser:
 
         NCUtils.distinct(NCMacroCompiler.compile(s).toList map trimDupSpaces map processEscapes)
 
-    /**
-      * Expand given string.
-      *
-      * @param txt Text to expand.
-      */
-    def expandJava(txt: String): java.util.Set[String] =
-        expand(txt).toSet.asJava
 
     /**
       * Checks macro name.
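
With NCMacroJavaParser, NCMacroJavaParserTrait and expandJava removed, macro expansion goes through the Scala API only. A hedged usage sketch; the macro name and body are invented for illustration, and the collection type returned by expand is assumed from the code above:

    val p = new NCMacroParser()                 // macros are now kept in a plain Scala mutable HashMap
    p.addMacro("<ACTION>", "{turn|switch}")     // addMacro/hasMacro/removeMacro remain on the Scala class
    p.expand("<ACTION> the light on")           // e.g. "turn the light on", "switch the light on"
    p.removeMacro("<ACTION>")
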
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/antlr4/NCMacroDslLexer.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/antlr4/NCMacroDslLexer.java
index 8639938a..64057c91 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/antlr4/NCMacroDslLexer.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/antlr4/NCMacroDslLexer.java
@@ -1,13 +1,16 @@
 // Generated from /Users/nivanov/incubator-nlpcraft/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/antlr4/NCMacroDsl.g4 by ANTLR 4.9.2
 package org.apache.nlpcraft.internal.makro.antlr4;
-import org.antlr.v4.runtime.Lexer;
+
 import org.antlr.v4.runtime.CharStream;
-import org.antlr.v4.runtime.Token;
-import org.antlr.v4.runtime.TokenStream;
-import org.antlr.v4.runtime.*;
-import org.antlr.v4.runtime.atn.*;
+import org.antlr.v4.runtime.Lexer;
+import org.antlr.v4.runtime.RuntimeMetaData;
+import org.antlr.v4.runtime.Vocabulary;
+import org.antlr.v4.runtime.VocabularyImpl;
+import org.antlr.v4.runtime.atn.ATN;
+import org.antlr.v4.runtime.atn.ATNDeserializer;
+import org.antlr.v4.runtime.atn.LexerATNSimulator;
+import org.antlr.v4.runtime.atn.PredictionContextCache;
 import org.antlr.v4.runtime.dfa.DFA;
-import org.antlr.v4.runtime.misc.*;
 
 @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
 public class NCMacroDslLexer extends Lexer {
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/antlr4/NCMacroDslParser.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/antlr4/NCMacroDslParser.java
index dabc81a5..f6f95f4b 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/antlr4/NCMacroDslParser.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/antlr4/NCMacroDslParser.java
@@ -1,13 +1,24 @@
 // Generated from /Users/nivanov/incubator-nlpcraft/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/antlr4/NCMacroDsl.g4 by ANTLR 4.9.2
 package org.apache.nlpcraft.internal.makro.antlr4;
-import org.antlr.v4.runtime.atn.*;
+
+import org.antlr.v4.runtime.FailedPredicateException;
+import org.antlr.v4.runtime.NoViableAltException;
+import org.antlr.v4.runtime.Parser;
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.antlr.v4.runtime.RecognitionException;
+import org.antlr.v4.runtime.RuleContext;
+import org.antlr.v4.runtime.RuntimeMetaData;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.runtime.TokenStream;
+import org.antlr.v4.runtime.Vocabulary;
+import org.antlr.v4.runtime.VocabularyImpl;
+import org.antlr.v4.runtime.atn.ATN;
+import org.antlr.v4.runtime.atn.ATNDeserializer;
+import org.antlr.v4.runtime.atn.ParserATNSimulator;
+import org.antlr.v4.runtime.atn.PredictionContextCache;
 import org.antlr.v4.runtime.dfa.DFA;
-import org.antlr.v4.runtime.*;
-import org.antlr.v4.runtime.misc.*;
-import org.antlr.v4.runtime.tree.*;
-import java.util.List;
-import java.util.Iterator;
-import java.util.ArrayList;
+import org.antlr.v4.runtime.tree.ParseTreeListener;
+import org.antlr.v4.runtime.tree.TerminalNode;
 
 @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
 public class NCMacroDslParser extends Parser {
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/util/NCUtils.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/util/NCUtils.scala
index b6d3dd97..c19e626a 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/util/NCUtils.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/util/NCUtils.scala
@@ -37,7 +37,6 @@ import scala.concurrent.duration.Duration
 import scala.io.*
 import scala.sys.SystemProperties
 import scala.util.Using
-
 /**
   * 
   */
@@ -776,12 +775,13 @@ object NCUtils extends LazyLogging:
         catch
             case e: IOException => E(s"Failed to read stream.", e)
 
+
     /**
       *
       * @param bodies
       * @param ec
       */
-    def execPar(bodies: (() => Any)*)(ec: ExecutionContext): Unit =
+    def execPar(bodies: Seq[() => Any])(ec: ExecutionContext): Unit =
         val errs = new CopyOnWriteArrayList[Throwable]()
 
         bodies.map(body => Future {
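
execPar now takes an explicit Seq of thunks instead of varargs, which is why the NCOpenNLPEntityParser hunk further down drops the trailing '*' splat. A hypothetical call-site sketch (loadA/loadB are placeholder thunks):

    import scala.concurrent.ExecutionContext

    // Before: NCUtils.execPar(() => loadA(), () => loadB())(ec)
    NCUtils.execPar(Seq(() => loadA(), () => loadB()))(ExecutionContext.Implicits.global)
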
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCNLPEntityParser.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCNLPEntityParser.java
deleted file mode 100644
index d2e2b6f4..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCNLPEntityParser.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.nlp.entity.parser;
-
-import org.apache.nlpcraft.NCEntity;
-import org.apache.nlpcraft.NCEntityParser;
-import org.apache.nlpcraft.NCModelConfig;
-import org.apache.nlpcraft.NCRequest;
-import org.apache.nlpcraft.NCToken;
-import org.apache.nlpcraft.nlp.entity.parser.impl.NCNLPEntityParserImpl;
-
-import java.util.List;
-
-/**
- * TODO: add javadoc based on comments below.
- *
- * Component is language independent.
- */
-public class NCNLPEntityParser implements NCEntityParser {
-    private final NCNLPEntityParserImpl impl = new NCNLPEntityParserImpl();
-
-    @Override
-    public List<NCEntity> parse(NCRequest req, NCModelConfig cfg, List<NCToken> toks) {
-        return impl.parse(req, cfg, toks);
-    }
-
-    @Override
-    public void onStart(NCModelConfig cfg) {
-        impl.onStart(cfg);
-    }
-
-    @Override
-    public void onStop(NCModelConfig cfg) {
-        impl.onStop(cfg);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/NCNLPEntityParserImpl.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCNLPEntityParser.scala
similarity index 72%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/NCNLPEntityParserImpl.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCNLPEntityParser.scala
index 8a49e1b8..7a046cf7 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/NCNLPEntityParserImpl.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCNLPEntityParser.scala
@@ -15,36 +15,34 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.entity.parser.impl
+package org.apache.nlpcraft.nlp.entity.parser
 
 import org.apache.nlpcraft.*
 
 import java.util
-import java.util.List as JList
 import java.util.stream.Collectors
 
 /**
   *
   */
-object NCNLPEntityParserImpl:
+object NCNLPEntityParser:
     private def id = "nlp:token"
 
-import org.apache.nlpcraft.nlp.entity.parser.impl.NCNLPEntityParserImpl.*
-
+import org.apache.nlpcraft.nlp.entity.parser.NCNLPEntityParser.*
 /**
   *
   */
-class NCNLPEntityParserImpl extends NCEntityParser:
-    override def parse(req: NCRequest, cfg: NCModelConfig, toks: JList[NCToken]): JList[NCEntity] =
-        toks.stream().map(t =>
+class NCNLPEntityParser extends NCEntityParser:
+    override def parse(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): List[NCEntity] =
+        toks.map(t =>
             new NCPropertyMapAdapter with NCEntity:
                 put(s"$id:text", t.getText)
                 put(s"$id:index", t.getIndex)
                 put(s"$id:startCharIndex", t.getStartCharIndex)
 
-                t.keysSet().forEach(key => put(s"$id:$key", t.get(key)))
+                t.keysSet.foreach(key => put(s"$id:$key", t.get(key)))
 
-                override val getTokens: JList[NCToken] = util.Collections.singletonList(t)
+                override val getTokens: List[NCToken] = List(t)
                 override val getRequestId: String = req.getRequestId
                 override val getId: String = id
-        ).collect(Collectors.toList)
+        )
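
The rewritten parser emits exactly one entity per token, with id "nlp:token", copying the token's text, index, start character index and any keys added by token enrichers. An illustrative sketch, assuming req, cfg and toks come from the surrounding pipeline and that entities expose the same get[T] property accessor used on tokens elsewhere in this diff:

    val ents: List[NCEntity] = new NCNLPEntityParser().parse(req, cfg, toks)
    ents.foreach(e => println(s"${e.get[String]("nlp:token:text")} @ ${e.get[Int]("nlp:token:index")}"))
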
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCOpenNLPEntityParser.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCOpenNLPEntityParser.java
deleted file mode 100644
index 270e39da..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCOpenNLPEntityParser.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.nlp.entity.parser;
-
-import org.apache.nlpcraft.NCEntity;
-import org.apache.nlpcraft.NCEntityParser;
-import org.apache.nlpcraft.NCException;
-import org.apache.nlpcraft.NCModelConfig;
-import org.apache.nlpcraft.NCRequest;
-import org.apache.nlpcraft.NCToken;
-import org.apache.nlpcraft.nlp.entity.parser.impl.NCOpenNLPEntityParserImpl;
-
-import java.util.List;
-import java.util.Objects;
-
-/**
- *
- * Generates entities with
- *  - ID `opennlp:{name}` where 'name' is element model name (from trained file or resource) and
- *  - one property `opennlp:{name}:probability`, where probability is double value between 0 and 1.
- *
- * <p>
- * Models can be download here: http://opennlp.sourceforge.net/models-1.5/ or trained.
- * <p>
- * Component is language independent.
- * <p>
- */
-public class NCOpenNLPEntityParser implements NCEntityParser {
-    private final NCOpenNLPEntityParserImpl impl;
-
-    /**
-     * @param mdlSrc
-     */
-    public NCOpenNLPEntityParser(String mdlSrc) {
-        Objects.requireNonNull(mdlSrc, "Model source cannot be null.");
-
-        this.impl = new NCOpenNLPEntityParserImpl(java.util.Collections.singletonList(mdlSrc));
-    }
-
-    /**
-     * @param mdlSrcs
-     */
-    public NCOpenNLPEntityParser(List<String> mdlSrcs) {
-        Objects.requireNonNull(mdlSrcs, "Model sources cannot be null.");
-        if (mdlSrcs.size() == 0) throw new NCException("Model sources cannot be empty.");
-
-        this.impl = new NCOpenNLPEntityParserImpl(mdlSrcs);
-    }
-
-    @Override
-    public List<NCEntity> parse(NCRequest req, NCModelConfig cfg, List<NCToken> toks) {
-        return impl.parse(req, cfg, toks);
-    }
-
-    @Override
-    public void onStart(NCModelConfig cfg) {
-        impl.onStart(cfg);
-    }
-
-    @Override
-    public void onStop(NCModelConfig cfg) {
-        impl.onStop(cfg);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/NCOpenNLPEntityParserImpl.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCOpenNLPEntityParser.scala
similarity index 78%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/NCOpenNLPEntityParserImpl.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCOpenNLPEntityParser.scala
index 9498493a..c2bf53bb 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/NCOpenNLPEntityParserImpl.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCOpenNLPEntityParser.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.entity.parser.impl
+package org.apache.nlpcraft.nlp.entity.parser
 
 import com.typesafe.scalalogging.LazyLogging
 import opennlp.tools.namefind.*
@@ -24,7 +24,7 @@ import org.apache.nlpcraft.internal.util.NCUtils
 
 import java.io.*
 import java.util
-import java.util.{Optional, List as JList, Map as JMap}
+import java.util.Objects
 import scala.Option.*
 import scala.collection.mutable
 import scala.concurrent.ExecutionContext
@@ -32,29 +32,33 @@ import scala.jdk.CollectionConverters.*
 import scala.language.postfixOps
 import scala.util.Using
 
+object NCOpenNLPEntityParser:
+    def apply(src: String): NCOpenNLPEntityParser =
+        Objects.requireNonNull(src, "Model source cannot be null.")
+        new NCOpenNLPEntityParser(List(src))
+
 /**
   *
   * @param srcs
   */
-class NCOpenNLPEntityParserImpl(srcs: JList[String]) extends NCEntityParser with LazyLogging :
-    require(srcs != null)
+class NCOpenNLPEntityParser(srcs: List[String]) extends NCEntityParser with LazyLogging:
+    Objects.requireNonNull(srcs, "Model sources cannot be null.")
 
     private var finders: Seq[NameFinderME] = _
     private case class Holder(start: Int, end: Int, name: String, probability: Double)
 
     init()
-
     /**
       *
       */
     private def init(): Unit =
         val finders = mutable.ArrayBuffer.empty[NameFinderME]
         NCUtils.execPar(
-            srcs.asScala.toSeq.map(res => () => {
+            srcs.map(res => () => {
                 val f = new NameFinderME(new TokenNameFinderModel(NCUtils.getStream(res)))
                 logger.trace(s"Loaded resource: $res")
                 finders.synchronized { finders += f }
-            })*)(ExecutionContext.Implicits.global)
+            }))(ExecutionContext.Implicits.global)
 
         this.finders = finders.toSeq
 
@@ -69,8 +73,7 @@ class NCOpenNLPEntityParserImpl(srcs: JList[String]) extends NCEntityParser with
         finally finder.clearAdaptiveData()
     }
 
-    override def parse(req: NCRequest, cfg: NCModelConfig, toksList: JList[NCToken]): JList[NCEntity] =
-        val toks = toksList.asScala
+    override def parse(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): List[NCEntity] =
         val txtArr = toks.map(_.getText).toArray
 
         finders.flatMap(find(_, txtArr)).flatMap(h => {
@@ -86,8 +89,8 @@ class NCOpenNLPEntityParserImpl(srcs: JList[String]) extends NCEntityParser with
                 new NCPropertyMapAdapter with NCEntity:
                     put(s"opennlp:${h.name}:probability", h.probability)
 
-                    override val getTokens: JList[NCToken] = toks.flatMap(t => Option.when(t.getIndex >= i1 && t.getIndex <= i2)(t)).asJava
+                    override val getTokens: List[NCToken] = toks.flatMap(t => Option.when(t.getIndex >= i1 && t.getIndex <= i2)(t))
                     override val getRequestId: String = req.getRequestId
                     override val getId: String = s"opennlp:${h.name}"
             )
-        }).asJava
\ No newline at end of file
+        }).toList
\ No newline at end of file
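
With the Java wrapper deleted, both construction forms go through the Scala class; the single-model case is covered by the companion apply. A hypothetical wiring sketch (the file names are placeholders for trained OpenNLP NER models):

    val one  = NCOpenNLPEntityParser("opennlp/en-ner-location.bin")                       // companion apply
    val many = new NCOpenNLPEntityParser(List("en-ner-location.bin", "en-ner-date.bin"))  // multi-model constructor
    // Resulting entities carry id "opennlp:{name}" plus an "opennlp:{name}:probability" property.
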
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCVariantFilter.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticElement.scala
similarity index 72%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/NCVariantFilter.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticElement.scala
index 35fabac9..28480f24 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCVariantFilter.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticElement.scala
@@ -14,20 +14,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.nlpcraft.nlp.entity.parser.semantic
 
-package org.apache.nlpcraft;
+trait NCSemanticElement:
+    def getId: String
+    def getGroups: Set[String] = Set(getId)
+    def getValues: Map[String, Set[String]] = Map.empty
+    def getSynonyms: Set[String] = Set.empty
 
-import java.util.List;
-
-/**
- *
- */
-public interface NCVariantFilter extends NCLifecycle {
-    /**
-     * 
-     * @param req
-     * @param cfg
-     * @param vars
-     */
-    List<NCVariant> filter(NCRequest req, NCModelConfig cfg, List<NCVariant> vars);
-}
+    def getProperties: Map[String, AnyRef] = Map.empty
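
The new NCSemanticElement trait defaults the groups to the element id and the remaining members to empty collections, so a programmatic element only needs an id. A minimal sketch under those defaults (the id and synonyms are invented for illustration):

    val lightElem: NCSemanticElement = new NCSemanticElement:
        override val getId: String = "ls:light"                        // hypothetical element id
        override val getSynonyms: Set[String] = Set("light", "lamp")   // getGroups defaults to Set("ls:light")
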
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticEntityParser.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticEntityParser.java
deleted file mode 100644
index 82038408..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticEntityParser.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.nlp.entity.parser.semantic;
-
-import org.apache.nlpcraft.NCEntity;
-import org.apache.nlpcraft.NCEntityParser;
-import org.apache.nlpcraft.NCException;
-import org.apache.nlpcraft.NCModelConfig;
-import org.apache.nlpcraft.NCRequest;
-import org.apache.nlpcraft.NCToken;
-import org.apache.nlpcraft.NCTokenParser;
-import org.apache.nlpcraft.nlp.entity.parser.impl.semantic.NCSemanticEntityParserImpl;
-
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- *
- */
-public class NCSemanticEntityParser implements NCEntityParser {
-    private final NCSemanticEntityParserImpl impl;
-
-    /**
-     *
-     * @param stemmer
-     * @param parser
-     * @param elms
-     */
-    public NCSemanticEntityParser(NCSemanticStemmer stemmer, NCTokenParser parser, List<NCSemanticElement> elms) {
-        Objects.requireNonNull(stemmer, "Stemmer cannot be null.");
-        Objects.requireNonNull(parser, "Parser cannot be null.");
-        Objects.requireNonNull(elms, "Elements cannot be null.");
-        if (elms.size() == 0) throw new NCException("Element list cannot be empty.");
-
-        impl = NCSemanticEntityParserImpl.apply(stemmer, parser, Collections.emptyMap(), elms);
-    }
-
-    /**
-     *
-     * @param stemmer
-     * @param parser
-     * @param macros
-     * @param elms
-     */
-    public NCSemanticEntityParser(NCSemanticStemmer stemmer, NCTokenParser parser, Map<String, String> macros, List<NCSemanticElement> elms) {
-        Objects.requireNonNull(stemmer, "Stemmer cannot be null.");
-        Objects.requireNonNull(parser, "Parser cannot be null.");
-        Objects.requireNonNull(elms, "Elements cannot be null.");
-        if (elms.size() == 0) throw new NCException("Element list cannot be empty.");
-
-        impl = NCSemanticEntityParserImpl.apply(stemmer, parser, macros, elms);
-    }
-
-    /**
-     *
-     * @param stemmer
-     * @param src
-     */
-    public NCSemanticEntityParser(NCSemanticStemmer stemmer, NCTokenParser parser, String src) {
-        Objects.requireNonNull(stemmer, "Stemmer cannot be null.");
-        Objects.requireNonNull(parser, "Parser cannot be null.");
-        Objects.requireNonNull(src, "Source cannot be null.");
-
-        impl = NCSemanticEntityParserImpl.apply(stemmer, parser, src);
-    }
-
-    @Override
-    public List<NCEntity> parse(NCRequest req, NCModelConfig cfg, List<NCToken> toks) {
-        return impl.parse(req, cfg, toks);
-    }
-
-    @Override
-    public void onStart(NCModelConfig cfg) {
-        impl.onStart(cfg);
-    }
-
-    @Override
-    public void onStop(NCModelConfig cfg) {
-        impl.onStop(cfg);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/semantic/NCSemanticEntityParserImpl.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticEntityParser.scala
similarity index 81%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/semantic/NCSemanticEntityParserImpl.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticEntityParser.scala
index f960c9d4..74673fb9 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/semantic/NCSemanticEntityParserImpl.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticEntityParser.scala
@@ -15,28 +15,26 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.entity.parser.impl.semantic
+package org.apache.nlpcraft.nlp.entity.parser.semantic
 
 import com.typesafe.scalalogging.LazyLogging
 import org.apache.nlpcraft.*
 import org.apache.nlpcraft.internal.makro.NCMacroParser
 import org.apache.nlpcraft.internal.util.NCUtils
-import org.apache.nlpcraft.nlp.entity.parser.impl.semantic.*
-import org.apache.nlpcraft.nlp.entity.parser.impl.semantic.NCSemanticEntityParserImpl.combine
 import org.apache.nlpcraft.nlp.entity.parser.semantic.*
+import org.apache.nlpcraft.nlp.entity.parser.semantic.impl.*
 
 import java.io.*
 import java.util
+import java.util.Objects
 import java.util.regex.*
-import java.util.{List as JList, Map as JMap, Set as JSet}
 import scala.annotation.tailrec
 import scala.collection.mutable
-import scala.jdk.CollectionConverters.*
 
 /**
   *
   */
-object NCSemanticEntityParserImpl:
+object NCSemanticEntityParser:
     /**
       *
       * @param stemmer
@@ -48,17 +46,12 @@ object NCSemanticEntityParserImpl:
     def apply(
         stemmer: NCSemanticStemmer,
         parser: NCTokenParser,
-        macros: JMap[String, String],
-        elms: JList[NCSemanticElement]
-    ): NCSemanticEntityParserImpl =
+        macros: Map[String, String],
+        elms: List[NCSemanticElement]
+    ): NCSemanticEntityParser =
         require(elms != null)
 
-        new NCSemanticEntityParserImpl(
-            stemmer,
-            parser,
-            macros = if macros == null then null else macros.asScala.toMap,
-            elements = elms.asScala.toSeq
-        )
+        new NCSemanticEntityParser(stemmer, parser, macros = macros, elements = elms)
 
     /**
       *
@@ -67,23 +60,23 @@ object NCSemanticEntityParserImpl:
       * @param src
       * @return
       */
-    def apply(stemmer: NCSemanticStemmer, parser: NCTokenParser, src: String): NCSemanticEntityParserImpl =
+    def apply(stemmer: NCSemanticStemmer, parser: NCTokenParser, src: String): NCSemanticEntityParser =
         require(src != null)
 
-        new NCSemanticEntityParserImpl(stemmer, parser, mdlSrc = src, scrType = NCSemanticSourceType.detect(src))
+        new NCSemanticEntityParser(stemmer, parser, mdlSrc = src)
 
     /**
       * @param baseTokens Tokens.
       * @param variants Variants without stopwords.
       */
-    private case class Piece(baseTokens: Seq[NCToken], variants: Seq[Seq[NCToken]])
+    private case class Piece(baseTokens: List[NCToken], variants: List[List[NCToken]])
 
     /**
       *
       * @param t
       * @return
       */
-    private def isStopWord(t: NCToken): Boolean = t.getOpt[Boolean]("stopword").orElse(false)
+    private def isStopWord(t: NCToken): Boolean = t.getOpt[Boolean]("stopword").getOrElse(false)
 
     /**
       *
@@ -133,10 +126,11 @@ object NCSemanticEntityParserImpl:
             stops4Delete = stops4Delete.filter(seq => !seq.contains(combo.head) && !seq.contains(combo.last))
 
             Piece(
-                combo,
+                combo.toList,
                 stops4Delete.
                     map(_.toSet).
-                    map(del => combo.filter(t => !del.contains(t))).filter(_.nonEmpty).sortBy(-_.size)
+                    map(del => combo.filter(t => !del.contains(t)).toList).filter(_.nonEmpty).sortBy(-_.size).
+                    toList
             )
         })
 
@@ -157,7 +151,7 @@ object NCSemanticEntityParserImpl:
         else if i >= data1.size then tmp
         else combine(data1, data2, i + 1, tmp.map(_ :+ data1(i)) ++ tmp.map(_ :+ data2(i)))
 
-import org.apache.nlpcraft.nlp.entity.parser.impl.semantic.NCSemanticEntityParserImpl.*
+import org.apache.nlpcraft.nlp.entity.parser.semantic.NCSemanticEntityParser.*
 
 /**
   *
@@ -166,24 +160,26 @@ import org.apache.nlpcraft.nlp.entity.parser.impl.semantic.NCSemanticEntityParse
   * @param macros
   * @param elements
   * @param mdlSrc
-  * @param scrType
   */
-class NCSemanticEntityParserImpl(
+class NCSemanticEntityParser(
     stemmer: NCSemanticStemmer,
     parser: NCTokenParser,
-    macros: Map[String, String] = null,
-    elements: Seq[NCSemanticElement] = null,
-    mdlSrc: String = null,
-    scrType: NCSemanticSourceType = null
+    macros: Map[String, String] = Map.empty,
+    elements: List[NCSemanticElement] = List.empty,
+    mdlSrc: String = null
 ) extends NCEntityParser with LazyLogging:
-    require(stemmer != null && parser != null)
-    require(elements != null || mdlSrc != null && scrType != null)
+    Objects.requireNonNull(stemmer, "Stemmer cannot be null.")
+    Objects.requireNonNull(parser, "Parser cannot be null.")
+
+    // TODO: exception.
+    require(elements != null && elements.nonEmpty || mdlSrc != null && mdlSrc.nonEmpty)
+
+    private val scrType = if mdlSrc != null then NCSemanticSourceType.detect(mdlSrc) else null
 
     private var synsHolder: NCSemanticSynonymsHolder = _
     private var elemsMap: Map[String, NCSemanticElement] = _
 
     init()
-
     /**
       *
       */
@@ -207,18 +203,17 @@ class NCSemanticEntityParserImpl(
       */
     private def warnMissedProperty(name: String): Unit = logger.warn(s"'$name' property not found. Is proper token enricher configured?")
 
-    override def parse(req: NCRequest, cfg: NCModelConfig, toksList: JList[NCToken]): JList[NCEntity] =
-        val toks = toksList.asScala.toSeq
-        if toks.exists(_.get[String]("stopword") == null) then warnMissedProperty("stopword")
+    override def parse(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): List[NCEntity] =
+        if toks.exists(_.getOpt[String]("stopword").isEmpty) then warnMissedProperty("stopword")
 
         val stems = toks.map(p => p -> stemmer.stem(p.getText.toLowerCase)).toMap
         val stems4Lemms =
             var ok = true
             val seq =
-                for (t <- toks; lemma = t.get[String]("lemma") if ok)
+                for (t <- toks; lemmaOpt = t.getOpt[String]("lemma") if ok)
                     yield
-                        ok = lemma != null
-                        t -> lemma
+                        ok = lemmaOpt.isDefined
+                        t -> lemmaOpt.orNull
 
             if ok then
                 seq.toMap.map { (tok, lemma) => tok -> stemmer.stem(lemma.toLowerCase) }
@@ -228,7 +223,7 @@ class NCSemanticEntityParserImpl(
 
         val cache = mutable.HashSet.empty[Seq[Int]] // Variants (tokens without stopwords) can be repeated.
 
-        case class Holder(elemId: String, tokens: Seq[NCToken], value: Option[String]):
+        case class Holder(elemId: String, tokens: List[NCToken], value: Option[String]):
             val tokensSet = tokens.toSet
             val idxs = tokensSet.map(_.getIndex)
 
@@ -256,7 +251,7 @@ class NCSemanticEntityParserImpl(
                                         elems.foreach(elem => add(elem.elementId, elem.value))
                                     case None => // No-op.
                         // With regex.
-                        for ((elemId, syns) <- synsHolder.mixedSynonyms.getOrElse(variant.size, Seq.empty))
+                        for ((elemId, syns) <- synsHolder.mixedSynonyms.getOrElse(variant.size, List.empty))
                             found = false
 
                             for (s <- syns if !found)
@@ -289,13 +284,13 @@ class NCSemanticEntityParserImpl(
         hs.toSeq.map(h => {
             val e = elemsMap(h.elemId)
             new NCPropertyMapAdapter with NCEntity:
-                if (e.getProperties != null) e.getProperties.asScala.foreach { (k, v) => put(s"${h.elemId}:$k", v) }
+                if e.getProperties != null then e.getProperties.foreach { (k, v) => put(s"${h.elemId}:$k", v) }
                 h.value match
                     case Some(value) => put(s"${h.elemId}:value", value)
                     case None => // No-op.
 
-                override val getTokens: JList[NCToken] = h.tokens.asJava
+                override val getTokens: List[NCToken] = h.tokens
                 override val getRequestId: String = req.getRequestId
                 override val getId: String = h.elemId
-                override val getGroups: JSet[String] = e.getGroups
-        }).asJava
\ No newline at end of file
+                override val getGroups: Set[String] = e.getGroups
+        }).toList
\ No newline at end of file
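
With the Impl split removed, the parser is constructed directly, either from programmatic elements or from a JSON/YAML source whose type is now detected internally instead of being passed in. A hedged construction sketch; the stemmer is a deliberately naive placeholder, tokParser stands for any available NCTokenParser, and the file name is hypothetical:

    val stemmer = new NCSemanticStemmer:
        override def stem(txt: String): String = txt.toLowerCase       // placeholder, not a real stemmer

    val fromSrc   = NCSemanticEntityParser(stemmer, tokParser, "lightswitch_model.yaml")     // type auto-detected
    val fromElems = new NCSemanticEntityParser(stemmer, tokParser, elements = List(myElem))  // myElem: any NCSemanticElement
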
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/package-info.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticStemmer.scala
similarity index 81%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/package-info.java
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticStemmer.scala
index f6d2488c..eb36e688 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/package-info.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticStemmer.scala
@@ -6,7 +6,7 @@
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
  *
- *      http://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -14,8 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.nlpcraft.nlp.entity.parser.semantic
 
-/**
- * Contains built-in token enrichers.
- */
-package org.apache.nlpcraft.nlp.token.enricher;
\ No newline at end of file
+trait NCSemanticStemmer:
+    def stem(txt: String): String
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/semantic/NCSemanticSourceReader.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/impl/NCSemanticSourceReader.scala
similarity index 66%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/semantic/NCSemanticSourceReader.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/impl/NCSemanticSourceReader.scala
index 74baf905..86d59eed 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/semantic/NCSemanticSourceReader.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/impl/NCSemanticSourceReader.scala
@@ -14,26 +14,24 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nlpcraft.nlp.entity.parser.impl.semantic
+package org.apache.nlpcraft.nlp.entity.parser.semantic.impl
 
 import com.fasterxml.jackson.core.JsonParser
 import com.fasterxml.jackson.databind.*
 import com.fasterxml.jackson.dataformat.yaml.*
 import com.fasterxml.jackson.module.scala.DefaultScalaModule
 import org.apache.nlpcraft.*
-import org.apache.nlpcraft.nlp.entity.parser.impl.semantic.*
-import org.apache.nlpcraft.nlp.entity.parser.impl.semantic.NCSemanticSourceType.*
 import org.apache.nlpcraft.nlp.entity.parser.semantic.*
+import org.apache.nlpcraft.nlp.entity.parser.semantic.impl.NCSemanticSourceType.*
 
 import java.io.InputStream
 import java.util
-import java.util.{List as JList, Map as JMap, Set as JSet}
 import scala.jdk.CollectionConverters.*
 
 /**
   *
   */
-private[impl] object NCSemanticSourceType:
+private[semantic] object NCSemanticSourceType:
     def detect(src: String): NCSemanticSourceType =
         val lc = src.toLowerCase
 
@@ -44,18 +42,18 @@ private[impl] object NCSemanticSourceType:
 /**
   *
   */
-private[impl] enum NCSemanticSourceType:
+private[semantic] enum NCSemanticSourceType:
     case JSON, YAML
 
 /**
   *
   */
-private[impl] case class NCSemanticSourceData(macros: Map[String, String], elements: Seq[NCSemanticElement])
+private[semantic] case class NCSemanticSourceData(macros: Map[String, String], elements: Seq[NCSemanticElement])
 
 /**
   *
   */
-private[impl] object NCSemanticSourceReader:
+private[semantic] object NCSemanticSourceReader:
     case class Element  (
         id: String,
         description: String,
@@ -66,26 +64,20 @@ private[impl] object NCSemanticSourceReader:
     )
     case class Source(macros: Map[String, String], elements: Seq[Element])
 
-    private def nvlGroups[T](seq: Seq[T]): JSet[T] = if seq == null then null else new util.HashSet[T](seq.asJava)
-    private def nvlSynonyms[T](set: Set[T]): JSet[T] = if set == null then null else set.asJava
     private def nvlElements[T, R](seq: Seq[T], to: T => R): Seq[R] = if seq == null then Seq.empty else seq.map(to)
-    private def nvlValues(m: Map[String, Set[String]]): JMap[String, JSet[String]] =
-        if m == null then null else m.map { (k, v) => k -> v.asJava }.asJava
-    private def nvlProperties(m: Map[String, AnyRef]): JMap[String, Object] =
-        if m == null then null else m.asJava
 
     private def convertElement(e: Element): NCSemanticElement =
         if e == null then null
         else
             new NCPropertyMapAdapter with NCSemanticElement:
                 override val getId: String = e.id
-                override val getGroups: JSet[String] =
-                    val gs = nvlGroups(e.groups)
+                override val getGroups: Set[String] =
+                    val gs = e.groups
 
-                    if gs != null && !gs.isEmpty then gs else super.getGroups
-                override val getValues: JMap[String, JSet[String]] = nvlValues(e.values)
-                override val getSynonyms: JSet[String] = nvlSynonyms(e.synonyms)
-                override val getProperties: JMap[String, AnyRef] = nvlProperties(e.properties)
+                    if gs != null && gs.nonEmpty then gs.toSet else super.getGroups
+                override val getValues: Map[String, Set[String]] = e.values
+                override val getSynonyms: Set[String] = e.synonyms
+                override val getProperties: Map[String, AnyRef] = e.properties
 
     /**
       *
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/semantic/NCSemanticSynonym.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/impl/NCSemanticSynonym.scala
similarity index 91%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/semantic/NCSemanticSynonym.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/impl/NCSemanticSynonym.scala
index a5fe834e..9ead7345 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/semantic/NCSemanticSynonym.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/impl/NCSemanticSynonym.scala
@@ -14,11 +14,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nlpcraft.nlp.entity.parser.impl.semantic
+package org.apache.nlpcraft.nlp.entity.parser.semantic.impl
 
 import org.apache.nlpcraft.NCToken
-import org.apache.nlpcraft.nlp.entity.parser.impl.semantic.*
-import org.apache.nlpcraft.nlp.entity.parser.impl.semantic.NCSemanticChunkKind.*
+import org.apache.nlpcraft.nlp.entity.parser.semantic.impl.NCSemanticChunkKind.*
 
 import java.util.regex.Pattern
 
@@ -37,14 +36,13 @@ private[impl] enum NCSemanticChunkKind:
   */
 private[impl] case class NCSemanticSynonymChunk(
     kind: NCSemanticChunkKind, text: String, stem: String = null, regex: Pattern = null
-) {
+):
     require(text != null && kind != null)
     require(stem != null ^ regex != null)
 
     val isText: Boolean = stem != null
 
     override def toString = s"($text|$kind)"
-}
 
 /**
   *
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/semantic/NCSemanticSynonymsProcessor.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/impl/NCSemanticSynonymsProcessor.scala
similarity index 88%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/semantic/NCSemanticSynonymsProcessor.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/impl/NCSemanticSynonymsProcessor.scala
index 07e2069a..8aa82f12 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/impl/semantic/NCSemanticSynonymsProcessor.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/impl/NCSemanticSynonymsProcessor.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nlpcraft.nlp.entity.parser.impl.semantic
+package org.apache.nlpcraft.nlp.entity.parser.semantic.impl
 
 import com.fasterxml.jackson.databind.*
 import com.fasterxml.jackson.dataformat.yaml.*
@@ -23,31 +23,28 @@ import com.typesafe.scalalogging.LazyLogging
 import org.apache.nlpcraft.*
 import org.apache.nlpcraft.internal.makro.NCMacroParser
 import org.apache.nlpcraft.internal.util.NCUtils
-import org.apache.nlpcraft.nlp.entity.parser.impl.semantic.*
-import org.apache.nlpcraft.nlp.entity.parser.impl.semantic.NCSemanticChunkKind.*
 import org.apache.nlpcraft.nlp.entity.parser.semantic.*
+import org.apache.nlpcraft.nlp.entity.parser.semantic.impl.NCSemanticChunkKind.*
 
 import java.io.InputStream
 import java.util
-import java.util.Set as JSet
 import java.util.regex.*
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
-import scala.jdk.CollectionConverters.*
 
 /**
   *
   * @param elementId
   * @param value
   */
-private[impl] case class NCSemanticSynonymsElementData(elementId: String, value: Option[String])
+private[semantic] case class NCSemanticSynonymsElementData(elementId: String, value: Option[String])
 
 /**
   *
   * @param textSynonyms
   * @param mixedSynonyms
   */
-private[impl] case class NCSemanticSynonymsHolder(
+private[semantic] case class NCSemanticSynonymsHolder(
     textSynonyms: Map[String, Set[NCSemanticSynonymsElementData]],
     mixedSynonyms: Map[Int, Map[String, Seq[NCSemanticSynonym]]]
 )
@@ -55,7 +52,7 @@ private[impl] case class NCSemanticSynonymsHolder(
 /**
   *
   */
-private[impl] object NCSemanticSynonymsProcessor extends LazyLogging:
+private[semantic] object NCSemanticSynonymsProcessor extends LazyLogging:
     private final val SUSP_SYNS_CHARS = Seq("?", "*", "+")
     private final val REGEX_FIX = "//"
     private final val ID_REGEX = "^[_a-zA-Z]+[a-zA-Z0-9:\\-_]*$"
@@ -79,7 +76,7 @@ private[impl] object NCSemanticSynonymsProcessor extends LazyLogging:
             if hasNullOrEmpty(macros.keySet) then E("Some macro names are null or empty.")
             if hasNullOrEmpty(macros.values) then E("Some macro bodies are null or empty.")
 
-            val set = elements.filter(_.getSynonyms != null).flatMap(_.getSynonyms.asScala) ++ macros.values
+            val set = elements.filter(_.getSynonyms != null).flatMap(_.getSynonyms) ++ macros.values
 
             for (makro <- macros.keys if !set.exists(_.contains(makro)))
                 logger.warn(s"Unused macro detected [macro=$makro]")
@@ -96,14 +93,14 @@ private[impl] object NCSemanticSynonymsProcessor extends LazyLogging:
       * @param elemId
       * @param valueName
       */
-    private def checkSynonyms(syns: JSet[String], elemId: String, valueName: Option[String] = None): Unit =
+    private def checkSynonyms(syns: Set[String], elemId: String, valueName: Option[String] = None): Unit =
         def mkDesc: String =
             val valuePart = if valueName.isDefined then s", value=${valueName.get}" else ""
             s"[id=$elemId$valuePart]"
 
         if syns != null then
-            if hasNullOrEmpty(syns.asScala) then E(s"Some synonyms are null or empty $mkDesc")
-            val susp = syns.asScala.filter(syn => !syn.contains("//") && SUSP_SYNS_CHARS.exists(susp => syn.contains(susp)))
+            if hasNullOrEmpty(syns) then E(s"Some synonyms are null or empty $mkDesc")
+            val susp = syns.filter(syn => !syn.contains("//") && SUSP_SYNS_CHARS.exists(susp => syn.contains(susp)))
             if susp.nonEmpty then
                 logger.warn(
                     s"Suspicious synonyms detected (use of ${SUSP_SYNS_CHARS.map(s => s"'$s'").mkString(", ")} chars) $mkDesc"
@@ -134,8 +131,8 @@ private[impl] object NCSemanticSynonymsProcessor extends LazyLogging:
 
             val vals = e.getValues
             if vals != null then
-                if hasNullOrEmpty(vals.keySet().asScala) then E(s"Some values names are null or empty [element=$elemId]")
-                for ((name, syns) <- vals.asScala)
+                if hasNullOrEmpty(vals.keySet) then E(s"Some values names are null or empty [element=$elemId]")
+                for ((name, syns) <- vals)
                     checkSynonyms(syns, elemId, Option(name))
 
     /**
@@ -152,7 +149,7 @@ private[impl] object NCSemanticSynonymsProcessor extends LazyLogging:
         tokParser: NCTokenParser,
         macroParser: NCMacroParser,
         elemId: String,
-        syns: JSet[String]
+        syns: Set[String]
     ): Seq[Seq[NCSemanticSynonymChunk]] =
         case class RegexHolder(text: String, var used: Boolean = false):
             private def stripSuffix(fix: String, s: String): String = s.slice(fix.length, s.length - fix.length)
@@ -171,7 +168,7 @@ private[impl] object NCSemanticSynonymsProcessor extends LazyLogging:
             if regexes.nonEmpty then (t.getStartCharIndex to t.getEndCharIndex).flatMap(regexes.get).to(LazyList).headOption
             else None
 
-        syns.asScala.flatMap(macroParser.expand).
+        syns.flatMap(macroParser.expand).
             map(syn => {
                 // Drops redundant spaces without any warnings.
                 val normSyn = syn.split(" ").map(_.strip).filter(_.nonEmpty)
@@ -190,7 +187,7 @@ private[impl] object NCSemanticSynonymsProcessor extends LazyLogging:
                         (start to end).foreach(regexes += _ -> r)
 
                 // Tokenizes synonym without regex chunks. Regex chunks are used as is, without tokenization.
-                tokParser.tokenize(normSyn.mkString(" ")).asScala.flatMap(tok =>
+                tokParser.tokenize(normSyn.mkString(" ")).flatMap(tok =>
                     findRegex(tok) match
                         case Some(regex) =>
                             if regex.used then None
@@ -225,9 +222,8 @@ private[impl] object NCSemanticSynonymsProcessor extends LazyLogging:
 
         if macros != null then for ((name, body) <- macros) macroParser.addMacro(name, body)
 
-        case class Holder(synonym: NCSemanticSynonym, elementId: String) {
+        case class Holder(synonym: NCSemanticSynonym, elementId: String):
             lazy val root: String = synonym.chunks.map(p => if p.isText then p.stem else p.text).mkString(" ")
-        }
 
         val buf = mutable.ArrayBuffer.empty[Holder]
 
@@ -244,7 +240,7 @@ private[impl] object NCSemanticSynonymsProcessor extends LazyLogging:
                 add(convertSynonyms(stemmer, tokParser, macroParser, elemId, e.getSynonyms).map(NCSemanticSynonym(_)))
 
             if e.getValues != null then
-                for ((name, syns) <- e.getValues.asScala)
+                for ((name, syns) <- e.getValues)
                     addSpec(name, value = name)
 
                     if syns != null then
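
The synonym machinery above keeps its behavior under the rename: synonyms are macro-expanded via NCMacroParser, chunks wrapped in // are treated as regexes (REGEX_FIX), element ids are checked against ID_REGEX, and '?', '*', '+' outside regex chunks only produce a warning. The sketch below shows an element plus the macros map that would travel with it in NCSemanticSourceData; the macro name, its body syntax and the element id are illustrative only.

    import org.apache.nlpcraft.NCPropertyMapAdapter                        // assumed package
    import org.apache.nlpcraft.nlp.entity.parser.semantic.NCSemanticElement

    // Illustrative only: the macro name/body and the element id are not from this commit.
    val macros: Map[String, String] = Map("<ACTION>" -> "{turn|switch}")
    val switchElem: NCSemanticElement = new NCPropertyMapAdapter with NCSemanticElement:
        override val getId: String = "ls:sw"            // must match ID_REGEX above
        override val getSynonyms: Set[String] = Set(
            "<ACTION> light",                           // macro reference, expanded by NCMacroParser
            "lamp //[0-9]+//"                           // regex chunk (REGEX_FIX delimiters), used as-is
        )
        override val getGroups: Set[String] = Set.empty // remaining accessors left empty for brevity
        override val getValues: Map[String, Set[String]] = Map.empty
        override val getProperties: Map[String, AnyRef] = Map.empty
    // Both pieces would be carried together as NCSemanticSourceData(macros, Seq(switchElem)).
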
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/package-info.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/package-info.java
deleted file mode 100644
index 129c1f04..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/package-info.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Contains built-in semantic entity parsers.
- */
-package org.apache.nlpcraft.nlp.entity.parser.semantic;
\ No newline at end of file
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnBracketsTokenEnricher.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnBracketsTokenEnricher.java
deleted file mode 100644
index e76fc1dd..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnBracketsTokenEnricher.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.nlp.token.enricher;
-
-import org.apache.nlpcraft.NCModelConfig;
-import org.apache.nlpcraft.NCRequest;
-import org.apache.nlpcraft.NCToken;
-import org.apache.nlpcraft.NCTokenEnricher;
-import org.apache.nlpcraft.nlp.token.enricher.impl.NCEnBracketsTokenEnricherImpl;
-
-import java.util.List;
-
-/**
- * TODO: enriches with <code>brackets</code> property.
- */
-public class NCEnBracketsTokenEnricher implements NCTokenEnricher {
-    private final NCEnBracketsTokenEnricherImpl impl = new NCEnBracketsTokenEnricherImpl();
-
-    @Override
-    public void enrich(NCRequest req, NCModelConfig cfg, List<NCToken> toks) {
-        assert impl != null;
-        impl.enrich(req, cfg, toks);
-    }
-
-    @Override
-    public void onStart(NCModelConfig cfg) {
-        impl.onStart(cfg);
-    }
-
-    @Override
-    public void onStop(NCModelConfig cfg) {
-        impl.onStop(cfg);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnBracketsTokenEnricherImpl.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnBracketsTokenEnricher.scala
similarity index 87%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnBracketsTokenEnricherImpl.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnBracketsTokenEnricher.scala
index a1ff627d..4a5b54ce 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnBracketsTokenEnricherImpl.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnBracketsTokenEnricher.scala
@@ -15,21 +15,19 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.token.enricher.impl
+package org.apache.nlpcraft.nlp.token.enricher
 
 import com.typesafe.scalalogging.LazyLogging
 import org.apache.nlpcraft.*
 
 import java.io.*
-import java.util.List as JList
 import scala.collection.mutable
-import scala.jdk.CollectionConverters.CollectionHasAsScala
 
 /**
   *
   */
-class NCEnBracketsTokenEnricherImpl extends NCTokenEnricher with LazyLogging:
-    override def enrich(req: NCRequest, cfg: NCModelConfig, toks: JList[NCToken]): Unit =
+class NCEnBracketsTokenEnricher extends NCTokenEnricher with LazyLogging:
+    override def enrich(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit =
         val stack = new java.util.Stack[String]()
         val map = mutable.HashMap.empty[NCToken, Boolean]
         var ok = true
@@ -37,7 +35,7 @@ class NCEnBracketsTokenEnricherImpl extends NCTokenEnricher with LazyLogging:
         def check(expected: String): Unit = if stack.empty() || stack.pop() != expected then ok = false
         def mark(t: NCToken): Unit = map += t -> !stack.isEmpty
 
-        for (t <- toks.asScala if ok)
+        for (t <- toks if ok)
             t.getText match
                 case "(" | "{" | "[" | "<" =>
                     mark(t)
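
With the Java wrapper gone, NCEnBracketsTokenEnricher is now used directly. A minimal usage sketch follows, under stated assumptions: `parser`, `req` and `cfg` are hypothetical placeholders for pipeline objects, tokenize(...) is assumed to return a Scala sequence of NCToken, and the "brackets" property name comes from the removed wrapper's javadoc.

    // Assumptions: `parser`, `req`, `cfg` are hypothetical pipeline placeholders.
    val bracketsEnricher = new NCEnBracketsTokenEnricher()
    val toks = parser.tokenize("turn off the light ( in the kitchen )").toList
    bracketsEnricher.enrich(req, cfg, toks)
    // Tokens inside a balanced pair carry the "brackets" flag (per the removed wrapper's javadoc).
    toks.foreach(t => println(s"${t.getText} -> brackets=${t.getOpt[Boolean]("brackets").getOrElse(false)}"))
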
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnDictionaryTokenEnricher.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnDictionaryTokenEnricher.java
deleted file mode 100644
index e75afdc3..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnDictionaryTokenEnricher.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.nlp.token.enricher;
-
-import org.apache.nlpcraft.NCModelConfig;
-import org.apache.nlpcraft.NCRequest;
-import org.apache.nlpcraft.NCToken;
-import org.apache.nlpcraft.NCTokenEnricher;
-import org.apache.nlpcraft.nlp.token.enricher.impl.NCEnDictionaryTokenEnricherImpl;
-
-import java.util.List;
-
-/**
- * TODO: enriches with <code>dict</code> property.
- */
-public class NCEnDictionaryTokenEnricher implements NCTokenEnricher {
-    private final NCEnDictionaryTokenEnricherImpl impl = new NCEnDictionaryTokenEnricherImpl();
-
-    @Override
-    public void enrich(NCRequest req, NCModelConfig cfg, List<NCToken> toks) {
-        assert impl != null;
-        impl.enrich(req, cfg, toks);
-    }
-
-    @Override
-    public void onStart(NCModelConfig cfg) {
-        impl.onStart(cfg);
-    }
-
-    @Override
-    public void onStop(NCModelConfig cfg) {
-        impl.onStop(cfg);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnDictionaryTokenEnricherImpl.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnDictionaryTokenEnricher.scala
similarity index 79%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnDictionaryTokenEnricherImpl.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnDictionaryTokenEnricher.scala
index ae804d0c..221606bb 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnDictionaryTokenEnricherImpl.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnDictionaryTokenEnricher.scala
@@ -15,23 +15,21 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.token.enricher.impl
+package org.apache.nlpcraft.nlp.token.enricher
 
 import org.apache.nlpcraft.*
 import org.apache.nlpcraft.internal.util.NCUtils
 
-import java.util.List as JList
-
 /**
   *
   */
-class NCEnDictionaryTokenEnricherImpl extends NCTokenEnricher:
+class NCEnDictionaryTokenEnricher extends NCTokenEnricher:
     private var dict: Set[String] = _
 
     init()
 
     private def init(): Unit = dict = NCUtils.readResource("moby/354984si.ngl", "iso-8859-1").toSet
-    private def getLemma(t: NCToken): String = t.getOpt("lemma").orElseThrow(() => throw new NCException("Lemma not found in token."))
+    private def getLemma(t: NCToken): String = t.getOpt("lemma").getOrElse(throw new NCException("Lemma not found in token."))
 
-    override def enrich(req: NCRequest, cfg: NCModelConfig, toks: JList[NCToken]): Unit =
-        toks.forEach(t => t.put("dict", dict.contains(getLemma(t))))
+    override def enrich(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit =
+        toks.foreach(t => t.put("dict", dict.contains(getLemma(t))))
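
The dictionary enricher keeps its lemma dependency: getLemma above throws NCException when the "lemma" property is missing, so a lemma enricher must run first. A short sketch, with `parser`, `lemmaEnricher`, `req` and `cfg` as hypothetical pipeline placeholders:

    // Hypothetical placeholders: parser, lemmaEnricher, req, cfg.
    val dictEnricher = new NCEnDictionaryTokenEnricher()
    val toks = parser.tokenize("the sky is blue").toList
    lemmaEnricher.enrich(req, cfg, toks)      // must populate the "lemma" property first
    dictEnricher.enrich(req, cfg, toks)       // marks each token with the "dict" flag
    toks.foreach(t => println(s"${t.getText} -> dict=${t.getOpt[Boolean]("dict").getOrElse(false)}"))
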
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnQuotesTokenEnricher.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnQuotesTokenEnricher.java
deleted file mode 100644
index ac0aab87..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnQuotesTokenEnricher.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.nlp.token.enricher;
-
-import org.apache.nlpcraft.NCModelConfig;
-import org.apache.nlpcraft.NCRequest;
-import org.apache.nlpcraft.NCToken;
-import org.apache.nlpcraft.NCTokenEnricher;
-import org.apache.nlpcraft.nlp.token.enricher.impl.NCEnQuotesTokenEnricherImpl;
-
-import java.util.List;
-
-/**
- * TODO: enriches with <code>quoted</code> property.
- */
-public class NCEnQuotesTokenEnricher implements NCTokenEnricher {
-    private final NCEnQuotesTokenEnricherImpl impl = new NCEnQuotesTokenEnricherImpl();
-
-    @Override
-    public void enrich(NCRequest req, NCModelConfig cfg, List<NCToken> toks) {
-        assert impl != null;
-        impl.enrich(req, cfg, toks);
-    }
-
-    @Override
-    public void onStart(NCModelConfig cfg) {
-        impl.onStart(cfg);
-    }
-
-    @Override
-    public void onStop(NCModelConfig cfg) {
-        impl.onStop(cfg);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnQuotesTokenEnricherImpl.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnQuotesTokenEnricher.scala
similarity index 81%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnQuotesTokenEnricherImpl.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnQuotesTokenEnricher.scala
index c9cda186..9ea12182 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnQuotesTokenEnricherImpl.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnQuotesTokenEnricher.scala
@@ -15,24 +15,20 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.token.enricher.impl
+package org.apache.nlpcraft.nlp.token.enricher
 
 import com.typesafe.scalalogging.LazyLogging
 import org.apache.nlpcraft.*
 
-import java.util.List as JList
-import scala.jdk.CollectionConverters.*
-
 /**
   *
   */
-class NCEnQuotesTokenEnricherImpl extends NCTokenEnricher with LazyLogging:
+class NCEnQuotesTokenEnricher extends NCTokenEnricher with LazyLogging:
     private final val Q_POS: Set[String] = Set("``", "''")
-    private def getPos(t: NCToken): String = t.getOpt("pos").orElseThrow(() => throw new NCException("POS not found in token."))
+    private def getPos(t: NCToken): String = t.getOpt("pos").getOrElse(throw new NCException("POS not found in token."))
     private def isQuote(t: NCToken): Boolean = Q_POS.contains(getPos(t))
 
-    override def enrich(req: NCRequest, cfg: NCModelConfig, toksList: JList[NCToken]): Unit =
-        val toks = toksList.asScala
+    override def enrich(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit =
         val quotes = toks.filter(isQuote)
 
         // Start and end quote can be different ("a` processed as valid)
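
The quotes enricher has the same kind of prerequisite: getPos above throws when the "pos" property is absent, so a POS enricher must run before it; the "quoted" property name comes from the removed wrapper's javadoc. A sketch reusing the placeholders from the previous examples:

    // req, cfg, toks as in the sketches above; a POS enricher must have set "pos" already.
    val quotesEnricher = new NCEnQuotesTokenEnricher()
    quotesEnricher.enrich(req, cfg, toks)
    toks.filter(_.getOpt[Boolean]("quoted").getOrElse(false)).foreach(t => println(t.getText))
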
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnStopWordsTokenEnricher.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnStopWordsTokenEnricher.java
deleted file mode 100644
index 395ade90..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnStopWordsTokenEnricher.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.nlp.token.enricher;
-
-import org.apache.nlpcraft.NCModelConfig;
-import org.apache.nlpcraft.NCRequest;
-import org.apache.nlpcraft.NCToken;
-import org.apache.nlpcraft.NCTokenEnricher;
-import org.apache.nlpcraft.nlp.token.enricher.impl.NCEnStopWordsTokenEnricherImpl;
-
-import java.util.List;
-import java.util.Set;
-
-/**
- * TODO: enriches with <code>stopword</code> property.
- */
-public class NCEnStopWordsTokenEnricher implements NCTokenEnricher {
-    private final NCEnStopWordsTokenEnricherImpl impl;
-
-    /**
-     *
-     */
-    public NCEnStopWordsTokenEnricher(Set<String> addSw, Set<String> exclSw) {
-        impl = new NCEnStopWordsTokenEnricherImpl(addSw, exclSw);
-    }
-
-    public NCEnStopWordsTokenEnricher() {
-        impl = new NCEnStopWordsTokenEnricherImpl(null, null);
-    }
-
-    @Override
-    public void enrich(NCRequest req, NCModelConfig cfg, List<NCToken> toks) {
-        assert impl != null;
-        impl.enrich(req, cfg, toks);
-    }
-
-    @Override
-    public void onStart(NCModelConfig cfg) {
-        impl.onStart(cfg);
-    }
-
-    @Override
-    public void onStop(NCModelConfig cfg) {
-        impl.onStop(cfg);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnStopWordsTokenEnricherImpl.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnStopWordsTokenEnricher.scala
similarity index 95%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnStopWordsTokenEnricherImpl.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnStopWordsTokenEnricher.scala
index efcf04fb..4955e99c 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnStopWordsTokenEnricherImpl.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnStopWordsTokenEnricher.scala
@@ -15,26 +15,23 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.nlp.token.enricher.impl
+package org.apache.nlpcraft.nlp.token.enricher
 
 import com.typesafe.scalalogging.LazyLogging
 import opennlp.tools.stemmer.PorterStemmer
 import org.apache.nlpcraft.*
 import org.apache.nlpcraft.internal.util.NCUtils
-import org.apache.nlpcraft.nlp.entity.parser.semantic.NCSemanticStemmer
 
 import java.io.*
 import java.util
-import java.util.{List as JList, Set as JSet}
 import scala.annotation.tailrec
-import scala.collection.{IndexedSeq, Seq, mutable}
+import scala.collection.*
 import scala.concurrent.ExecutionContext
-import scala.jdk.CollectionConverters.*
 
 /**
   *
   */
-object NCEnStopWordsTokenEnricherImpl:
+object NCEnStopWordsTokenEnricher:
     // Condition types.
     type Wildcard = (String, String)
     type Word = String
@@ -98,13 +95,13 @@ object NCEnStopWordsTokenEnricherImpl:
         "percent"
     )
 
-    private def getPos(t: NCToken): String = t.getOpt("pos").orElseThrow(() => throw new NCException(s"POS not found in token: ${t.keysSet()}"))
-    private def getLemma(t: NCToken): String = t.getOpt("lemma").orElseThrow(() => throw new NCException(s"Lemma not found in token: ${t.keysSet()}"))
+    private def getPos(t: NCToken): String = t.getOpt("pos").getOrElse(throw new NCException(s"POS not found in token: ${t.keysSet}"))
+    private def getLemma(t: NCToken): String = t.getOpt("lemma").getOrElse(throw new NCException(s"Lemma not found in token: ${t.keysSet}"))
     private def isQuote(t: NCToken): Boolean = Q_POS.contains(getPos(t))
     private def toLemmaKey(toks: Seq[NCToken]): String = toks.map(getLemma).mkString(" ")
     private def toValueKey(toks: Seq[NCToken]): String = toks.map(_.getText.toLowerCase).mkString(" ")
     private def toOriginalKey(toks: Seq[NCToken]): String = toks.map(_.getText).mkString(" ")
-    private def isStopWord(t: NCToken): Boolean = t.getOpt[Boolean]("stopword").orElse(false)
+    private def isStopWord(t: NCToken): Boolean = t.getOpt[Boolean]("stopword").getOrElse(false)
 
     /**
       * Gets all sequential permutations of tokens in this NLP sentence.
@@ -113,7 +110,7 @@ object NCEnStopWordsTokenEnricherImpl:
       * @param tokens Tokens.
       * @param maxLen Maximum number of tokens in the sequence.
       */
-    private[impl] def tokenMixWithStopWords(tokens: Seq[NCToken], maxLen: Int = Integer.MAX_VALUE): Seq[Seq[NCToken]] =
+    private[enricher] def tokenMixWithStopWords(tokens: Seq[NCToken], maxLen: Int = Integer.MAX_VALUE): Seq[Seq[NCToken]] =
         /**
           * Gets all combinations for sequence of mandatory tokens with stop-words and without.
           *
@@ -164,14 +161,14 @@ object NCEnStopWordsTokenEnricherImpl:
     private def tokenMix(toks: Seq[NCToken], maxLen: Int = Integer.MAX_VALUE): Seq[Seq[NCToken]] =
         (for (n <- toks.length until 0 by -1 if n <= maxLen) yield toks.sliding(n)).flatten
 
-import org.apache.nlpcraft.nlp.token.enricher.impl.NCEnStopWordsTokenEnricherImpl.*
+import org.apache.nlpcraft.nlp.token.enricher.NCEnStopWordsTokenEnricher.*
 
 /**
   *
   * @param addStopsSet
   * @param exclStopsSet
   */
-class NCEnStopWordsTokenEnricherImpl(addStopsSet: JSet[String], exclStopsSet: JSet[String]) extends NCTokenEnricher with LazyLogging:
+class NCEnStopWordsTokenEnricher(addStopsSet: Set[String] = Set.empty, exclStopsSet: Set[String] = Set.empty) extends NCTokenEnricher with LazyLogging:
     private final val stemmer = new PorterStemmer
 
     private var addStems: Set[String] = _
@@ -280,8 +277,8 @@ class NCEnStopWordsTokenEnricherImpl(addStopsSet: JSet[String], exclStopsSet: JS
       * 
       */
     private def init(): Unit =
-        addStems = if addStopsSet == null then Set.empty else addStopsSet.asScala.toSet.map(stem)
-        exclStems = if exclStopsSet == null then Set.empty else exclStopsSet.asScala.toSet.map(stem)
+        addStems = if addStopsSet == null then Set.empty else addStopsSet.map(stem)
+        exclStems = if exclStopsSet == null then Set.empty else exclStopsSet.map(stem)
 
         def check(name: String, set: Set[String]): Unit =
             if set.exists(_.exists(_.isWhitespace)) then E(s"$name contain a string with whitespaces.")
@@ -296,8 +293,10 @@ class NCEnStopWordsTokenEnricherImpl(addStopsSet: JSet[String], exclStopsSet: JS
 
         // Stemmatization is done already by generator.
         NCUtils.execPar(
-            () => firstWords = read("stopwords/first_words.txt.gz"),
-            () => nounWords = read("stopwords/noun_words.txt.gz")
+            Seq(
+                () => firstWords = read("stopwords/first_words.txt.gz"),
+                () => nounWords = read("stopwords/noun_words.txt.gz")
+            )
         )(ExecutionContext.Implicits.global)
 
         // Case sensitive.
@@ -319,7 +318,6 @@ class NCEnStopWordsTokenEnricherImpl(addStopsSet: JSet[String], exclStopsSet: JS
         // 1. Prepares accumulation data structure.
         enum WordForm:
             case STEM, LEM, ORIG
-
         import WordForm.*
 
         class Condition[T]:
@@ -362,7 +360,7 @@ class NCEnStopWordsTokenEnricherImpl(addStopsSet: JSet[String], exclStopsSet: JS
 
         // 2. Accumulates data of each parsed line.
         for (line <- lines)
-            def throwError(msg: String): Unit = E(s"Invalid stop word configuration [line=$line, reason=$msg]") 
+            def throwError(msg: String): Unit = E(s"Invalid stop word configuration [line=$line, reason=$msg]")
 
             var s = line.trim
 
@@ -506,9 +504,7 @@ class NCEnStopWordsTokenEnricherImpl(addStopsSet: JSet[String], exclStopsSet: JS
 
         processCommonStops0(ns)
 
-    override def enrich(req: NCRequest, cfg: NCModelConfig, toksList: JList[NCToken]): Unit =
-        val toks = toksList.asScala
-
+    override def enrich(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): Unit =
         // Stop words and exceptions caches for this sentence.
         val cacheSw = mutable.HashMap.empty[Seq[NCToken], Boolean]
         val cacheEx = mutable.HashMap.empty[Seq[NCToken], Boolean]
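
The two constructors of the old Java wrapper collapse into a single Scala constructor with defaulted Set parameters, so both the no-arg and the customized form remain available. A sketch follows; the word lists are illustrative, and "pos"/"lemma" must already be set by earlier enrichers.

    // Illustrative word lists; req, cfg, toks are pipeline placeholders as in the sketches above.
    val defaultSw = new NCEnStopWordsTokenEnricher()
    val customSw = new NCEnStopWordsTokenEnricher(
        addStopsSet = Set("please"),              // extra words to force-mark as stop words
        exclStopsSet = Set("no")                  // words never to mark as stop words
    )
    customSw.enrich(req, cfg, toks)               // sets the "stopword" flag on each token
    toks.foreach(t => println(s"${t.getText} -> stopword=${t.getOpt[Boolean]("stopword").getOrElse(false)}"))
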
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnSwearWordsTokenEnricher.java b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnSwearWordsTokenEnricher.java
deleted file mode 100644
index 7247554d..00000000
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnSwearWordsTokenEnricher.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nlpcraft.nlp.token.enricher;
-
-import org.apache.nlpcraft.NCModelConfig;
-import org.apache.nlpcraft.NCRequest;
-import org.apache.nlpcraft.NCToken;
-import org.apache.nlpcraft.NCTokenEnricher;
-import org.apache.nlpcraft.nlp.token.enricher.impl.NCEnSwearWordsTokenEnricherImpl;
-
-import java.util.List;
-import java.util.Objects;
-
-/**
- * TODO: enriches with <code>swear</code> property.
- */
-public class NCEnSwearWordsTokenEnricher implements NCTokenEnricher {
-    private final NCEnSwearWordsTokenEnricherImpl impl;
-
-    /**
-     * TODO: swear_words.txt - describe where it can be downloaded.
-     * 
-     * @param mdlSrc
-     */
-    public NCEnSwearWordsTokenEnricher(String mdlSrc) {
-        Objects.requireNonNull(mdlSrc, "Swear words model file cannot be null.");
-
-        impl = new NCEnSwearWordsTokenEnricherImpl(mdlSrc);
-    }
-
-    @Override
-    public void enrich(NCRequest req, NCModelConfig cfg, List<NCToken> toks) {
-        assert impl != null;
-        impl.enrich(req, cfg, toks);
-    }
-
-    @Override
-    public void onStart(NCModelConfig cfg) {
-        impl.onStart(cfg);
-    }
-
-    @Override
-    public void onStop(NCModelConfig cfg) {
-        impl.onStop(cfg);
-    }
-}
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnSwearWordsTokenEnricherImpl.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnSwearWordsTokenEnricher.scala
similarity index 81%
rename from nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/impl/NCEnSwearWordsTokenEnricherImpl.scala
rename to nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnSwearWordsTokenEnricher.scala
... 2230 lines suppressed ...