Posted to commits@nlpcraft.apache.org by ar...@apache.org on 2020/09/17 09:36:35 UTC

[incubator-nlpcraft] branch NLPCRAFT-128 updated (f3c305c -> 74b573e)

This is an automated email from the ASF dual-hosted git repository.

aradzinski pushed a change to branch NLPCRAFT-128
in repository https://gitbox.apache.org/repos/asf/incubator-nlpcraft.git.


    from f3c305c  WIP.
     new 892f811  WIP.
     new 74b573e  WIP.

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../nlp/core/stanford/NCStanfordCoreManager.scala  |  14 +-
 .../nlp/core/stanford/NCStanfordNerEnricher.scala  |  19 ++-
 .../nlp/core/stanford/NCStanfordParser.scala       |  13 +-
 .../org/apache/nlpcraft/common/NCService.scala     |  53 ++++---
 .../common/extcfg/NCExternalConfigManager.scala    |  15 +-
 .../common/nlp/core/NCNlpCoreManager.scala         |  21 ++-
 .../nlp/core/opennlp/NCOpenNlpTokenizer.scala      |  25 +++-
 .../common/nlp/dict/NCDictionaryManager.scala      |  12 +-
 .../common/nlp/numeric/NCNumericManager.scala      |  10 +-
 .../org/apache/nlpcraft/common/util/NCUtils.scala  |  19 +--
 .../model/intent/impl/NCIntentSolver.scala         |   4 +-
 .../test/impl/NCTestAutoModelValidatorImpl.scala   |   5 +-
 .../nlpcraft/probe/mgrs/NCProbeSynonym.scala       |   5 +-
 .../nlpcraft/probe/mgrs/cmd/NCCommandManager.scala |  18 +++
 .../probe/mgrs/conn/NCConnectionManager.scala      |   4 +-
 .../mgrs/conversation/NCConversationManager.scala  |  13 +-
 .../probe/mgrs/deploy/NCDeployManager.scala        | 159 ++++++++++-----------
 .../mgrs/dialogflow/NCDialogFlowManager.scala      |  13 +-
 .../probe/mgrs/lifecycle/NCLifecycleManager.scala  |  22 ++-
 .../nlpcraft/probe/mgrs/model/NCModelManager.scala |  32 +++--
 .../nlpcraft/probe/mgrs/nlp/NCProbeEnricher.scala  |   3 +-
 .../probe/mgrs/nlp/NCProbeEnrichmentManager.scala  |  35 +++--
 .../dictionary/NCDictionaryEnricher.scala          |  23 +--
 .../mgrs/nlp/enrichers/limit/NCLimitEnricher.scala |  16 ++-
 .../mgrs/nlp/enrichers/model/NCModelEnricher.scala |  13 +-
 .../enrichers/relation/NCRelationEnricher.scala    |  14 +-
 .../mgrs/nlp/enrichers/sort/NCSortEnricher.scala   |  19 ++-
 .../enrichers/stopword/NCStopWordEnricher.scala    |  37 +++--
 .../suspicious/NCSuspiciousNounsEnricher.scala     |  13 +-
 .../mgrs/nlp/validate/NCValidateManager.scala      |  17 ++-
 .../nlpcraft/server/company/NCCompanyManager.scala |  17 ++-
 .../server/feedback/NCFeedbackManager.scala        |  15 +-
 .../apache/nlpcraft/server/geo/NCGeoManager.scala  |  26 ++--
 .../geo/tools/NCGeoSyntheticNamesGenerator.scala   |   3 +-
 .../lifecycle/NCServerLifecycleManager.scala       |  21 ++-
 .../server/nlp/core/NCNlpNerEnricher.scala         |   4 +-
 .../server/nlp/core/NCNlpServerManager.scala       |  13 +-
 .../nlp/core/google/NCGoogleNerEnricher.scala      |  23 ++-
 .../nlp/core/opennlp/NCOpenNlpNerEnricher.scala    |  25 +++-
 .../server/nlp/core/opennlp/NCOpenNlpParser.scala  |  19 ++-
 .../server/nlp/core/spacy/NCSpaCyNerEnricher.scala |  21 ++-
 .../nlp/enrichers/NCServerEnrichmentManager.scala  |  14 +-
 .../nlp/enrichers/basenlp/NCBaseNlpEnricher.scala  |  26 +++-
 .../coordinate/NCCoordinatesEnricher.scala         |  19 ++-
 .../server/nlp/enrichers/date/NCDateEnricher.scala | 131 +++++++++--------
 .../server/nlp/enrichers/geo/NCGeoEnricher.scala   |  21 ++-
 .../nlp/enrichers/numeric/NCNumericEnricher.scala  |  23 ++-
 .../nlp/enrichers/quote/NCQuoteEnricher.scala      |  23 ++-
 .../enrichers/stopword/NCStopWordEnricher.scala    |  16 ++-
 .../server/nlp/preproc/NCPreProcessManager.scala   |  17 ++-
 .../server/nlp/spell/NCSpellCheckManager.scala     |  19 ++-
 .../server/nlp/wordnet/NCWordNetManager.scala      |  16 ++-
 .../nlpcraft/server/probe/NCProbeManager.scala     |  18 +--
 .../server/proclog/NCProcessLogManager.scala       |  13 +-
 .../nlpcraft/server/query/NCQueryManager.scala     |  21 ++-
 .../nlpcraft/server/rest/NCBasicRestApi.scala      |  16 +--
 .../nlpcraft/server/rest/NCRestManager.scala       |  15 +-
 .../apache/nlpcraft/server/sql/NCSqlManager.scala  |  16 ++-
 .../server/sugsyn/NCSuggestSynonymManager.scala    |  15 +-
 .../apache/nlpcraft/server/tx/NCTxManager.scala    |  29 ++--
 .../nlpcraft/server/user/NCUserManager.scala       |  13 +-
 61 files changed, 896 insertions(+), 438 deletions(-)


[incubator-nlpcraft] 02/02: WIP.

Posted by ar...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

aradzinski pushed a commit to branch NLPCRAFT-128
in repository https://gitbox.apache.org/repos/asf/incubator-nlpcraft.git

commit 74b573e9798985907c1c1c6b1759386569d12a81
Author: Aaron Radzinski <ar...@datalingvo.com>
AuthorDate: Thu Sep 17 02:36:18 2020 -0700

    WIP.
---
 .../org/apache/nlpcraft/common/NCService.scala     |   4 +-
 .../org/apache/nlpcraft/common/util/NCUtils.scala  |  19 ++--
 .../model/intent/impl/NCIntentSolver.scala         |   4 +-
 .../nlpcraft/probe/mgrs/NCProbeSynonym.scala       |   5 +-
 .../nlpcraft/probe/mgrs/cmd/NCCommandManager.scala |  18 +++
 .../probe/mgrs/deploy/NCDeployManager.scala        | 126 ++++++++++-----------
 .../nlpcraft/server/query/NCQueryManager.scala     |   4 +-
 .../nlpcraft/server/rest/NCBasicRestApi.scala      |  16 +--
 8 files changed, 104 insertions(+), 92 deletions(-)

diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/NCService.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/NCService.scala
index 33b5559..571c501 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/NCService.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/NCService.scala
@@ -48,7 +48,7 @@ abstract class NCService extends LazyLogging with NCOpenCensusTrace {
     @throws[NCE]
     def start(parent: Span = null): NCService =
         // Make sure this is not called by subclass.
-        throw new AssertionError()
+        throw new AssertionError(s"NCService#start() should not be called directly in '${U.cleanClassName(getClass)}' service.")
 
     /**
       * Stops this service.
@@ -58,7 +58,7 @@ abstract class NCService extends LazyLogging with NCOpenCensusTrace {
     @throws[NCE]
     def stop(parent: Span = null): Unit =
         // Make sure this is not called by subclass.
-        throw new AssertionError()
+        throw new AssertionError(s"NCService#stop() should not be called directly in '${U.cleanClassName(getClass)}' service.")
 
     /**
      * Gets name of this service (as its class name).
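
The two hunks above only sharpen the failure mode: a service that leaves start()/stop()
unoverridden now fails with a message naming the offending class instead of a bare
AssertionError. A hypothetical illustration (the service name is invented and the message
text is approximate):

    import org.apache.nlpcraft.common.NCService

    // Hypothetical service that forgot to override start()/stop().
    object BrokenManager extends NCService

    // BrokenManager.start() now throws roughly:
    //   java.lang.AssertionError: NCService#start() should not be called
    //   directly in 'BrokenManager' service.
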
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/util/NCUtils.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/util/NCUtils.scala
index 51a2b28..1491a9a 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/util/NCUtils.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/util/NCUtils.scala
@@ -1371,19 +1371,20 @@ object NCUtils extends LazyLogging {
         while (x != null) {
             var first = true
 
-            val msg = x.getLocalizedMessage
+            var msg = x.getLocalizedMessage
 
-            if (msg != null) {
-                x.getLocalizedMessage.split("\n").foreach(line ⇒ {
-                    val msg = s"${" " * indent}${if (first) s"$ansiRedFg+-$ansiReset" else "  "}${line.trim}"
+            if (msg == null)
+                msg = cleanClassName(x.getClass)
 
-                    if (err) logger.error(msg) else logger.warn(msg)
+            msg.split("\n").foreach(line ⇒ {
+                val msg = s"${" " * indent}${if (first) s"$ansiRedFg+-$ansiReset" else "  "}${line.trim}"
 
-                    first = false
-                })
+                if (err) logger.error(msg) else logger.warn(msg)
 
-                indent += INDENT
-            }
+                first = false
+            })
+
+            indent += INDENT
 
             x = x.getCause
         }
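
The NCUtils hunk above changes the exception-chain logging so that a throwable with a null
localized message is no longer skipped: its class name is logged instead, and the indent
still advances for the next cause. A standalone sketch of that fallback, with a stand-in
for the real cleanClassName helper:

    object NullMessageSketch extends App {
        // Stand-in for U.cleanClassName - an assumption, not the real helper.
        def cleanClassName(cls: Class[_]): String = cls.getName.stripSuffix("$")

        val x: Throwable = new NullPointerException() // getLocalizedMessage == null

        var msg = x.getLocalizedMessage

        if (msg == null)
            msg = cleanClassName(x.getClass)

        println(msg) // Prints "java.lang.NullPointerException" - the cause frame is kept.
    }
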
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/model/intent/impl/NCIntentSolver.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/model/intent/impl/NCIntentSolver.scala
index 4dbedc4..322856b 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/model/intent/impl/NCIntentSolver.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/model/intent/impl/NCIntentSolver.scala
@@ -27,7 +27,7 @@ import org.apache.nlpcraft.model.impl.NCVariantImpl
 import org.apache.nlpcraft.model.intent.utils.NCDslIntent
 import org.apache.nlpcraft.model.{NCContext, NCIntentMatch, NCIntentSkip, NCModel, NCRejection, NCResult, NCToken, NCVariant}
 import org.apache.nlpcraft.probe.mgrs.dialogflow.NCDialogFlowManager
-
+import org.apache.nlpcraft.common.ansi.NCAnsiColor._
 import scala.collection.JavaConverters._
 
 /**
@@ -151,7 +151,7 @@ class NCIntentSolver(intents: List[(NCDslIntent/*Intent*/, NCIntentMatch ⇒ NCR
                 if (cbRes.getIntentId == null)
                     cbRes.setIntentId(res.intentId)
                     
-                logger.info(s"Intent '${res.intentId}' for variant #${res.variantIdx + 1} selected as the **winning match**.")
+                logger.info(s"Intent '${res.intentId}' for variant #${res.variantIdx + 1} selected as the $ansiRedFg<<best match>>.$ansiReset")
 
                 NCDialogFlowManager.addMatchedIntent(res.intentId, req.getUser.getId, ctx.getModel.getId, span)
                 
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/NCProbeSynonym.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/NCProbeSynonym.scala
index 8e3258e..6ed8f44 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/NCProbeSynonym.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/NCProbeSynonym.scala
@@ -29,7 +29,7 @@ import scala.collection.mutable.ArrayBuffer
   *     In this case chunks contain the element ID.
   * @param isValueName Is this an implicit value name synonym?
   *     In this case chunks contain value name.
-  * @param isDirect Direct or permutated synonym flag.
+  * @param isDirect Direct or permuted synonym flag.
   * @param value Optional value name if this is a value synonym.
   */
 class NCProbeSynonym(
@@ -115,6 +115,7 @@ class NCProbeSynonym(
                             r.matcher(get0((t: Token) ⇒ t.origText, (w: Word) ⇒ w.origText)).matches() ||
                             r.matcher(get0((t: Token) ⇒ t.normText, (w: Word) ⇒ w.normText)).matches()
                         case DSL ⇒ get0((t: Token) ⇒ chunk.dslPred.apply(t), (_: Word) ⇒ false)
+
                         case _ ⇒ throw new AssertionError()
                     }
             }
@@ -130,6 +131,7 @@ class NCProbeSynonym(
             isValueSynonym match {
                 case true if !that.isValueSynonym ⇒ 1
                 case false if that.isValueSynonym ⇒ -1
+
                 case _ ⇒ 0
             }
         
@@ -140,6 +142,7 @@ class NCProbeSynonym(
                 case true if !that.isElementId ⇒ 1
                 case false if that.isElementId ⇒ -1
                 case true if that.isElementId ⇒ 0
+
                 case _ ⇒ // None are element IDs.
                     if (length > that.length)
                         1
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/cmd/NCCommandManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/cmd/NCCommandManager.scala
index d481a22..0e9c333 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/cmd/NCCommandManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/cmd/NCCommandManager.scala
@@ -40,6 +40,24 @@ object NCCommandManager extends NCService {
     private final val GSON = new Gson()
 
     /**
+     * Starts this service.
+     *
+     * @param parent Optional parent span.
+     */
+    override def start(parent: Span): NCService = startScopedSpan("start", parent) { _ ⇒
+        ackStart()
+    }
+
+    /**
+     * Stops this service.
+     *
+     * @param parent Optional parent span.
+     */
+    override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
+        ackStop()
+    }
+
+    /**
       *
       * @param msg Server message to process.
       * @param parent Optional parent span.
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala
index 9ba109d..aa6356c 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala
@@ -173,7 +173,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
                     throw new NCE(s"Model property cannot contain a string with whitespaces [" +
                         s"mdlId=$mdlId, " +
                         s"name=$name, " +
-                        s"word=$word" +
+                        s"word='$word'" +
                     s"]")
                 else
                     NCNlpCoreManager.stem(word)
@@ -301,7 +301,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
 
                 // IDs can only be simple strings.
                 if (chunks.exists(_.kind != TEXT))
-                    throw new NCE(s"Invalid ID format [" +
+                    throw new NCE(s"Invalid element or value ID format [" +
                         s"mdlId=$mdlId, " +
                         s"id=$id" +
                     s"]")
@@ -309,95 +309,83 @@ object NCDeployManager extends NCService with DecorateAsScala {
                 chunks
             }
 
-            // Add element ID as a synonyms (dups ignored).
-            val idChunks = Seq(chunkIdSplit(elmId))
-
-            idChunks.distinct.foreach(ch ⇒ addSynonym(isElementId = true, isValueName = false, null, ch))
-
-            // Add straight element synonyms (dups printed as warnings).
-            val synsChunks = for (syn ← elm.getSynonyms.asScala.flatMap(parser.expand)) yield chunkSplit(syn)
-
-            if (U.containsDups(synsChunks.flatten.toList))
-                logger.warn(s"Model element synonym dups found [" +
-                    s"mdlId=$mdlId, " +
-                    s"elmId=$elmId, " +
-                    s"synonym=${synsChunks.diff(synsChunks.distinct).distinct.map(_.mkString(",")).mkString(";")}" +
-                s"]")
-
-            synsChunks.distinct.foreach(ch ⇒ addSynonym(isElementId = false, isValueName = false, null, ch))
+            // Add element ID as a synonym.
+            Seq(chunkIdSplit(elmId))
+                .distinct
+                .foreach(chunks ⇒ addSynonym(
+                    isElementId = true,
+                    isValueName = false,
+                    null,
+                    chunks
+                ))
+
+            // Add straight element synonyms.
+            (for (syn ← elm.getSynonyms.asScala.flatMap(parser.expand)) yield chunkSplit(syn))
+                .distinct
+                .foreach(chunks ⇒ addSynonym(
+                    isElementId = false,
+                    isValueName = false,
+                    null,
+                    chunks
+                ))
 
             val vals =
                 (if (elm.getValues != null) elm.getValues.asScala else Seq.empty) ++
-                    (if (elm.getValueLoader != null) elm.getValueLoader.load(elm).asScala else Seq.empty)
+                (if (elm.getValueLoader != null) elm.getValueLoader.load(elm).asScala else Seq.empty)
 
             // Add value synonyms.
-            val valNames = vals.map(_.getName).toList
-
-            if (U.containsDups(valNames))
-                logger.warn(s"Model element values names dups found [" +
-                    s"mdlId=$mdlId, " +
-                    s"elmId=$elmId, " +
-                    s"names=${valNames.diff(valNames.distinct).distinct.mkString(",")}" +
-                s"]")
-
             for (v ← vals.map(p ⇒ p.getName → p).toMap.values) {
-                val valId = v.getName
+                val valName = v.getName
                 val valSyns = v.getSynonyms.asScala
 
-                val idChunks = Seq(chunkIdSplit(valId))
+                val nameChunks = Seq(chunkIdSplit(valName))
 
-                // Add value name as a synonyms (dups ignored)
-                idChunks.distinct.foreach(ch ⇒ addSynonym(isElementId = false, isValueName = true, valId, ch))
+                // Add value name as a synonym.
+                nameChunks.distinct.foreach(chunks ⇒ addSynonym(
+                    isElementId = false,
+                    isValueName = true,
+                    valName,
+                    chunks
+                ))
 
-                // Add straight value synonyms (dups printed as warnings)
                 var skippedOneLikeName = false
 
-                val chunks =
-                    valSyns.flatMap(parser.expand).flatMap(valSyn ⇒ {
-                        val valSyns = chunkSplit(valSyn)
-
-                        if (idChunks.contains(valSyns) && !skippedOneLikeName) {
-                            skippedOneLikeName = true
+                val chunks = valSyns.flatMap(parser.expand).flatMap(valSyn ⇒ {
+                    val valSyns = chunkSplit(valSyn)
 
-                            None
-                        }
-                        else
-                            Some(valSyns)
-                    })
-
-                if (U.containsDups(chunks.toList))
-                    logger.warn(s"Model element value synonyms dups found [" +
-                        s"mdlId=$mdlId, " +
-                        s"elmId=$elmId, " +
-                        s"valId=$valId, " +
-                        s"synonym=${chunks.diff(chunks.distinct).distinct.map(_.mkString(",")).mkString(";")}" +
-                    s"]")
+                    if (nameChunks.contains(valSyns) && !skippedOneLikeName) {
+                        skippedOneLikeName = true
 
-                chunks.distinct.foreach(ch ⇒ addSynonym(isElementId = false, isValueName = false, valId, ch))
+                        None
+                    }
+                    else
+                        Some(valSyns)
+                })
+
+                chunks.distinct.foreach(chunks ⇒ addSynonym(
+                    isElementId = false,
+                    isValueName = false,
+                    valName,
+                    chunks
+                ))
             }
         }
 
-        val valLdrs = mutable.HashSet.empty[NCValueLoader]
-
+        // Discard value loaders.
         for (elm ← mdl.getElements.asScala) {
             val ldr = elm.getValueLoader
 
             if (ldr != null)
-                valLdrs += ldr
+                ldr.onDiscard()
         }
 
-        // Discard value loaders, if any.
-        for (ldr ← valLdrs)
-            ldr.onDiscard()
-
-        val allAliases =
-            syns
-                .flatMap(_.syn)
-                .groupBy(_.origText)
-                .map(x ⇒ (x._1, x._2.map(_.alias).filter(_ != null)))
-                .values
-                .flatten
-                .toList
+        val allAliases = syns
+            .flatMap(_.syn)
+            .groupBy(_.origText)
+            .map(x ⇒ (x._1, x._2.map(_.alias).filter(_ != null)))
+            .values
+            .flatten
+            .toList
 
         // Check for DSl alias uniqueness.
         if (U.containsDups(allAliases))
@@ -973,7 +961,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
         val dups = adds.intersect(excls)
 
         if (dups.nonEmpty)
-            throw new NCE(s"Duplicate stems detected in additional and excluded stopwords [" +
+            throw new NCE(s"Duplicate stems detected between additional and excluded stopwords [" +
                 s"mdlId=$mdlId, " +
                 s"dups=${dups.mkString(",")}" +
             s"]")
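
One behavioral note on the NCDeployManager hunks above: the old code collected value loaders
into a HashSet and discarded each distinct loader once, while the new per-element loop calls
onDiscard() for every element that references a loader, so a loader shared across elements may
now be discarded more than once. A minimal sketch of an idempotent discard, in plain Scala
rather than the NLPCraft loader interface:

    import java.util.concurrent.atomic.AtomicBoolean

    // Not the NLPCraft NCValueLoader API - only the discard side is sketched.
    trait Discardable {
        def onDiscard(): Unit
    }

    class SharedValueLoader extends Discardable {
        private val discarded = new AtomicBoolean(false)

        // Safe to call once per element that shares this loader.
        override def onDiscard(): Unit =
            if (discarded.compareAndSet(false, true)) {
                // Release caches, close connections, etc. - runs only once.
            }
    }
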
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/query/NCQueryManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/query/NCQueryManager.scala
index ae444a6..91d09a0 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/query/NCQueryManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/query/NCQueryManager.scala
@@ -267,7 +267,9 @@ object NCQueryManager extends NCService with NCIgniteInstance with NCOpenCensusS
                 tbl += (s"${ansiBlueFg}Agent$ansiReset", usrAgent.getOrElse("<n/a>"))
                 tbl += (s"${ansiBlueFg}Remote Address$ansiReset", rmtAddr.getOrElse("<n/a>"))
                 tbl += (s"${ansiBlueFg}Server Request ID$ansiReset", srvReqId)
-                tbl += (s"${ansiBlueFg}Data$ansiReset", data.getOrElse(""))
+
+                // TODO: need to pretty print data JSON
+                // tbl += (s"${ansiBlueFg}Data$ansiReset", data.getOrElse(""))
 
                 logger.info(s"New request received:\n$tbl")
 
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/rest/NCBasicRestApi.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/rest/NCBasicRestApi.scala
index 4adf4e7..6456738 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/rest/NCBasicRestApi.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/rest/NCBasicRestApi.scala
@@ -21,7 +21,7 @@ import akka.http.scaladsl.coding.Coders
 import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
 import akka.http.scaladsl.model.HttpMethods._
 import akka.http.scaladsl.model._
-import akka.http.scaladsl.model.headers.{`Access-Control-Allow-Credentials`, `Access-Control-Allow-Headers`, `Access-Control-Allow-Methods`, `Access-Control-Allow-Origin`}
+import akka.http.scaladsl.model.headers._
 import akka.http.scaladsl.server.Directives.{entity, _}
 import akka.http.scaladsl.server.{ExceptionHandler, RejectionHandler, Route}
 import com.google.gson.Gson
@@ -75,10 +75,10 @@ class NCBasicRestApi extends NCRestApi with LazyLogging with NCOpenCensusTrace w
     case class NotImplemented() extends NCE("Not implemented.")
 
     class InvalidArguments(msg: String) extends NCE(msg)
-    case class OutOfRangeField(fn: String, from: Number, to: Number) extends InvalidArguments(s"API field '$fn' value is out of range ($from, $to).")
-    case class TooLargeField(fn: String, max: Int) extends InvalidArguments(s"API field '$fn' value exceeded max length of $max.")
-    case class InvalidField(fn: String) extends InvalidArguments(s"API invalid field '$fn'")
-    case class EmptyField(fn: String) extends InvalidArguments(s"API field '$fn' value cannot be empty.")
+    case class OutOfRangeField(fn: String, from: Number, to: Number) extends InvalidArguments(s"API field `$fn` value is out of range ($from, $to).")
+    case class TooLargeField(fn: String, max: Int) extends InvalidArguments(s"API field `$fn` value exceeded max length of $max.")
+    case class InvalidField(fn: String) extends InvalidArguments(s"API invalid field `$fn`")
+    case class EmptyField(fn: String) extends InvalidArguments(s"API field `$fn` value cannot be empty.")
     case class InvalidExternalUserId(usrExtId: String) extends InvalidArguments(s"External user ID is invalid or unknown: $usrExtId")
     case class InvalidUserId(id: Long) extends InvalidArguments(s"User ID is invalid or unknown: $id")
 
@@ -865,8 +865,8 @@ class NCBasicRestApi extends NCRestApi with LazyLogging with NCOpenCensusTrace w
             startScopedSpan(
                 "clear$Dialog",
                 "acsTok" → req.acsTok,
-                "mdlId" → req.mdlId,
                 "usrExtId" → req.usrExtId.orNull,
+                "mdlId" → req.mdlId,
                 "usrId" → req.usrId.getOrElse(-1)) { span ⇒
                     checkLength("acsTok" → req.acsTok, "mdlId" → req.mdlId, "usrExtId" → req.usrExtId)
 
@@ -1396,7 +1396,7 @@ class NCBasicRestApi extends NCRestApi with LazyLogging with NCOpenCensusTrace w
         implicit val resFmt: RootJsonFormat[Res$User$Update] = jsonFormat1(Res$User$Update)
 
         entity(as[Req$User$Update]) { req ⇒
-            startScopedSpan("user$Update", "acsTok" → req.acsTok, "usrId" → req.id.getOrElse(() ⇒ null)) { span ⇒
+            startScopedSpan("user$Update", "acsTok" → req.acsTok, "usrId" → req.id.getOrElse(-1)) { span ⇒
                 checkLength(
                     "acsTok" → req.acsTok,
                     "firstName" → req.firstName,
@@ -1443,7 +1443,7 @@ class NCBasicRestApi extends NCRestApi with LazyLogging with NCOpenCensusTrace w
 
         entity(as[Req$User$Delete]) { req ⇒
             startScopedSpan(
-                "user$Delete", "acsTok" → req.acsTok, "usrId" → req.id.getOrElse(() ⇒ null)
+                "user$Delete", "acsTok" → req.acsTok, "usrId" → req.id.getOrElse(-1)
             ) { span ⇒
                 checkLength("acsTok" → req.acsTok, "usrExtId" → req.usrExtId)
 


[incubator-nlpcraft] 01/02: WIP.

Posted by ar...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

aradzinski pushed a commit to branch NLPCRAFT-128
in repository https://gitbox.apache.org/repos/asf/incubator-nlpcraft.git

commit 892f811c06f73af46a07239361d0cd66d6ada58c
Author: Aaron Radzinski <ar...@datalingvo.com>
AuthorDate: Wed Sep 16 23:51:18 2020 -0700

    WIP.
---
 .../nlp/core/stanford/NCStanfordCoreManager.scala  |  14 ++-
 .../nlp/core/stanford/NCStanfordNerEnricher.scala  |  19 ++-
 .../nlp/core/stanford/NCStanfordParser.scala       |  13 +-
 .../org/apache/nlpcraft/common/NCService.scala     |  53 ++++++---
 .../common/extcfg/NCExternalConfigManager.scala    |  15 ++-
 .../common/nlp/core/NCNlpCoreManager.scala         |  21 +++-
 .../nlp/core/opennlp/NCOpenNlpTokenizer.scala      |  25 +++-
 .../common/nlp/dict/NCDictionaryManager.scala      |  12 +-
 .../common/nlp/numeric/NCNumericManager.scala      |  10 +-
 .../test/impl/NCTestAutoModelValidatorImpl.scala   |   5 +-
 .../probe/mgrs/conn/NCConnectionManager.scala      |   4 +-
 .../mgrs/conversation/NCConversationManager.scala  |  13 +-
 .../probe/mgrs/deploy/NCDeployManager.scala        |  39 +++---
 .../mgrs/dialogflow/NCDialogFlowManager.scala      |  13 +-
 .../probe/mgrs/lifecycle/NCLifecycleManager.scala  |  22 +++-
 .../nlpcraft/probe/mgrs/model/NCModelManager.scala |  32 +++--
 .../nlpcraft/probe/mgrs/nlp/NCProbeEnricher.scala  |   3 +-
 .../probe/mgrs/nlp/NCProbeEnrichmentManager.scala  |  35 ++++--
 .../dictionary/NCDictionaryEnricher.scala          |  23 ++--
 .../mgrs/nlp/enrichers/limit/NCLimitEnricher.scala |  16 ++-
 .../mgrs/nlp/enrichers/model/NCModelEnricher.scala |  13 +-
 .../enrichers/relation/NCRelationEnricher.scala    |  14 ++-
 .../mgrs/nlp/enrichers/sort/NCSortEnricher.scala   |  19 ++-
 .../enrichers/stopword/NCStopWordEnricher.scala    |  37 +++---
 .../suspicious/NCSuspiciousNounsEnricher.scala     |  13 +-
 .../mgrs/nlp/validate/NCValidateManager.scala      |  17 ++-
 .../nlpcraft/server/company/NCCompanyManager.scala |  17 ++-
 .../server/feedback/NCFeedbackManager.scala        |  15 ++-
 .../apache/nlpcraft/server/geo/NCGeoManager.scala  |  26 ++--
 .../geo/tools/NCGeoSyntheticNamesGenerator.scala   |   3 +-
 .../lifecycle/NCServerLifecycleManager.scala       |  21 +++-
 .../server/nlp/core/NCNlpNerEnricher.scala         |   4 +-
 .../server/nlp/core/NCNlpServerManager.scala       |  13 +-
 .../nlp/core/google/NCGoogleNerEnricher.scala      |  23 +++-
 .../nlp/core/opennlp/NCOpenNlpNerEnricher.scala    |  25 +++-
 .../server/nlp/core/opennlp/NCOpenNlpParser.scala  |  19 ++-
 .../server/nlp/core/spacy/NCSpaCyNerEnricher.scala |  21 +++-
 .../nlp/enrichers/NCServerEnrichmentManager.scala  |  14 ++-
 .../nlp/enrichers/basenlp/NCBaseNlpEnricher.scala  |  26 +++-
 .../coordinate/NCCoordinatesEnricher.scala         |  19 ++-
 .../server/nlp/enrichers/date/NCDateEnricher.scala | 131 +++++++++++----------
 .../server/nlp/enrichers/geo/NCGeoEnricher.scala   |  21 +++-
 .../nlp/enrichers/numeric/NCNumericEnricher.scala  |  23 +++-
 .../nlp/enrichers/quote/NCQuoteEnricher.scala      |  23 +++-
 .../enrichers/stopword/NCStopWordEnricher.scala    |  16 ++-
 .../server/nlp/preproc/NCPreProcessManager.scala   |  17 ++-
 .../server/nlp/spell/NCSpellCheckManager.scala     |  19 ++-
 .../server/nlp/wordnet/NCWordNetManager.scala      |  16 ++-
 .../nlpcraft/server/probe/NCProbeManager.scala     |  18 +--
 .../server/proclog/NCProcessLogManager.scala       |  13 +-
 .../nlpcraft/server/query/NCQueryManager.scala     |  17 ++-
 .../nlpcraft/server/rest/NCRestManager.scala       |  15 ++-
 .../apache/nlpcraft/server/sql/NCSqlManager.scala  |  16 ++-
 .../server/sugsyn/NCSuggestSynonymManager.scala    |  15 ++-
 .../apache/nlpcraft/server/tx/NCTxManager.scala    |  29 ++---
 .../nlpcraft/server/user/NCUserManager.scala       |  13 +-
 56 files changed, 797 insertions(+), 351 deletions(-)

diff --git a/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/server/nlp/core/stanford/NCStanfordCoreManager.scala b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/server/nlp/core/stanford/NCStanfordCoreManager.scala
index 7dd2581..f5b81a9 100644
--- a/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/server/nlp/core/stanford/NCStanfordCoreManager.scala
+++ b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/server/nlp/core/stanford/NCStanfordCoreManager.scala
@@ -36,8 +36,10 @@ object NCStanfordCoreManager extends NCService with NCIgniteInstance {
     @volatile private var cache: IgniteCache[String, CoreDocument] = _
 
     /**
-      * Starts this component.
-      */
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = {
         val p = new Properties()
 
@@ -51,13 +53,17 @@ object NCStanfordCoreManager extends NCService with NCIgniteInstance {
             cache = ignite.cache[String, CoreDocument]("stanford-cache")
         }
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = {
         cache = null
 
-        super.stop()
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/server/nlp/core/stanford/NCStanfordNerEnricher.scala b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/server/nlp/core/stanford/NCStanfordNerEnricher.scala
index 09131e6..53719cd 100644
--- a/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/server/nlp/core/stanford/NCStanfordNerEnricher.scala
+++ b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/server/nlp/core/stanford/NCStanfordNerEnricher.scala
@@ -30,32 +30,41 @@ import scala.collection.JavaConverters._
   * Stanford NLP NER enricher.
   */
 object NCStanfordNerEnricher extends NCService with NCNlpNerEnricher with NCIgniteInstance {
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         // Should be started even if another NLP engine configured.
         if (!NCStanfordCoreManager.isStarted)
             NCStanfordCoreManager.start(span)
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { span ⇒
         if (NCStanfordCoreManager.isStarted)
             NCStanfordCoreManager.stop(span)
     
-        super.stop()
+        ackStop()
     }
     
     /**
      *
      * @param ns
-     * @param enabledBuiltInToks Set of enabled built-in token IDs.
+     * @param ebiTokens Set of enabled built-in token IDs.
      */
-    override def enrich(ns: NCNlpSentence, enabledBuiltInToks: Set[String], parent: Span = null): Unit =
+    override def enrich(ns: NCNlpSentence, ebiTokens: Set[String], parent: Span = null): Unit =
         startScopedSpan("enrich", parent, "srvReqId" → ns.srvReqId, "txt" → ns.text) { _ ⇒
             NCStanfordCoreManager.
                 annotate(ns.text).
                 entityMentions().asScala.
-                filter(e ⇒ enabledBuiltInToks.contains(e.entityType().toLowerCase)).
+                filter(e ⇒ ebiTokens.contains(e.entityType().toLowerCase)).
                 foreach(e ⇒ {
                     val offsets = e.charOffsets()
     
diff --git a/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/server/nlp/core/stanford/NCStanfordParser.scala b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/server/nlp/core/stanford/NCStanfordParser.scala
index 063fa61..646eefc 100644
--- a/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/server/nlp/core/stanford/NCStanfordParser.scala
+++ b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/server/nlp/core/stanford/NCStanfordParser.scala
@@ -33,19 +33,28 @@ import scala.collection.Seq
   * Stanford NLP parser implementation.
   */
 object NCStanfordParser extends NCService with NCNlpParser with NCIgniteInstance {
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         // Should be started even if another NLP engine configured.
         if (!NCStanfordCoreManager.isStarted)
             NCStanfordCoreManager.start(span)
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { span ⇒
         if (NCStanfordCoreManager.isStarted)
             NCStanfordCoreManager.stop(span)
 
-        super.stop()
+        ackStop()
     }
 
     override def parse(normTxt: String, parent: Span = null): Seq[NCNlpWord] =
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/NCService.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/NCService.scala
index b71e647..33b5559 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/NCService.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/NCService.scala
@@ -46,38 +46,53 @@ abstract class NCService extends LazyLogging with NCOpenCensusTrace {
       * @param parent Optional parent span.
       */
     @throws[NCE]
-    def start(parent: Span = null): NCService = {
+    def start(parent: Span = null): NCService =
+        // Make sure this is not called by subclass.
+        throw new AssertionError()
+
+    /**
+      * Stops this service.
+      *
+      * @param parent Optional parent span.
+      */
+    @throws[NCE]
+    def stop(parent: Span = null): Unit =
+        // Make sure this is not called by subclass.
+        throw new AssertionError()
+
+    /**
+     * Gets name of this service (as its class name).
+     *
+     * @return Name of this service.
+     */
+    def name: String = clsName
+
+    /**
+     * Acks started service. Should be called at the end of the `start()` method.
+     */
+    protected def ackStart(): NCService = {
         require(!started)
-        
+
         started = true
 
         val dur = s"$ansiBlueFg[${currentTime - startMs}ms]$ansiReset"
 
-        logger.info(s"$clsName started $dur")
-        
-        addTags(currentSpan(),
+        addTags(
+            currentSpan(),
             "startDurationMs" → (currentTime - startMs), "state" → started
         )
-        
+
+        logger.info(s"$clsName started $dur")
+
         this
     }
 
     /**
-     * Gets name of this service (as its class name).
-     *
-     * @return Name of this service.
+     * Acks stopped service. Should be called at the end of the `stop()` method.
      */
-    def name: String = clsName
-
-    /**
-      * Stops this service.
-      *
-      * @param parent Optional parent span.
-      */
-    @throws[NCE]
-    def stop(parent: Span = null): Unit = {
+    protected def ackStop(): Unit = {
         started = false
-    
+
         addTags(currentSpan(),
             "state" → started
         )
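
This NCService rewrite is the core of the series: the base start()/stop() now assert if reached,
and every concrete service overrides them and finishes with ackStart()/ackStop() instead of
calling super.start()/super.stop(). A minimal sketch of the expected subclass shape (the manager
name and its body are hypothetical; the pattern mirrors the manager diffs in these two commits):

    import io.opencensus.trace.Span
    import org.apache.nlpcraft.common._

    object MySampleManager extends NCService {
        override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
            // ... manager-specific initialization ...

            ackStart() // Logs the start duration and marks this service as started.
        }

        override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
            // ... manager-specific cleanup ...

            ackStop() // Marks this service as stopped.
        }
    }
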
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/extcfg/NCExternalConfigManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/extcfg/NCExternalConfigManager.scala
index bb9019e..f400eba 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/extcfg/NCExternalConfigManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/extcfg/NCExternalConfigManager.scala
@@ -156,13 +156,24 @@ object NCExternalConfigManager extends NCService {
         val downTypes = m.asScala
 
         if (downTypes.nonEmpty) {
-            U.executeParallel(downTypes.values.toSeq.map(d ⇒ () ⇒ clearDir(d)): _*)
+            U.executeParallel(
+                downTypes.values.toSeq.map(d ⇒ () ⇒ clearDir(d)): _*
+            )
             U.executeParallel(
                 downTypes.keys.toSeq.flatMap(t ⇒ FILES(t).toSeq.map(f ⇒ Download(f, t))).map(d ⇒ () ⇒ download(d)): _*
             )
         }
 
-        super.start(parent)
+        ackStart()
+    }
+
+    /**
+     * Stops this service.
+     *
+     * @param parent Optional parent span.
+     */
+    override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/core/NCNlpCoreManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/core/NCNlpCoreManager.scala
index febc0f9..e9033c3 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/core/NCNlpCoreManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/core/NCNlpCoreManager.scala
@@ -39,6 +39,11 @@ object NCNlpCoreManager extends NCService {
       */
     def getEngine: String = Config.engine
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         // NOTE: DO NOT confuse this with token providers.
         if (!SUPPORTED_NLP_ENGINES.contains(Config.engine))
@@ -60,16 +65,20 @@ object NCNlpCoreManager extends NCService {
 
         tokenizer.start()
 
-        super.start()
+        ackStart()
     }
-    
-    override def stop(parent: Span): Unit = {
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
+    override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { span ⇒
         if (tokenizer != null)
-            tokenizer.stop(parent)
+            tokenizer.stop(span)
 
-        startScopedSpan("stop", parent)(_ ⇒ super.stop())
+        ackStop()
     }
-    
+
     /**
       * Stems given word or a sequence of words which will be tokenized before.
       *
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/core/opennlp/NCOpenNlpTokenizer.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/core/opennlp/NCOpenNlpTokenizer.scala
index 60ddfc9..7b29938 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/core/opennlp/NCOpenNlpTokenizer.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/core/opennlp/NCOpenNlpTokenizer.scala
@@ -34,22 +34,37 @@ object NCOpenNlpTokenizer extends NCNlpTokenizer {
 
     @volatile private var tokenizer: Tokenizer = _
 
-    override def start(parent: Span): NCService = {
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
+    override def start(parent: Span): NCService = startScopedSpan("start", parent) { _ ⇒
         tokenizer = managed(NCExternalConfigManager.getStream(OPENNLP, RESOURCE)) acquireAndGet { in ⇒
             new TokenizerME(new TokenizerModel(in))
         }
 
-        super.start(parent)
+        ackStart()
     }
 
-    override def stop(parent: Span): Unit = {
+    /**
+     *
+     * @param parent Optional parent span.
+     */
+    override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
         tokenizer = null
 
-        super.stop(parent)
+        ackStop()
     }
 
+    /**
+     *
+     * @param sen Sentence
+     * @return
+     */
     override def tokenize(sen: String): Seq[NCNlpCoreToken] =
         this.synchronized {
             tokenizer.tokenizePos(sen)
-        }.toSeq.map(s ⇒ NCNlpCoreToken(s.getCoveredText(sen).toString, s.getStart, s.getEnd, s.length()))
+        }
+        .toSeq.map(s ⇒ NCNlpCoreToken(s.getCoveredText(sen).toString, s.getStart, s.getEnd, s.length()))
 }
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/dict/NCDictionaryManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/dict/NCDictionaryManager.scala
index 890368c..edd5a5c 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/dict/NCDictionaryManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/dict/NCDictionaryManager.scala
@@ -43,7 +43,7 @@ object NCDictionaryManager extends NCService {
     @volatile private var full: Set[String] = _
     @volatile private var dicts: Map[NCDictionaryType, Set[String]] = _
     
-    override def start(parent: Span): NCService = startScopedSpan("start", parent, "dictionaries" → dictFiles.values.mkString(",")) { _ ⇒
+    override def start(parent: Span = null): NCService = startScopedSpan("start", parent, "dicts" → dictFiles.values.mkString(",")) { _ ⇒
         dicts = dictFiles.map(p ⇒ {
             val wordType = p._1
             val path = p._2
@@ -70,11 +70,15 @@ object NCDictionaryManager extends NCService {
         // Read summary dictionary.
         full = dicts.flatMap(_._2).toSet
         
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/numeric/NCNumericManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/numeric/NCNumericManager.scala
index 2ebb756..3abfba1 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/numeric/NCNumericManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/common/nlp/numeric/NCNumericManager.scala
@@ -117,9 +117,13 @@ object NCNumericManager extends NCService {
         else
             None
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
     
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
@@ -262,7 +266,7 @@ object NCNumericManager extends NCService {
         unitsStem = hs.map(p ⇒ p.stem → NCNumericUnit(p.name, p.unitType)).toMap
         maxSynWords = (unitsOrigs ++ unitsStem).keySet.map(_.split(" ").length).max
         
-        super.start()
+        ackStart()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/model/tools/test/impl/NCTestAutoModelValidatorImpl.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/model/tools/test/impl/NCTestAutoModelValidatorImpl.scala
index 13c4516..8c680dd 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/model/tools/test/impl/NCTestAutoModelValidatorImpl.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/model/tools/test/impl/NCTestAutoModelValidatorImpl.scala
@@ -118,10 +118,7 @@ private [test] object NCTestAutoModelValidatorImpl extends LazyLogging {
         val passCnt = results.count(_.pass)
         val failCnt = results.count(!_.pass)
         
-        if (failCnt > 0)
-            logger.error(s"Some model auto-validation failed - see details below...")
-        
-        logger.info(s"\n\nModel auto-validation results: " +
+        logger.info(s"Model auto-validation results: " +
             s"${ansiGreenFg}OK$ansiReset $passCnt, ${ansiRedFg}FAIL$ansiReset $failCnt:\n${tbl.toString}"
         )
         
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/conn/NCConnectionManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/conn/NCConnectionManager.scala
index 7c6938f..0280d74 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/conn/NCConnectionManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/conn/NCConnectionManager.scala
@@ -462,7 +462,7 @@ object NCConnectionManager extends NCService {
         // Only return when probe successfully connected to the server.
         ctrlLatch.await()
      
-        super.start()
+        ackStart()
     }
     
     /**
@@ -473,6 +473,6 @@ object NCConnectionManager extends NCService {
     
         U.stopThread(ctrlThread)
         
-        super.stop()
+        ackStop()
     }
 }
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/conversation/NCConversationManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/conversation/NCConversationManager.scala
index 1e8e9f2..cca7d5a 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/conversation/NCConversationManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/conversation/NCConversationManager.scala
@@ -49,6 +49,11 @@ object NCConversationManager extends NCService {
     @volatile private var convs: mutable.Map[Key, Value] = _
     @volatile private var gc: ScheduledExecutorService = _
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         gc = Executors.newSingleThreadScheduledExecutor
 
@@ -58,15 +63,19 @@ object NCConversationManager extends NCService {
 
         logger.info(s"Conversation manager GC started, checking every ${Config.timeoutMs}ms.")
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         U.shutdownPools(gc)
 
         logger.info("Conversation manager GC stopped.")
 
-        super.stop()
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala
index 71cea85..9ba109d 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/deploy/NCDeployManager.scala
@@ -81,7 +81,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
     type Callback = Function[NCIntentMatch, NCResult]
 
     @volatile private var data: ArrayBuffer[NCProbeModel] = _
-    @volatile private var modelFactory: NCModelFactory = _
+    @volatile private var mdlFactory: NCModelFactory = _
 
     object Config extends NCConfigurable {
         private final val pre = "nlpcraft.probe"
@@ -225,7 +225,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
                     }
                     else
                         logger.trace(
-                            s"Synonym already added (ignoring) [" +
+                            s"Synonym already added (safely ignoring) [" +
                                 s"mdlId=$mdlId, " +
                                 s"elmId=$elmId, " +
                                 s"syn=${chunks.mkString(" ")}, " +
@@ -318,7 +318,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
             val synsChunks = for (syn ← elm.getSynonyms.asScala.flatMap(parser.expand)) yield chunkSplit(syn)
 
             if (U.containsDups(synsChunks.flatten.toList))
-                logger.trace(s"Model element synonym dups found (ignoring) [" +
+                logger.warn(s"Model element synonym dups found [" +
                     s"mdlId=$mdlId, " +
                     s"elmId=$elmId, " +
                     s"synonym=${synsChunks.diff(synsChunks.distinct).distinct.map(_.mkString(",")).mkString(";")}" +
@@ -334,7 +334,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
             val valNames = vals.map(_.getName).toList
 
             if (U.containsDups(valNames))
-                logger.trace(s"Model element values names dups found (ignoring) [" +
+                logger.warn(s"Model element values names dups found [" +
                     s"mdlId=$mdlId, " +
                     s"elmId=$elmId, " +
                     s"names=${valNames.diff(valNames.distinct).distinct.mkString(",")}" +
@@ -366,7 +366,7 @@ object NCDeployManager extends NCService with DecorateAsScala {
                     })
 
                 if (U.containsDups(chunks.toList))
-                    logger.trace(s"Model element value synonyms dups found (ignoring) [" +
+                    logger.warn(s"Model element value synonyms dups found [" +
                         s"mdlId=$mdlId, " +
                         s"elmId=$elmId, " +
                         s"valId=$valId, " +
@@ -553,11 +553,11 @@ object NCDeployManager extends NCService with DecorateAsScala {
       */
     @throws[NCE]
     private def makeModelFromSource(cls: Class[_ <: NCModel], src: String): NCModel =
-        catching(classOf[Throwable]) either modelFactory.mkModel(cls) match {
+        catching(classOf[Throwable]) either mdlFactory.mkModel(cls) match {
             case Left(e) ⇒
                 throw new NCE(s"Failed to instantiate model [" +
                     s"cls=${cls.getName}, " +
-                    s"factory=${modelFactory.getClass.getName}, " +
+                    s"factory=${mdlFactory.getClass.getName}, " +
                     s"src=$src" +
                 "]", e)
 
@@ -606,18 +606,24 @@ object NCDeployManager extends NCService with DecorateAsScala {
         )
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @throws NCE
+     * @return
+     */
     @throws[NCE]
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         data = ArrayBuffer.empty[NCProbeModel]
 
-        modelFactory = new NCBasicModelFactory
+        mdlFactory = new NCBasicModelFactory
 
         // Initialize model factory (if configured).
         Config.modelFactoryType match {
             case Some(mft) ⇒
-                modelFactory = makeModelFactory(mft)
+                mdlFactory = makeModelFactory(mft)
 
-                modelFactory.initialize(Config.modelFactoryProps.getOrElse(Map.empty[String, String]).asJava)
+                mdlFactory.initialize(Config.modelFactoryProps.getOrElse(Map.empty[String, String]).asJava)
 
             case None ⇒ // No-op.
         }
@@ -647,18 +653,23 @@ object NCDeployManager extends NCService with DecorateAsScala {
         if (U.containsDups(ids))
             throw new NCE(s"Duplicate model IDs detected: ${ids.mkString(", ")}")
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @throws NCE
+     */
     @throws[NCE]
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        if (modelFactory != null)
-            modelFactory.terminate()
+        if (mdlFactory != null)
+            mdlFactory.terminate()
 
         if (data != null)
             data.clear()
 
-        super.stop()
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/dialogflow/NCDialogFlowManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/dialogflow/NCDialogFlowManager.scala
index 4b54447..b59e811 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/dialogflow/NCDialogFlowManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/dialogflow/NCDialogFlowManager.scala
@@ -49,6 +49,11 @@ object NCDialogFlowManager extends NCService {
     @volatile private var flow: mutable.Map[Key, ArrayBuffer[Value]] = _
     @volatile private var gc: ScheduledExecutorService = _
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         flow = mutable.HashMap.empty[Key, ArrayBuffer[Value]]
 
@@ -58,15 +63,19 @@ object NCDialogFlowManager extends NCService {
 
         logger.info(s"Dialog flow manager GC started, checking every ${Config.timeoutMs}ms.")
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         U.shutdownPools(gc)
 
         logger.info("Dialog flow manager GC stopped.")
 
-        super.stop()
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/lifecycle/NCLifecycleManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/lifecycle/NCLifecycleManager.scala
index 4bbf8a4..2829718 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/lifecycle/NCLifecycleManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/lifecycle/NCLifecycleManager.scala
@@ -32,13 +32,29 @@ object NCLifecycleManager extends NCService {
         def lifecycle: Seq[String] = getStringList("nlpcraft.probe.lifecycle")
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @throws NCE
+     * @return
+     */
     @throws[NCE]
-    override def start(parent: Span = null): NCService = {
+    override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         beans = Config.lifecycle.map(U.mkObject(_).asInstanceOf[NCLifecycle])
     
-        super.start()
+        ackStart()
     }
-    
+
+
+    /**
+     * Stops this service.
+     *
+     * @param parent Optional parent span.
+     */
+    override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
+        ackStop()
+    }
+
     /**
       * Called before any other probe managers are started.
       * Default implementation is a no-op.
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/model/NCModelManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/model/NCModelManager.scala
index b220a8c..a23ccc3 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/model/NCModelManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/model/NCModelManager.scala
@@ -37,6 +37,12 @@ object NCModelManager extends NCService with DecorateAsScala {
     // Access mutex.
     private final val mux = new Object()
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @throws NCE
+     * @return
+     */
     @throws[NCE]
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         val tbl = NCAsciiTable("Model ID", "Name", "Ver.", "Elements", "Synonyms", "Intents")
@@ -71,7 +77,19 @@ object NCModelManager extends NCService with DecorateAsScala {
             "deployedModels" → data.values.map(_.model.getId).mkString(",")
         )
 
-        super.start()
+        ackStart()
+    }
+
+    /**
+     * Stops this component.
+     */
+    override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
+        mux.synchronized {
+            if (data != null)
+                data.values.foreach(m ⇒ discardModel(m.model))
+        }
+
+        ackStop()
     }
 
     /**
@@ -90,18 +108,6 @@ object NCModelManager extends NCService with DecorateAsScala {
     }
 
     /**
-      * Stops this component.
-      */
-    override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        mux.synchronized {
-            if (data != null)
-                data.values.foreach(m ⇒ discardModel(m.model))
-        }
-
-        super.stop()
-    }
-
-    /**
       *
       * @return
       */
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/NCProbeEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/NCProbeEnricher.scala
index e6430ff..5d31a39 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/NCProbeEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/NCProbeEnricher.scala
@@ -19,7 +19,6 @@ package org.apache.nlpcraft.probe.mgrs.nlp
 
 import java.io.Serializable
 
-import com.typesafe.scalalogging.LazyLogging
 import io.opencensus.trace.Span
 import org.apache.nlpcraft.common.nlp._
 import org.apache.nlpcraft.common.{NCService, _}
@@ -31,7 +30,7 @@ import scala.language.implicitConversions
 /**
  * Base class for NLP enricher.
  */
-abstract class NCProbeEnricher extends NCService with LazyLogging {
+abstract class NCProbeEnricher extends NCService {
     /**
       *
       * Processes this NLP sentence.
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/NCProbeEnrichmentManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/NCProbeEnrichmentManager.scala
index 1195153..4dce63f 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/NCProbeEnrichmentManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/NCProbeEnrichmentManager.scala
@@ -18,13 +18,11 @@
 package org.apache.nlpcraft.probe.mgrs.nlp
 
 import java.io.Serializable
-import java.time.LocalDateTime
 import java.util
 import java.util.{Date, Objects}
 import java.util.concurrent.{ExecutorService, Executors}
 import java.util.function.Predicate
 
-import akka.http.scaladsl.model.DateTime
 import io.opencensus.trace.{Span, Status}
 import org.apache.nlpcraft.common.NCErrorCodes._
 import org.apache.nlpcraft.common._
@@ -88,15 +86,25 @@ object NCProbeEnrichmentManager extends NCService with NCOpenCensusModelStats {
 
     Config.check()
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         embeddedCbs = mutable.HashSet.empty[EMBEDDED_CB]
 
         pool = Executors.newFixedThreadPool(8 * Runtime.getRuntime.availableProcessors())
+
         executor = ExecutionContext.fromExecutor(pool)
 
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         mux.synchronized {
             if (embeddedCbs != null)
@@ -106,16 +114,17 @@ object NCProbeEnrichmentManager extends NCService with NCOpenCensusModelStats {
         U.shutdownPools(pool)
 
         executor = null
+
         pool = null
 
-        super.stop()
+        ackStop()
     }
 
     /**
       *
       * @param cb Callback.
       */
-    private [probe] def addEmbeddedCallback(cb: EMBEDDED_CB): Unit = {
+    private[probe] def addEmbeddedCallback(cb: EMBEDDED_CB): Unit = {
         mux.synchronized {
             embeddedCbs.add(cb)
         }
@@ -125,7 +134,7 @@ object NCProbeEnrichmentManager extends NCService with NCOpenCensusModelStats {
       *
       * @param cb Callback.
       */
-    private [probe] def removeEmbeddedCallback(cb: EMBEDDED_CB): Unit = {
+    private[probe] def removeEmbeddedCallback(cb: EMBEDDED_CB): Unit = {
         mux.synchronized {
             embeddedCbs.remove(cb)
         }
@@ -224,12 +233,12 @@ object NCProbeEnrichmentManager extends NCService with NCOpenCensusModelStats {
         tbl += (s"${ansiBlueFg}Text$ansiReset", nlpSens.map(_.text))
         tbl += (s"${ansiBlueFg}Model ID$ansiReset", mdlId)
         tbl += (s"${ansiBlueFg}User ID$ansiReset", usrId)
-        tbl += (s"${ansiBlueFg}  First Name$ansiReset", senMeta.getOrElse("FIRST_NAME", ""))
-        tbl += (s"${ansiBlueFg}  Last Name$ansiReset", senMeta.getOrElse("LAST_NAME", ""))
-        tbl += (s"${ansiBlueFg}  Email$ansiReset", senMeta.getOrElse("EMAIL", ""))
-        tbl += (s"${ansiBlueFg}  Company Name$ansiReset", senMeta.getOrElse("COMPANY_NAME", ""))
-        tbl += (s"${ansiBlueFg}  Is Admin$ansiReset", senMeta.getOrElse("IS_ADMIN", ""))
-        tbl += (s"${ansiBlueFg}  Signup Date$ansiReset", new Date(java.lang.Long.parseLong(senMeta("SIGNUP_TSTAMP").toString)))
+        tbl += (s"$ansiBlueFg  First Name$ansiReset", senMeta.getOrElse("FIRST_NAME", ""))
+        tbl += (s"$ansiBlueFg  Last Name$ansiReset", senMeta.getOrElse("LAST_NAME", ""))
+        tbl += (s"$ansiBlueFg  Email$ansiReset", senMeta.getOrElse("EMAIL", ""))
+        tbl += (s"$ansiBlueFg  Company Name$ansiReset", senMeta.getOrElse("COMPANY_NAME", ""))
+        tbl += (s"$ansiBlueFg  Is Admin$ansiReset", senMeta.getOrElse("IS_ADMIN", ""))
+        tbl += (s"$ansiBlueFg  Signup Date$ansiReset", new Date(java.lang.Long.parseLong(senMeta("SIGNUP_TSTAMP").toString)))
         tbl += (s"${ansiBlueFg}User Agent$ansiReset", senMeta.getOrElse("USER_AGENT", ""))
         tbl += (s"${ansiBlueFg}Remote Address$ansiReset", senMeta.getOrElse("REMOTE_ADDR", ""))
         tbl += (s"${ansiBlueFg}Server Timestamp$ansiReset", new Date(java.lang.Long.parseLong(senMeta("RECEIVE_TSTAMP").toString)))
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/dictionary/NCDictionaryEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/dictionary/NCDictionaryEnricher.scala
index b96e956..12fe98e 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/dictionary/NCDictionaryEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/dictionary/NCDictionaryEnricher.scala
@@ -38,19 +38,24 @@ object NCDictionaryEnricher extends NCProbeEnricher {
     @volatile private var swearWords: Set[String] = _
 
     /**
-      * Starts this component.
-      */
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
-        swearWords =
-            U.readTextResource(s"badfilter/swear_words.txt", "UTF-8", logger).
-                map(NCNlpCoreManager.stem).
-                toSet
+        swearWords = U.readTextResource(s"badfilter/swear_words.txt", "UTF-8", logger).
+            map(NCNlpCoreManager.stem).
+            toSet
 
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
     
     @throws[NCE]
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/limit/NCLimitEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/limit/NCLimitEnricher.scala
index d7560b9..37f040b 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/limit/NCLimitEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/limit/NCLimitEnricher.scala
@@ -144,8 +144,10 @@ object NCLimitEnricher extends NCProbeEnricher {
     private def isUserNotValue(n: NCNlpSentenceNote): Boolean = n.isUser && !n.contains("value")
 
     /**
-      * Starts this component.
-      */
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         // Note: only single words are supported in the code for now.
         fuzzyNums = stemmatizeWords(Map(
@@ -202,12 +204,14 @@ object NCLimitEnricher extends NCProbeEnricher {
 
         techWords = (sortWords.keys ++ topWords ++ postWords ++ fuzzyNums.keySet).toSet
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
-
         fuzzyNums = null
         sortWords = null
         topWords = null
@@ -215,6 +219,8 @@ object NCLimitEnricher extends NCProbeEnricher {
         macros = null
         limits = null
         techWords = null
+
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/model/NCModelEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/model/NCModelEnricher.scala
index 420e6c4..a01f229 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/model/NCModelEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/model/NCModelEnricher.scala
@@ -100,12 +100,21 @@ object NCModelEnricher extends NCProbeEnricher with DecorateAsScala {
         }
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/relation/NCRelationEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/relation/NCRelationEnricher.scala
index 6f379b6..03d0922 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/relation/NCRelationEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/relation/NCRelationEnricher.scala
@@ -63,8 +63,10 @@ object NCRelationEnricher extends NCProbeEnricher {
     private var ALL_FUNC_STEMS: Set[String] = _
 
     /**
-      * Starts this component.
-      */
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         val macros = NCMacroParser()
 
@@ -111,11 +113,15 @@ object NCRelationEnricher extends NCProbeEnricher {
 
         ALL_FUNC_STEMS = FUNCS.flatMap(_.allStems).toSet
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/sort/NCSortEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/sort/NCSortEnricher.scala
index e331997..0c4f07d 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/sort/NCSortEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/sort/NCSortEnricher.scala
@@ -480,10 +480,15 @@ object NCSortEnricher extends NCProbeEnricher {
             }
         }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         // Single words.
-        sort =
-            Seq("sort", "rank", "classify", "order", "arrange", "organize", "segment", "shuffle").map(NCNlpCoreManager.stem)
+        sort = Seq("sort", "rank", "classify", "order", "arrange", "organize", "segment", "shuffle")
+            .map(NCNlpCoreManager.stem)
 
         // Single words.
         // Cannot be same as in SORT.
@@ -514,16 +519,20 @@ object NCSortEnricher extends NCProbeEnricher {
 
         validate()
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
-
         sort = null
         by = null
         order = null
         stemAnd = null
         maskWords = null
+
+        ackStop()
     }
 }
\ No newline at end of file
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/stopword/NCStopWordEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/stopword/NCStopWordEnricher.scala
index 8015d6c..0daf975 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/stopword/NCStopWordEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/stopword/NCStopWordEnricher.scala
@@ -48,19 +48,22 @@ object NCStopWordEnricher extends NCProbeEnricher {
     @volatile private var geoKindStops: Map[String, Seq[String]] = _
     @volatile private var numPrefixStops:Seq[String] = _
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
-        geoPreWords=
-            // NOTE: stemmatisation is done already by generator.
-            U.readTextResource(s"context/geo_pre_words.txt", "UTF-8", logger).
-                map(_.split(" ").toSeq).sortBy(-_.size)
-
-        geoKindStops =
-            Map(
-                "nlpcraft:city" → Seq("city", "town"),
-                "nlpcraft:country" → Seq("country", "land", "countryside", "area", "territory"),
-                "nlpcraft:region" → Seq("region", "area", "state", "county", "district", "ground", "territory"),
-                "nlpcraft:continent" → Seq("continent", "land", "area")
-            ).map(p ⇒ p._1 → p._2.map(NCNlpCoreManager.stem))
+        // NOTE: stemmatization is done already by generator.
+        geoPreWords = U.readTextResource(s"context/geo_pre_words.txt", "UTF-8", logger).
+            map(_.split(" ").toSeq).sortBy(-_.size)
+
+        geoKindStops = Map(
+            "nlpcraft:city" → Seq("city", "town"),
+            "nlpcraft:country" → Seq("country", "land", "countryside", "area", "territory"),
+            "nlpcraft:region" → Seq("region", "area", "state", "county", "district", "ground", "territory"),
+            "nlpcraft:continent" → Seq("continent", "land", "area")
+        ).map(p ⇒ p._1 → p._2.map(NCNlpCoreManager.stem))
 
         numPrefixStops = Seq(
             "is",
@@ -76,15 +79,19 @@ object NCStopWordEnricher extends NCProbeEnricher {
             "must be"
         ).map(NCNlpCoreManager.stem)
 
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         geoPreWords = null
         geoKindStops = null
         numPrefixStops = null
 
-        super.stop()
+        ackStop()
     }
 
     /**
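The start() bodies of the dictionary and stop-word enrichers above share one pattern: read a text resource, stem every line, and collect the result into a lookup set. A runnable sketch of the same shape, reading from an in-memory string and using a deliberately crude stand-in for NCNlpCoreManager.stem:

    import scala.io.Source

    object StemResourceSketch extends App {
        // Deliberately crude stand-in for NCNlpCoreManager.stem.
        private def stem(w: String): String = w.toLowerCase.stripSuffix("s")

        // Same shape as `U.readTextResource(...).map(NCNlpCoreManager.stem).toSet`,
        // but reading from an in-memory string so the sketch runs standalone.
        val stops: Set[String] = Source.fromString("towns\nRegions\nareas\n").
            getLines().
            map(_.trim).
            filter(_.nonEmpty).
            map(stem).
            toSet

        println(stops) // Set(town, region, area)
    }
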
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/suspicious/NCSuspiciousNounsEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/suspicious/NCSuspiciousNounsEnricher.scala
index be8f984..7f1ae3f 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/suspicious/NCSuspiciousNounsEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/enrichers/suspicious/NCSuspiciousNounsEnricher.scala
@@ -31,12 +31,21 @@ import scala.collection.Map
   * Suspicious words enricher.
   */
 object NCSuspiciousNounsEnricher extends NCProbeEnricher {
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
 
     @throws[NCE]
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/validate/NCValidateManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/validate/NCValidateManager.scala
index 862c7df..f0893e8 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/validate/NCValidateManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/probe/mgrs/nlp/validate/NCValidateManager.scala
@@ -30,18 +30,27 @@ import org.apache.nlpcraft.probe.mgrs.NCProbeModel
 object NCValidateManager extends NCService with LazyLogging {
     // Create new language finder singleton.
     @volatile private var langFinder: OptimaizeLangDetector = _
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         langFinder = new OptimaizeLangDetector()
         
         // Initialize language finder.
         langFinder.loadModels()
         
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
     
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/company/NCCompanyManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/company/NCCompanyManager.scala
index 5910260..ff5aaa8 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/company/NCCompanyManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/company/NCCompanyManager.scala
@@ -54,11 +54,20 @@ object NCCompanyManager extends NCService with NCIgniteInstance {
 
         tok
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         catching(wrapIE) {
             compSeq = NCSql.mkSeq(ignite, "compSeq", "nc_company", "id")
@@ -101,7 +110,7 @@ object NCCompanyManager extends NCService with NCIgniteInstance {
                 }
         }
 
-        super.start()
+        ackStart()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/feedback/NCFeedbackManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/feedback/NCFeedbackManager.scala
index 590fb64..a3996e0 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/feedback/NCFeedbackManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/feedback/NCFeedbackManager.scala
@@ -32,16 +32,25 @@ import scala.util.control.Exception._
 object NCFeedbackManager extends NCService with NCIgniteInstance {
     @volatile private var seq: IgniteAtomicSequence = _
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         catching(wrapIE) {
             seq = NCSql.mkSeq(ignite, "feedbackSeq", "feedback", "id")
         }
 
-        super.start()
+        ackStart()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/geo/NCGeoManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/geo/NCGeoManager.scala
index b763dcd..2b55cc7 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/geo/NCGeoManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/geo/NCGeoManager.scala
@@ -80,32 +80,26 @@ object NCGeoManager extends NCService {
     case class YamlTopCity(name: String, region: String, country: String)
 
     /**
-      * Starts manager.
-      */
+     *
+     * @param parent Optional parent span.
+     * @throws NCE
+     * @return
+     */
     @throws[NCE]
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         model = readAndConstructModel(true)
 
-        super.start()
-    }
-
-    /**
-      * Starts manager. Method is public for generator.
-      */
-    @throws[NCE]
-    def start(extended: Boolean): NCService = {
-        model = readAndConstructModel(extended)
-
-        super.start()
+        ackStart()
     }
 
     /**
-      * Stops this component.
-      */
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         model = null
 
-        super.stop()
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/geo/tools/NCGeoSyntheticNamesGenerator.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/geo/tools/NCGeoSyntheticNamesGenerator.scala
index 97d5d2e..37d030a 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/geo/tools/NCGeoSyntheticNamesGenerator.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/geo/tools/NCGeoSyntheticNamesGenerator.scala
@@ -31,7 +31,6 @@ import scala.collection._
   * Generator of additional synonyms for geo names.
   */
 object NCGeoSyntheticNamesGenerator extends App {
-
     // Base synonym should be saved for console debug message.
     case class Holder(base: String, var entries: Set[NCGeoEntry])
 
@@ -42,7 +41,7 @@ object NCGeoSyntheticNamesGenerator extends App {
             throw new NCE(s"Couldn't delete file: $file")
 
         NCDictionaryManager.start()
-        NCGeoManager.start(false)
+        NCGeoManager.start()
 
         val hs = mutable.Map.empty[String, Holder]
 
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/lifecycle/NCServerLifecycleManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/lifecycle/NCServerLifecycleManager.scala
index 49f1c0a..6e08e7e 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/lifecycle/NCServerLifecycleManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/lifecycle/NCServerLifecycleManager.scala
@@ -44,23 +44,32 @@ object NCServerLifecycleManager extends NCService {
     }
     
     Config.loadAndCheck()
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         if (Config.objects.isEmpty)
             logger.info("No lifecycle components configured.")
         else {
-            val tbl = NCAsciiTable("Class Name")
+            val tbl = NCAsciiTable("Class")
      
             Config.classes.foreach(tbl += _)
      
-            tbl.info(logger, Some(s"Following lifecycle components configured:"))
+            tbl.info(logger, Some(s"Configured lifecycle components:"))
         }
      
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
     
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/NCNlpNerEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/NCNlpNerEnricher.scala
index 7fc6091..bab0a96 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/NCNlpNerEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/NCNlpNerEnricher.scala
@@ -28,8 +28,8 @@ trait NCNlpNerEnricher extends NCService {
     /**
       *
       * @param ns
-      * @param enabledBuiltInTokens Set of enabled built-in token IDs.
+      * @param ebiTokens Set of enabled built-in (EBI) token IDs.
       * @param parent Optional parent span.
       */
-    def enrich(ns: NCNlpSentence, enabledBuiltInTokens: Set[String], parent: Span = null)
+    def enrich(ns: NCNlpSentence, ebiTokens: Set[String], parent: Span = null)
 }
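The rename above (enabledBuiltInTokens → ebiTokens) propagates to every NER enricher later in this diff, where the set acts as a whitelist of built-in token IDs: a detected entity is kept only if its lower-cased type is in the set. A standalone sketch of that contract; Entity and detect are hypothetical, since the real enrichers annotate an NCNlpSentence in place rather than returning a list:

    object EbiTokensFilterSketch extends App {
        final case class Entity(typ: String, text: String)

        // Hypothetical detector; the real enrichers get entities from Google NLP, OpenNLP or spaCy.
        private def detect(text: String): Seq[Entity] =
            Seq(Entity("LOCATION", "Tokyo"), Entity("PERSON", "Ada"), Entity("DATE", "tomorrow"))

        // Keep an entity only if its lower-cased type ID is in the enabled built-in (EBI) set.
        def enrich(text: String, ebiTokens: Set[String]): Seq[Entity] =
            detect(text).filter(e ⇒ ebiTokens.contains(e.typ.toLowerCase))

        println(enrich("Ada flies to Tokyo tomorrow", ebiTokens = Set("location", "date")))
    }
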
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/NCNlpServerManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/NCNlpServerManager.scala
index d656342..2b51180 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/NCNlpServerManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/NCNlpServerManager.scala
@@ -60,6 +60,11 @@ object NCNlpServerManager extends NCService {
     private val isOpenNer: Boolean = Config.support("opennlp")
     private val isSpacyNer: Boolean = Config.support("spacy")
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         addTags(span,
             "stanfordNer" → isStanfordNer,
@@ -112,9 +117,13 @@ object NCNlpServerManager extends NCService {
     
         logger.info(s"Enabled built-in NERs: ${Config.tokenProviders.mkString(", ")}")
     
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         if (ners != null)
             ners.values.foreach(_.stop())
@@ -122,7 +131,7 @@ object NCNlpServerManager extends NCService {
         if (parser != null && parser.isStarted)
             parser.stop()
 
-        super.stop()
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/google/NCGoogleNerEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/google/NCGoogleNerEnricher.scala
index e61ff29..a95033e 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/google/NCGoogleNerEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/google/NCGoogleNerEnricher.scala
@@ -32,6 +32,11 @@ import scala.collection.JavaConverters._
 object NCGoogleNerEnricher extends NCService with NCNlpNerEnricher with NCIgniteInstance {
     @volatile private var srv: LanguageServiceClient = _
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         try {
             srv = LanguageServiceClient.create()
@@ -53,17 +58,27 @@ object NCGoogleNerEnricher extends NCService with NCNlpNerEnricher with NCIgnite
                 )
         }
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         if (srv != null)
             srv.close()
         
-        super.stop()
+        ackStop()
     }
 
-    override def enrich(ns: NCNlpSentence, enabledBuiltInTokens: Set[String], parent: Span = null): Unit =
+    /**
+     *
+     * @param ns
+     * @param ebiTokens Set of enabled built-in token IDs.
+     * @param parent Optional parent span.
+     */
+    override def enrich(ns: NCNlpSentence, ebiTokens: Set[String], parent: Span = null): Unit =
         startScopedSpan("enrich", parent, "srvReqId" → ns.srvReqId, "txt" → ns.text) { _ ⇒
             try {
                 val resp = ask(ns.text)
@@ -73,7 +88,7 @@ object NCGoogleNerEnricher extends NCService with NCNlpNerEnricher with NCIgnite
                 resp.getEntitiesList.asScala.flatMap(e ⇒ {
                     val typLc = e.getType.toString.toLowerCase
     
-                    if (e.getMentionsList != null && enabledBuiltInTokens.contains(typLc)) {
+                    if (e.getMentionsList != null && ebiTokens.contains(typLc)) {
                         e.getMentionsList.asScala.flatMap(m ⇒ {
                             val span = m.getText
     
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/opennlp/NCOpenNlpNerEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/opennlp/NCOpenNlpNerEnricher.scala
index ccfe257..f0b14a3 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/opennlp/NCOpenNlpNerEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/opennlp/NCOpenNlpNerEnricher.scala
@@ -39,6 +39,11 @@ object NCOpenNlpNerEnricher extends NCService with NCNlpNerEnricher with NCIgnit
     @volatile private var nerFinders: Map[NameFinderME, String] = _
     @volatile private var cache: IgniteCache[String, Array[String]] = _
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         require(NCOpenNlpTokenizer.isStarted)
 
@@ -71,16 +76,26 @@ object NCOpenNlpNerEnricher extends NCService with NCNlpNerEnricher with NCIgnit
             cache = ignite.cache[String, Array[String]]("opennlp-cache")
         }
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         cache = null
     
-        super.stop()
+        ackStop()
     }
 
-    override def enrich(ns: NCNlpSentence, enabledBuiltInToks: Set[String], parent: Span = null): Unit =
+    /**
+     *
+     * @param ns
+     * @param ebiTokens
+     * @param parent Optional parent span.
+     */
+    override def enrich(ns: NCNlpSentence, ebiTokens: Set[String], parent: Span = null): Unit =
         startScopedSpan("enrich", parent, "srvReqId" → ns.srvReqId, "txt" → ns.text) { _ ⇒
             val normTxt = ns.text
     
@@ -105,7 +120,7 @@ object NCOpenNlpNerEnricher extends NCService with NCNlpNerEnricher with NCIgnit
                 this.
                     synchronized {
                         val res = nerFinders.
-                            filter { case (_, tokName) ⇒ enabledBuiltInToks.contains(tokName)}.
+                            filter { case (_, tokName) ⇒ ebiTokens.contains(tokName)}.
                             flatMap {
                                 case (finder, name) ⇒
                                     finder.find(words).map(p ⇒ Holder(p.getStart, p.getEnd - 1, name, p.getProb))
@@ -117,7 +132,7 @@ object NCOpenNlpNerEnricher extends NCService with NCNlpNerEnricher with NCIgnit
                     }.toSeq
     
             hs.
-                filter(h ⇒ enabledBuiltInToks.contains(h.name)).
+                filter(h ⇒ ebiTokens.contains(h.name)).
                 foreach(h ⇒ {
                     val t1 = ns.find(_.index == h.start)
                     val t2 = ns.find(_.index == h.end)
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/opennlp/NCOpenNlpParser.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/opennlp/NCOpenNlpParser.scala
index bd94850..fc9b624 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/opennlp/NCOpenNlpParser.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/opennlp/NCOpenNlpParser.scala
@@ -41,6 +41,11 @@ object NCOpenNlpParser extends NCService with NCNlpParser with NCIgniteInstance
     @volatile private var lemmatizer: DictionaryLemmatizer = _
     @volatile private var cache: IgniteCache[String, Array[String]] = _
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         require(NCOpenNlpTokenizer.isStarted)
 
@@ -63,15 +68,25 @@ object NCOpenNlpParser extends NCService with NCNlpParser with NCIgniteInstance
             cache = ignite.cache[String, Array[String]]("opennlp-cache")
         }
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         cache = null
 
-        super.stop()
+        ackStop()
     }
 
+    /**
+     *
+     * @param normTxt Normalized text.
+     * @param parent Optional parent span.
+     * @return Parsed words.
+     */
     override def parse(normTxt: String, parent: Span = null): Seq[NCNlpWord] =
         startScopedSpan("parse", parent, "normTxt" → normTxt) { _ ⇒
             // Can be optimized.
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/spacy/NCSpaCyNerEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/spacy/NCSpaCyNerEnricher.scala
index 5a6eb5d..bb4a181 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/spacy/NCSpaCyNerEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/core/spacy/NCSpaCyNerEnricher.scala
@@ -69,6 +69,11 @@ object NCSpaCyNerEnricher extends NCService with NCNlpNerEnricher with NCIgniteI
 
     @volatile private var url: String = _
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         url = Config.proxyUrl
 
@@ -96,19 +101,23 @@ object NCSpaCyNerEnricher extends NCService with NCNlpNerEnricher with NCIgniteI
 
         logger.info(s"spaCy proxy connected: $url")
 
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
     
     /**
       *
       * @param ns
-      * @param enabledBuiltInToks Set of enabled built-in token IDs.
+      * @param ebiTokens Set of enabled built-in token IDs.
       */
-    override def enrich(ns: NCNlpSentence, enabledBuiltInToks: Set[String], parent: Span = null): Unit =
+    override def enrich(ns: NCNlpSentence, ebiTokens: Set[String], parent: Span = null): Unit =
         startScopedSpan("enrich", parent, "srvReqId" → ns.srvReqId, "txt" → ns.text) { _ ⇒
             val resp = getSync(Http().singleRequest(HttpRequest(uri = s"$url?text=${URLEncoder.encode(ns.text, "UTF-8")}")))
     
@@ -125,7 +134,7 @@ object NCSpaCyNerEnricher extends NCService with NCNlpNerEnricher with NCIgniteI
                             spans.foreach(span ⇒ {
                                 val nerLc = span.ner.toLowerCase
     
-                                if (enabledBuiltInToks.contains(nerLc)) {
+                                if (ebiTokens.contains(nerLc)) {
                                     val t1Opt = ns.find(_.startCharIndex == span.from)
                                     val t2Opt = ns.find(_.endCharIndex == span.from + span.text.length)
     
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/NCServerEnrichmentManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/NCServerEnrichmentManager.scala
index ebad1ee..5df450c 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/NCServerEnrichmentManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/NCServerEnrichmentManager.scala
@@ -239,6 +239,11 @@ object NCServerEnrichmentManager extends NCService with NCIgniteInstance {
             foreach { case (typ, toks) ⇒ ners(typ).enrich(ns, toks) }
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         catching(wrapIE) {
             cache = ignite.cache[String, Holder]("sentence-cache")
@@ -261,12 +266,13 @@ object NCServerEnrichmentManager extends NCService with NCIgniteInstance {
         ners = NCNlpServerManager.getNers
         supportedProviders = ners.keySet ++ (if (Config.isBuiltInEnrichers) Set("nlpcraft") else Set.empty)
 
-        super.start()
+        ackStart()
     }
 
     /**
-      * Stops this manager.
-      */
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { span ⇒
         if (Config.isBuiltInEnrichers) {
             NCCoordinatesEnricher.stop(span)
@@ -281,7 +287,7 @@ object NCServerEnrichmentManager extends NCService with NCIgniteInstance {
         
         cache = null
         
-        super.stop()
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/basenlp/NCBaseNlpEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/basenlp/NCBaseNlpEnricher.scala
index 8d6f7b5..d27531d 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/basenlp/NCBaseNlpEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/basenlp/NCBaseNlpEnricher.scala
@@ -30,8 +30,9 @@ import scala.collection._
   * Base NLP enricher.
   */
 object NCBaseNlpEnricher extends NCServerEnricher {
-    // http://www.vidarholen.net/contents/interjections/
+    //noinspection SpellCheckingInspection
     private final val INTERJECTIONS =
+        // http://www.vidarholen.net/contents/interjections/
         Set(
             "aah", "aaah", "aaaahh", "aha", "a-ha", "ahem",
             "ahh", "ahhh", "argh", "augh", "aww", "aw",
@@ -71,16 +72,31 @@ object NCBaseNlpEnricher extends NCServerEnricher {
 
     @volatile private var parser: NCNlpParser = _
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         parser = NCNlpServerManager.getParser
         
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
-    
+
+    /**
+     *
+     * @param ns NLP sentence to enrich.
+     * @param parent Optional parent span.
+     * @throws NCE
+     */
     @throws[NCE]
     override def enrich(ns: NCNlpSentence, parent: Span = null) {
         startScopedSpan("enrich", parent, "srvReqId" → ns.srvReqId, "txt" → ns.text) { _ ⇒
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/coordinate/NCCoordinatesEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/coordinate/NCCoordinatesEnricher.scala
index 4930a94..e48f30a 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/coordinate/NCCoordinatesEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/coordinate/NCCoordinatesEnricher.scala
@@ -50,19 +50,28 @@ object NCCoordinatesEnricher extends NCServerEnricher {
 
     private final val SEPS = Seq(",", ";", "and")
     private final val EQUALS = Seq("=", "==", "is", "are", "equal")
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         latStems = Seq("lat", "latitude").map(NCNlpCoreManager.stem)
         lonStems = Seq("lon", "longitude").map(NCNlpCoreManager.stem)
 
-        super.start()
+        ackStart()
     }
-    
-    override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
+    override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         latStems = null
         lonStems = null
+
+        ackStop()
     }
     
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/date/NCDateEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/date/NCDateEnricher.scala
index 55b2805..0fc3125 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/date/NCDateEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/date/NCDateEnricher.scala
@@ -22,7 +22,9 @@ import java.util.{Calendar ⇒ C}
 
 import io.opencensus.trace.Span
 import org.apache.nlpcraft.common.config.NCConfigurable
-import org.apache.nlpcraft.common.nlp.{NCNlpSentence, NCNlpSentenceNote, NCNlpSentenceToken}
+import org.apache.nlpcraft.common.nlp.{NCNlpSentence ⇒ Sentence}
+import org.apache.nlpcraft.common.nlp.{NCNlpSentenceNote ⇒ Note}
+import org.apache.nlpcraft.common.nlp.{NCNlpSentenceToken ⇒ Token}
 import org.apache.nlpcraft.common.{NCService, _}
 import org.apache.nlpcraft.server.nlp.enrichers.NCServerEnricher
 import org.apache.nlpcraft.server.nlp.enrichers.date.NCDateConstants._
@@ -61,7 +63,7 @@ object NCDateEnricher extends NCServerEnricher {
 
     // Function's data holder.
     case class F(
-        tokens: Seq[NCNlpSentenceToken],
+        tokens: Seq[Token],
         body: String,
         isFull: Boolean,
         var isProcessed: Boolean = false) {
@@ -86,7 +88,7 @@ object NCDateEnricher extends NCServerEnricher {
     case class CRD(
         from: F,
         to: F,
-        dash: Seq[NCNlpSentenceToken]
+        dash: Seq[Token]
     )
 
     case class CR(
@@ -100,23 +102,29 @@ object NCDateEnricher extends NCServerEnricher {
 
     // Time holder.
     case class T(
-        tokens: Seq[NCNlpSentenceToken],
+        tokens: Seq[Token],
         body: Option[String]
     )
 
     // Time period holder.
     case class TP(
-        tokens: Seq[NCNlpSentenceToken],
+        tokens: Seq[Token],
         body: String
     )
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
-    
+
     /**
-      * Starts manager.
-      */
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         def read(res: String): LHM_SS = {
             startScopedSpan("read", span, "res" → res) { _ ⇒
@@ -133,14 +141,13 @@ object NCDateEnricher extends NCServerEnricher {
             }
         }
 
-        val file =
-            Config.style match {
-                case MDY ⇒ "parts_mdy.txt.gz"
-                case DMY ⇒ "parts_dmy.txt.gz"
-                case YMD ⇒ "parts_ymd.txt.gz"
+        val file = Config.style match {
+            case MDY ⇒ "parts_mdy.txt.gz"
+            case DMY ⇒ "parts_dmy.txt.gz"
+            case YMD ⇒ "parts_ymd.txt.gz"
 
-                case _  ⇒ throw new AssertionError(s"Unexpected format type: ${Config.style}")
-            }
+            case _  ⇒ throw new AssertionError(s"Unexpected format type: ${Config.style}")
+        }
 
         var p1: LHM_SS = null
         var p2: LHM_SS = null
@@ -153,11 +160,17 @@ object NCDateEnricher extends NCServerEnricher {
 
         cacheParts = p1 ++ p2
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param ns NLP sentence to enrich.
+     * @param parent Optional parent span.
+     * @throws NCE
+     */
     @throws[NCE]
-    override def enrich(ns: NCNlpSentence, parent: Span = null) {
+    override def enrich(ns: Sentence, parent: Span = null) {
         // This stage must not be 1st enrichment stage.
         assume(ns.nonEmpty)
         
@@ -197,8 +210,8 @@ object NCDateEnricher extends NCServerEnricher {
                 buf
             }
     
-            def isDash(toks: Seq[NCNlpSentenceToken]): Boolean = {
-                def isDashChar(t: NCNlpSentenceToken): Boolean = t.origText.forall(ch ⇒ DASHES.contains(ch) || DASHES_LIKE.contains(ch))
+            def isDash(toks: Seq[Token]): Boolean = {
+                def isDashChar(t: Token): Boolean = t.origText.forall(ch ⇒ DASHES.contains(ch) || DASHES_LIKE.contains(ch))
         
                 toks.exists(isDashChar) && toks.forall(t ⇒ t.isStopWord || isDashChar(t))
             }
@@ -235,7 +248,7 @@ object NCDateEnricher extends NCServerEnricher {
                 buf
             }
     
-            def withBefore(tokens: Seq[NCNlpSentenceToken], lenBefore: Int) =
+            def withBefore(tokens: Seq[Token], lenBefore: Int) =
                 ns.take(tokens.head.index).filter(!_.isStopWord).takeRight(lenBefore) ++ tokens
     
             /*
@@ -302,15 +315,15 @@ object NCDateEnricher extends NCServerEnricher {
 
     private def mkBetweenPrepositions(seq: Seq[(String, String)]): Seq[(P, P)] = seq.map(t ⇒ P(t._1) → P(t._2))
 
-    private def areSuitableTokens(buf: mutable.Buffer[Set[NCNlpSentenceToken]], toks: Seq[NCNlpSentenceToken]): Boolean =
+    private def areSuitableTokens(buf: mutable.Buffer[Set[Token]], toks: Seq[Token]): Boolean =
         toks.forall(t ⇒ !t.isQuoted && !t.isBracketed) && !buf.exists(_.exists(toks.contains))
 
-    private def findDates(ns: NCNlpSentence): Seq[F] = {
-        val buf = mutable.Buffer.empty[Set[NCNlpSentenceToken]]
+    private def findDates(ns: Sentence): Seq[F] = {
+        val buf = mutable.Buffer.empty[Set[Token]]
         val res = mutable.Buffer.empty[F]
 
         for (toks ← ns.tokenMixWithStopWords()) {
-            def process(toks: Seq[NCNlpSentenceToken]): Unit = {
+            def process(toks: Seq[Token]): Unit = {
                 if (areSuitableTokens(buf, toks)) {
                     val s = toks.map(_.normText).mkString(" ")
 
@@ -350,8 +363,8 @@ object NCDateEnricher extends NCServerEnricher {
       * @param toks
       * @return
       */
-    private def mkNote(range: NCDateRange, from: Int, to: Int, toks: Seq[NCNlpSentenceToken]): NCNlpSentenceNote =
-        NCNlpSentenceNote(
+    private def mkNote(range: NCDateRange, from: Int, to: Int, toks: Seq[Token]): Note =
+        Note(
             toks.map(_.index),
             "nlpcraft:date",
             "from" → range.from,
@@ -363,7 +376,7 @@ object NCDateEnricher extends NCServerEnricher {
         body: String,
         fromIncl: Boolean,
         toIncl: Boolean,
-        tokens: Seq[NCNlpSentenceToken],
+        tokens: Seq[Token],
         base: Long) {
         val note = mkNote(
             NCDateParser.calculate(body, base, fromIncl, toIncl).mkInclusiveDateRange,
@@ -377,15 +390,15 @@ object NCDateEnricher extends NCServerEnricher {
 
     private def mark(processed: F*): Unit = processed.foreach(_.isProcessed = true)
 
-    private def collapse(ns: NCNlpSentence) {
+    private def collapse(ns: Sentence) {
         removeDuplicates(ns)
         collapsePeriods(ns)
         removeDuplicates(ns)
     }
 
-    private def isValidRange(n: NCNlpSentenceNote): Boolean = n("from").asInstanceOf[Long] < n("to").asInstanceOf[Long]
+    private def isValidRange(n: Note): Boolean = n("from").asInstanceOf[Long] < n("to").asInstanceOf[Long]
 
-    private def collapsePeriods(ns: NCNlpSentence) {
+    private def collapsePeriods(ns: Sentence) {
         // a) Months and years.
         // 1. "m", "m"... "y, m" → fix year for firsts; try to union all.
         // Example: January, February of 2009.
@@ -421,10 +434,10 @@ object NCDateEnricher extends NCServerEnricher {
         // Example: Monday, Tuesday.
         
         for (neighbours ← findNeighbours(ns, andSupport = true)) {
-            val buf = mutable.Buffer.empty[Seq[NCNlpSentenceNote]]
+            val buf = mutable.Buffer.empty[Seq[Note]]
 
             // Creates all neighbours' sequences starting from longest.
-            val combs: Seq[Seq[NCNlpSentenceNote]] = (2 to neighbours.length).reverse.flatMap(i ⇒ neighbours.sliding(i))
+            val combs: Seq[Seq[Note]] = (2 to neighbours.length).reverse.flatMap(i ⇒ neighbours.sliding(i))
 
             for (comb ← combs if !buf.exists(p ⇒ p.exists(p ⇒ comb.contains(p)))) {
                 val first = comb.head
@@ -434,8 +447,8 @@ object NCDateEnricher extends NCServerEnricher {
 
                 def fixField(
                     field: Int,
-                    seq: Seq[NCNlpSentenceNote],
-                    base: NCNlpSentenceNote,
+                    seq: Seq[Note],
+                    base: Note,
                     isBefore: Boolean = false,
                     isAfter: Boolean = false) = {
                     val r = mkDateRange(base)
@@ -507,13 +520,13 @@ object NCDateEnricher extends NCServerEnricher {
     }
 
     private def compressNotes(
-        ns: NCNlpSentence,
-        notes: Seq[NCNlpSentenceNote],
-        before: Option[NCNlpSentenceNote] = None,
-        after: Option[NCNlpSentenceNote] = None): Boolean = {
+        ns: Sentence,
+        notes: Seq[Note],
+        before: Option[Note] = None,
+        after: Option[Note] = None): Boolean = {
 
         if (nearRanges(notes)) {
-            def getSeq(optH: Option[NCNlpSentenceNote]): Seq[NCNlpSentenceNote] =
+            def getSeq(optH: Option[Note]): Seq[Note] =
                 optH match {
                     case Some(h) ⇒ Seq(h)
                     case None ⇒ Seq.empty
@@ -541,12 +554,12 @@ object NCDateEnricher extends NCServerEnricher {
     }
 
     private def compressAndRemoveNotes(
-        ns: NCNlpSentence,
-        seq: Seq[NCNlpSentenceNote],
-        before: Option[NCNlpSentenceNote] = None,
-        after: Option[NCNlpSentenceNote] = None) {
+        ns: Sentence,
+        seq: Seq[Note],
+        before: Option[Note] = None,
+        after: Option[Note] = None) {
         if (!compressNotes(ns, seq, before, after)) {
-            def remove(nOpt: Option[NCNlpSentenceNote]): Unit =
+            def remove(nOpt: Option[Note]): Unit =
                 nOpt match {
                     case Some(h) ⇒ ns.removeNote(h)
                     case None ⇒ // No-op.
@@ -557,10 +570,10 @@ object NCDateEnricher extends NCServerEnricher {
         }
     }
 
-    private def findNeighbours(ns: NCNlpSentence, andSupport: Boolean): Seq[Seq[NCNlpSentenceNote]] = {
+    private def findNeighbours(ns: Sentence, andSupport: Boolean): Seq[Seq[Note]] = {
         val hs = ns.getNotes("nlpcraft:date").sortBy(_.tokenFrom)
 
-        case class Wrapper(holder: NCNlpSentenceNote, var group: Int)
+        case class Wrapper(holder: Note, var group: Int)
 
         val wrappers = hs.map(Wrapper(_, 0))
 
@@ -591,7 +604,7 @@ object NCDateEnricher extends NCServerEnricher {
         hs.groupBy(grouped(_)).toSeq.sortBy(_._1).map(_._2).filter(_.size > 1)
     }
 
-    private def removeDuplicates(ns: NCNlpSentence): Unit = {
+    private def removeDuplicates(ns: Sentence): Unit = {
         val notes = findNeighbours(ns, andSupport = false).flatMap(g ⇒ {
             case class H(from: Long, to: Long) {
                 override def equals(obj: scala.Any): Boolean = obj match {
@@ -603,10 +616,10 @@ object NCDateEnricher extends NCServerEnricher {
             }
 
             // Neighbours grouped by equal date ranges.
-            val grouped: Map[H, Seq[NCNlpSentenceNote]] = g.groupBy(h ⇒ H(h("from").asInstanceOf[Long], h("to").asInstanceOf[Long]))
+            val grouped: Map[H, Seq[Note]] = g.groupBy(h ⇒ H(h("from").asInstanceOf[Long], h("to").asInstanceOf[Long]))
 
             // Groups ordered to keep node with maximum information (max periods count in date).
-            val hs: Iterable[Seq[NCNlpSentenceNote]] =
+            val hs: Iterable[Seq[Note]] =
                 grouped.map(_._2.sortBy(h ⇒ -h("periods").asInstanceOf[java.util.List[String]].asScala.length))
 
             // First holder will be kept in group, others (tail) should be deleted.
@@ -624,7 +637,7 @@ object NCDateEnricher extends NCServerEnricher {
         c
     }
 
-    private def mkSumRange(notes: Seq[NCNlpSentenceNote]): NCDateRange =
+    private def mkSumRange(notes: Seq[Note]): NCDateRange =
         notes.size match {
             case 0 ⇒ throw new AssertionError("Unexpected empty notes")
             case 1 ⇒ mkDateRange(notes.head)
@@ -639,17 +652,13 @@ object NCDateEnricher extends NCServerEnricher {
                     mkDateRange(notes.head, notes.last) // Summary.
         }
 
-    private def mkDateRange(n1: NCNlpSentenceNote, n2: NCNlpSentenceNote): NCDateRange =
-        NCDateRange(n1("from").asInstanceOf[Long], n2("to").asInstanceOf[Long])
-
-    private def mkDateRange(n: NCNlpSentenceNote): NCDateRange = mkDateRange(n, n)
+    private def mkDateRange(n1: Note, n2: Note): NCDateRange = NCDateRange(n1("from").asInstanceOf[Long], n2("to").asInstanceOf[Long])
+    private def mkDateRange(n: Note): NCDateRange = mkDateRange(n, n)
     private def getField(d: Long, field: Int): Int = mkCalendar(d).get(field)
-    private def equalHolder(h: NCNlpSentenceNote, ps: String*): Boolean =
-        h("periods").asInstanceOf[java.util.List[String]].asScala.sorted == ps.sorted
-    private def equalHolders(hs: Seq[NCNlpSentenceNote], ps: String*): Boolean = hs.forall(equalHolder(_, ps: _*))
+    private def equalHolder(h: Note, ps: String*): Boolean = h("periods").asInstanceOf[java.util.List[String]].asScala.sorted == ps.sorted
+    private def equalHolders(hs: Seq[Note], ps: String*): Boolean = hs.forall(equalHolder(_, ps: _*))
     private def getPrevious[T](s: T, seq: Seq[T]): T = seq(seq.indexOf(s) - 1)
-
-    private def nearRanges(ns: Seq[NCNlpSentenceNote]): Boolean =
+    private def nearRanges(ns: Seq[Note]): Boolean =
         ns.forall(
             n ⇒ if (n == ns.head) true else getPrevious(n, ns)("to").asInstanceOf[Long] == n("from").asInstanceOf[Long]
         )
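The one non-mechanical change in NCDateEnricher is the import-rename at the top of the file, which shortens NCNlpSentence, NCNlpSentenceNote and NCNlpSentenceToken to Sentence, Note and Token for the rest of the file. The same language feature in isolation, with a standard-library type standing in for the renamed NLPCraft classes:

    object ImportRenameSketch extends App {
        // `Original ⇒ Alias` inside an import clause rebinds the name locally,
        // just as `NCNlpSentenceNote ⇒ Note` does in the enricher above.
        import scala.collection.immutable.{TreeMap ⇒ SortedNotes}

        val notes: SortedNotes[Int, String] = SortedNotes(2 → "to", 1 → "from")

        println(notes) // TreeMap(1 -> from, 2 -> to)
    }
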
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/geo/NCGeoEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/geo/NCGeoEnricher.scala
index 471518b..9c9dc9c 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/geo/NCGeoEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/geo/NCGeoEnricher.scala
@@ -76,15 +76,24 @@ object NCGeoEnricher extends NCServerEnricher {
     private def getGeoNotes(ns: NCNlpSentence): Set[NCNlpSentenceNote] = GEO_TYPES.flatMap(ns.getNotes)
     private def getGeoNotes(t: NCNlpSentenceToken): Set[NCNlpSentenceNote] = GEO_TYPES.flatMap(t.getNotes)
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
-
         commons = null
         topUsa = null
         topWorld = null
         locations = null
+
+        ackStop()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         locations = NCGeoManager.getModel.synonyms
 
@@ -117,9 +126,15 @@ object NCGeoEnricher extends NCServerEnricher {
         topUsa = readCities(US_TOP_PATH).map(city ⇒ glue(city.name, city.region)).toSet
         topWorld = readCities(WORLD_TOP_PATH).map(city ⇒ glue(city.name, city.region)).toSet
 
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param ns NLP sentence to enrich.
+     * @param parent Optional parent span.
+     * @throws NCE
+     */
     @throws[NCE]
     override def enrich(ns: NCNlpSentence, parent: Span = null): Unit =
         startScopedSpan("enrich", parent, "srvReqId" → ns.srvReqId, "txt" → ns.text) { _ ⇒
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/numeric/NCNumericEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/numeric/NCNumericEnricher.scala
index 1955558..25a4fb8 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/numeric/NCNumericEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/numeric/NCNumericEnricher.scala
@@ -172,13 +172,22 @@ object NCNumericEnricher extends NCServerEnricher {
         ("since", "till") → BETWEEN_INCLUSIVE,
         ("from", "till") → BETWEEN_INCLUSIVE
     )
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
 
     private def mkMap(seq: Seq[String], c: T): Map[String, P] =
@@ -221,6 +230,12 @@ object NCNumericEnricher extends NCServerEnricher {
         NCNlpSentenceNote(toks.map(_.index), "nlpcraft:num", params:_*)
     }
 
+    /**
+     *
+     * @param ns NLP sentence to enrich.
+     * @param parent Optional parent span.
+     * @throws NCE
+     */
     @throws[NCE]
     override def enrich(ns: NCNlpSentence, parent: Span = null): Unit =
         startScopedSpan("enrich", parent, "srvReqId" → ns.srvReqId, "txt" → ns.text) { _ ⇒
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/quote/NCQuoteEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/quote/NCQuoteEnricher.scala
index 2d97843..c6a6dad 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/quote/NCQuoteEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/quote/NCQuoteEnricher.scala
@@ -30,14 +30,29 @@ import scala.collection._
  * Quote enricher.
  */
 object NCQuoteEnricher extends NCServerEnricher {
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
-    
+
+    /**
+     *
+     * @param ns NLP sentence to enrich.
+     * @param parent Optional parent span.
+     * @throws NCE
+     */
     @throws[NCE]
     override def enrich(ns: NCNlpSentence, parent: Span = null) {
         startScopedSpan("enrich", parent, "srvReqId" → ns.srvReqId, "txt" → ns.text) { _ ⇒
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/stopword/NCStopWordEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/stopword/NCStopWordEnricher.scala
index 232a788..6d174e9 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/stopword/NCStopWordEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/stopword/NCStopWordEnricher.scala
@@ -645,6 +645,12 @@ object NCStopWordEnricher extends NCServerEnricher {
         }
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @throws NCE
+     * @return
+     */
     @throws[NCE]
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         percents = Set(
@@ -674,10 +680,14 @@ object NCStopWordEnricher extends NCServerEnricher {
         stopWords = m(false)
         exceptions = m(true)
 
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
 }
\ No newline at end of file
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/preproc/NCPreProcessManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/preproc/NCPreProcessManager.scala
index a0852b7..b898f87 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/preproc/NCPreProcessManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/preproc/NCPreProcessManager.scala
@@ -81,13 +81,22 @@ object NCPreProcessManager extends NCService {
             "you're" → "you are",
             "you've" → "you have"
         ).map(p ⇒ p._1 → p._2.split(' ').toSeq)
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span): NCService = startScopedSpan("start", parent) { _ ⇒
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
     
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/spell/NCSpellCheckManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/spell/NCSpellCheckManager.scala
index 9cbe995..437f568 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/spell/NCSpellCheckManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/spell/NCSpellCheckManager.scala
@@ -43,7 +43,12 @@ object NCSpellCheckManager extends NCService {
             s.capitalize
         else
             s // Full lower case by default.
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span): NCService = startScopedSpan("start", parent) { _ ⇒
         dict = U.extractYamlString(
             NCExternalConfigManager.getContent(SPELL, RESOURCE),
@@ -54,13 +59,17 @@ object NCSpellCheckManager extends NCService {
 
         logger.debug(s"Spell checker dictionary loaded: ${dict.size} entries")
 
-        super.start()
+        ackStart()
     }
-    
-    override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
+    override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
         dict = null
+
+        ackStop()
     }
     
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/wordnet/NCWordNetManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/wordnet/NCWordNetManager.scala
index cf3d86b..3cefbc3 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/wordnet/NCWordNetManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/wordnet/NCWordNetManager.scala
@@ -65,16 +65,26 @@ object NCWordNetManager extends NCService {
             Seq.empty[String]
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @throws NCE
+     * @return
+     */
     @throws[NCE]
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         dic =  Dictionary.getDefaultResourceInstance
         morph = dic.getMorphologicalProcessor
 
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
     
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/probe/NCProbeManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/probe/NCProbeManager.scala
index 4181d82..46c4db4 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/probe/NCProbeManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/probe/NCProbeManager.scala
@@ -163,9 +163,10 @@ object NCProbeManager extends NCService {
     @volatile private var modelsInfo: ConcurrentHashMap[String, Promise[java.util.Map[String, AnyRef]]] = _
 
     /**
-      *
-      * @return
-      */
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         probes = mutable.HashMap.empty[ProbeKey, ProbeHolder]
         mdls = mutable.HashMap.empty[String, NCProbeModelMdo]
@@ -204,12 +205,13 @@ object NCProbeManager extends NCService {
         
         pingSrv.start()
         
-        super.start()
+        ackStart()
     }
-    
+
     /**
-      *
-      */
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         isStopping = new AtomicBoolean(true)
 
@@ -219,7 +221,7 @@ object NCProbeManager extends NCService {
 
         modelsInfo = null
      
-        super.stop()
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/proclog/NCProcessLogManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/proclog/NCProcessLogManager.scala
index e7d7f90..067b890 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/proclog/NCProcessLogManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/proclog/NCProcessLogManager.scala
@@ -36,6 +36,11 @@ object NCProcessLogManager extends NCService with NCIgniteInstance {
     @volatile private var logSeq: IgniteAtomicSequence = _
     @volatile private var logLock: IgniteSemaphore = _
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span): NCService = startScopedSpan("start", parent) { _ ⇒
         catching(wrapIE) {
             logSeq = NCSql.mkSeq(ignite, "logSeq", "proc_log", "id")
@@ -43,11 +48,15 @@ object NCProcessLogManager extends NCService with NCIgniteInstance {
             logLock = ignite.semaphore("logSemaphore", 1, true, true)
         }
      
-        super.start()
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
     
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/query/NCQueryManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/query/NCQueryManager.scala
index dc04e7f..ae444a6 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/query/NCQueryManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/query/NCQueryManager.scala
@@ -52,7 +52,12 @@ object NCQueryManager extends NCService with NCIgniteInstance with NCOpenCensusS
     @volatile private var asyncAsks: ConcurrentHashMap[String, Promise[NCQueryStateMdo]] = _
     
     private final val MAX_WORDS = 100
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         asyncAsks = new ConcurrentHashMap[String/*Server request ID*/, Promise[NCQueryStateMdo]]()
 
@@ -86,11 +91,15 @@ object NCQueryManager extends NCService with NCIgniteInstance with NCOpenCensusS
         
         require(cache != null)
         
-        super.start()
+        ackStart()
     }
-    
+
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
     
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/rest/NCRestManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/rest/NCRestManager.scala
index 08e8432..6fc2a3e 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/rest/NCRestManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/rest/NCRestManager.scala
@@ -68,8 +68,10 @@ object NCRestManager extends NCService {
     Config.check()
 
     /**
-      * Starts this component.
-      */
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         val url = s"${Config.host}:${Config.port}"
         val api: NCRestApi = U.mkObject(Config.apiImpl)
@@ -86,16 +88,17 @@ object NCRestManager extends NCService {
             case Failure(_) ⇒ logger.info(s"REST server failed to start on '$url'.")
         }
 
-        super.start()
+        ackStart()
     }
 
     /**
-      * Stops this component.
-      */
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         if (bindFut != null)
             bindFut.flatMap(_.unbind()).onComplete(_ ⇒ SYSTEM.terminate())
 
-        super.stop()
+        ackStop()
     }
 }
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sql/NCSqlManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sql/NCSqlManager.scala
index a09cfa8..dea9c5d 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sql/NCSqlManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sql/NCSqlManager.scala
@@ -52,8 +52,11 @@ object NCSqlManager extends NCService with NCIgniteInstance {
     @volatile private var usersPropsSeq: IgniteAtomicSequence = _
 
     /**
-      * Starts manager.
-      */
+     *
+     * @param parent Optional parent span.
+     * @throws NCE
+     * @return
+     */
     @throws[NCE]
     override def start(parent: Span): NCService = startScopedSpan("start", parent) { span ⇒
         addTags(span, "isIgniteDb" → NCSql.isIgniteDb)
@@ -65,14 +68,15 @@ object NCSqlManager extends NCService with NCIgniteInstance {
             usersPropsSeq = NCSql.mkSeq(ignite, "usersPropsSeq", "nc_user_property", "id")
         }
      
-        super.start()
+        ackStart()
     }
 
     /**
-      * Stop manager.
-      */
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop()
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sugsyn/NCSuggestSynonymManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sugsyn/NCSuggestSynonymManager.scala
index d7f524e..8acee4d 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sugsyn/NCSuggestSynonymManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sugsyn/NCSuggestSynonymManager.scala
@@ -112,20 +112,29 @@ object NCSuggestSynonymManager extends NCService {
     private def toStem(s: String): String = split(s).map(NCNlpPorterStemmer.stem).mkString(" ")
     private def toStemWord(s: String): String = NCNlpPorterStemmer.stem(s)
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span): NCService = startScopedSpan("start", parent) { _ ⇒
         pool = Executors.newCachedThreadPool()
         executor = ExecutionContext.fromExecutor(pool)
 
-        super.start(parent)
+        ackStart()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
-        super.stop(parent)
-
         U.shutdownPools(pool)
 
         pool = null
         executor = null
+
+        ackStop()
     }
 
     /**
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/tx/NCTxManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/tx/NCTxManager.scala
index e9789e0..0ce81f5 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/tx/NCTxManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/tx/NCTxManager.scala
@@ -45,27 +45,30 @@ object NCTxManager extends NCService with NCIgniteInstance {
       * @return
       */
     private var itx: IgniteTransactions = _
-    
+
     /**
-      * Stops this component.
-      */
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span = null): Unit = startScopedSpan("stop", parent) { _ ⇒
         // Close all still attached JDBC connections on stop.
         if (cons != null)
             cons.values.foreach(U.close)
         
-        super.stop()
+        ackStop()
     }
 
-
     /**
-      * Starts this component.
-      */
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { _ ⇒
         cons = mutable.HashMap.empty[IgniteUuid, Connection]
+
         itx = ignite.transactions()
 
-        super.start()
+        ackStart()
     }
 
     /**
@@ -106,14 +109,8 @@ object NCTxManager extends NCService with NCIgniteInstance {
     def attach(con: Connection): Unit = {
         val x = tx()
         
-        if (x != null) {
-            val xid = x.xid()
-    
-            cons.synchronized {
-                if (!cons.contains(xid))
-                    cons += xid → con
-            }
-        }
+        if (x != null)
+            attach(x, con)
     }
 
     /**
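The attach(con) change above extracts the inline synchronized map update into a call to a two-argument attach(x, con) helper whose body is not part of this diff. Based solely on the removed inline code, that overload presumably looks roughly like the sketch below; the Ignite Transaction parameter type is assumed from the tx() call, and the cons map is the mutable.HashMap[IgniteUuid, Connection] initialized in start().

    /**
     * Attaches the given JDBC connection to the given transaction.
     * Sketch only: the actual overload is not shown in this diff, so this body
     * is an assumption reconstructed from the removed inline code above.
     *
     * @param x Transaction (assumed org.apache.ignite.transactions.Transaction).
     * @param con JDBC connection to attach.
     */
    private def attach(x: Transaction, con: Connection): Unit = {
        val xid = x.xid()

        cons.synchronized {
            if (!cons.contains(xid))
                cons += xid → con
        }
    }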
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/user/NCUserManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/user/NCUserManager.scala
index fc1c63a..c984e36 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/user/NCUserManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/user/NCUserManager.scala
@@ -94,6 +94,10 @@ object NCUserManager extends NCService with NCIgniteInstance {
 
     Config.check()
 
+    /**
+     *
+     * @param parent Optional parent span.
+     */
     override def stop(parent: Span): Unit = startScopedSpan("stop", parent) { _ ⇒
         if (scanner != null)
             scanner.cancel()
@@ -103,9 +107,14 @@ object NCUserManager extends NCService with NCIgniteInstance {
         tokenSigninCache = null
         idSigninCache = null
 
-        super.stop()
+        ackStop()
     }
 
+    /**
+     *
+     * @param parent Optional parent span.
+     * @return
+     */
     override def start(parent: Span = null): NCService = startScopedSpan("start", parent) { span ⇒
         addTags(
             span,
@@ -170,7 +179,7 @@ object NCUserManager extends NCService with NCIgniteInstance {
         logger.info(s"Access tokens will be scanned for timeout every ${Config.timeoutScannerFreqMins}m.")
         logger.info(s"Access tokens inactive for >= ${Config.accessTokenExpireTimeoutMins}m will be invalidated.")
 
-        super.start()
+        ackStart()
     }
 
     /**