Posted to commits@tika.apache.org by ta...@apache.org on 2021/02/04 17:07:09 UTC

[tika] branch main updated: TIKA-3292 -- swap jackson for gson where trivial

This is an automated email from the ASF dual-hosted git repository.

tallison pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tika.git


The following commit(s) were added to refs/heads/main by this push:
     new 2306199  TIKA-3292 -- swap jackson for gson where trivial
2306199 is described below

commit 23061995bdd2a9fbd58744ae65405fa61c5fd0da
Author: tballison <ta...@apache.org>
AuthorDate: Thu Feb 4 12:06:55 2021 -0500

    TIKA-3292 -- swap jackson for gson where trivial
---
 tika-bundles/tika-bundle-classic/pom.xml           |   2 -
 tika-eval/tika-eval-core/pom.xml                   |   6 +-
 .../textstats/CompositeTextStatsCalculator.java    |   1 +
 .../eval/core/tokens/AnalyzerDeserializer.java     | 131 +++++++++------------
 .../tika/eval/core/tokens/AnalyzerManager.java     |  24 ++--
 tika-langdetect/tika-langdetect-lingo24/pom.xml    |   6 +-
 .../langdetect/lingo24/Lingo24LangDetector.java    |  30 +++--
 tika-langdetect/tika-langdetect-mitll-text/pom.xml |  10 +-
 .../tika/langdetect/mitll/TextLangDetector.java    |  15 ++-
 9 files changed, 104 insertions(+), 121 deletions(-)
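
The substance of the change is a one-for-one swap of the two libraries' tree-model
parse calls in the modules where nothing fancier than tree walking was being done.
A minimal before/after sketch of the idiom (the JSON string and class name here are
illustrative only, not taken from the patch):

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class TreeParseSketch {
        public static void main(String[] args) throws Exception {
            String json = "{\"success\":\"true\",\"lang\":\"en\"}";

            // Gson idiom removed by this commit (for comparison):
            //   JsonElement element = new JsonParser().parse(json);
            //   String lang = element.getAsJsonObject().get("lang").getAsString();

            // Jackson tree-model equivalent used throughout the patch:
            JsonNode node = new ObjectMapper().readTree(json);
            String lang = node.get("lang").asText();
            System.out.println(lang); // prints: en
        }
    }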

diff --git a/tika-bundles/tika-bundle-classic/pom.xml b/tika-bundles/tika-bundle-classic/pom.xml
index c156789..a5a84d6 100644
--- a/tika-bundles/tika-bundle-classic/pom.xml
+++ b/tika-bundles/tika-bundle-classic/pom.xml
@@ -379,8 +379,6 @@
               sun.java2d.cmm.kcms;resolution:=optional,
               sun.misc;resolution:=optional,
               com.jmatio.io;resolution:=optional,
-              com.google.gson;resolution:=optional,
-              com.google.gson.reflect;resolution:=optional,
               colorspace;resolution:=optional,
               com.sun.jna;resolution:=optional,
               com.sun.jna.ptr;resolution:=optional,
diff --git a/tika-eval/tika-eval-core/pom.xml b/tika-eval/tika-eval-core/pom.xml
index 664ef2d..791874c 100644
--- a/tika-eval/tika-eval-core/pom.xml
+++ b/tika-eval/tika-eval-core/pom.xml
@@ -44,9 +44,9 @@
             <version>${project.version}</version>
         </dependency>
         <dependency>
-            <groupId>com.google.code.gson</groupId>
-            <artifactId>gson</artifactId>
-            <version>${gson.version}</version>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>${jackson.version}</version>
         </dependency>
         <dependency>
             <groupId>commons-codec</groupId>
diff --git a/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/textstats/CompositeTextStatsCalculator.java b/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/textstats/CompositeTextStatsCalculator.java
index 051f69d..e3c95b9 100644
--- a/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/textstats/CompositeTextStatsCalculator.java
+++ b/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/textstats/CompositeTextStatsCalculator.java
@@ -30,6 +30,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.tika.eval.core.langid.LanguageIDWrapper;
 import org.apache.tika.eval.core.tokens.AnalyzerManager;
 import org.apache.tika.eval.core.tokens.TokenCounts;
+import org.apache.tika.exception.TikaException;
 import org.apache.tika.language.detect.LanguageResult;
 
 
diff --git a/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/tokens/AnalyzerDeserializer.java b/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/tokens/AnalyzerDeserializer.java
index 6e8999a..1cd869b 100644
--- a/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/tokens/AnalyzerDeserializer.java
+++ b/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/tokens/AnalyzerDeserializer.java
@@ -18,26 +18,22 @@ package org.apache.tika.eval.core.tokens;
 
 
 import java.io.IOException;
-import java.lang.reflect.Type;
+import java.io.Reader;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
-import com.google.gson.JsonArray;
-import com.google.gson.JsonDeserializationContext;
-import com.google.gson.JsonDeserializer;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParseException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.custom.CustomAnalyzer;
-import org.apache.lucene.analysis.util.CharFilterFactory;
 import org.apache.lucene.analysis.util.ClasspathResourceLoader;
 import org.apache.lucene.analysis.util.TokenFilterFactory;
 
-class AnalyzerDeserializer implements JsonDeserializer<Map<String, Analyzer>> {
+class AnalyzerDeserializer {
 
 
     private static final String ANALYZERS = "analyzers";
@@ -48,38 +44,16 @@ class AnalyzerDeserializer implements JsonDeserializer<Map<String, Analyzer>> {
     private static final String PARAMS = "params";
     private static final String COMMENT = "_comment";
 
-    private final int maxTokens;
-
-    AnalyzerDeserializer(int maxTokens) {
-        this.maxTokens = maxTokens;
-    }
-
-    @Override
-    public Map<String, Analyzer> deserialize(JsonElement element, Type type,
-                                             JsonDeserializationContext jsonDeserializationContext) throws JsonParseException {
-        if (! element.isJsonObject()) {
-            throw new IllegalArgumentException("Expecting top level 'analyzers:{}'");
-        }
-
-        JsonElement root = element.getAsJsonObject().get(ANALYZERS);
-        if (root == null) {
-            throw new IllegalArgumentException("Expecting top level 'analyzers:{}");
-        }
-        try {
-            return buildAnalyzers(root, maxTokens);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-
-    }
+    public static Map<String, Analyzer> buildAnalyzers(Reader reader, int maxTokens) throws IOException {
+        JsonNode root = new ObjectMapper().readTree(reader);
+        Map<String, Analyzer> analyzers = new HashMap<>();
 
-    public static Map<String, Analyzer> buildAnalyzers(JsonElement value, int maxTokens) throws IOException {
-        if (! value.isJsonObject()) {
-            throw new IllegalArgumentException("Expecting map with analyzer names/analyzer definitions");
+        if (! root.isObject() || root.get(ANALYZERS) == null) {
+            throw new IllegalArgumentException(
+                    "root object must be object with an 'analyzers' element");
         }
-        Map<String, Analyzer> analyzers = new HashMap<>();
-        JsonObject root = (JsonObject)value;
-        for (Map.Entry<String, JsonElement> e : root.entrySet()) {
+        for (Iterator<Map.Entry<String, JsonNode>> it = root.get(ANALYZERS).fields(); it.hasNext(); ) {
+            Map.Entry<String, JsonNode> e = it.next();
             String analyzerName = e.getKey();
             Analyzer analyzer = buildAnalyzer(analyzerName, e.getValue(), maxTokens);
             analyzers.put(analyzerName, analyzer);
@@ -87,13 +61,14 @@ class AnalyzerDeserializer implements JsonDeserializer<Map<String, Analyzer>> {
         return analyzers;
     }
 
-    public static Analyzer buildAnalyzer(String analyzerName, JsonElement value, int maxTokens) throws IOException {
-        if (! value.isJsonObject()) {
+    public static Analyzer buildAnalyzer(String analyzerName, JsonNode node, int maxTokens) throws IOException {
+        if (!node.isObject()) {
             throw new IllegalArgumentException("Expecting map of charfilter, tokenizer, tokenfilters");
         }
-        JsonObject aRoot = (JsonObject)value;
+
         CustomAnalyzer.Builder builder = CustomAnalyzer.builder(new ClasspathResourceLoader(AnalyzerDeserializer.class));
-        for ( Map.Entry<String, JsonElement> e : aRoot.entrySet()) {
+        for (Iterator<Map.Entry<String, JsonNode>> it = node.fields(); it.hasNext(); ) {
+            Map.Entry<String, JsonNode> e = it.next();
             String k = e.getKey();
             if (k.equals(CHAR_FILTERS)) {
                 buildCharFilters(e.getValue(), analyzerName, builder);
@@ -112,83 +87,84 @@ class AnalyzerDeserializer implements JsonDeserializer<Map<String, Analyzer>> {
         return builder.build();
     }
 
-    private static void buildTokenizerFactory(JsonElement map, String analyzerName,
+    private static void buildTokenizerFactory(JsonNode map, String analyzerName,
                                                           CustomAnalyzer.Builder builder) throws IOException {
-        if (!(map instanceof JsonObject)) {
+        if (! map.isObject()) {
             throw new IllegalArgumentException("Expecting a map with \"factory\" string and " +
                     "\"params\" map in tokenizer factory;"+
                     " not: "+map.toString() + " in "+analyzerName);
         }
-        JsonElement factoryEl = ((JsonObject)map).get(FACTORY);
-        if (factoryEl == null || ! factoryEl.isJsonPrimitive()) {
+        JsonNode factoryEl = map.get(FACTORY);
+        if (factoryEl == null || ! factoryEl.isTextual()) {
             throw new IllegalArgumentException("Expecting value for factory in char filter factory builder in:"+
                     analyzerName);
         }
-        String factoryName = factoryEl.getAsString();
+        String factoryName = factoryEl.asText();
         factoryName = factoryName.startsWith("oala.") ?
                 factoryName.replaceFirst("oala.", "org.apache.lucene.analysis.") : factoryName;
 
-        JsonElement paramsEl = ((JsonObject)map).get(PARAMS);
+        JsonNode paramsEl = map.get(PARAMS);
         Map<String, String> params = mapify(paramsEl);
         builder.withTokenizer(factoryName, params);
     }
 
-    private static void buildCharFilters(JsonElement el,
+    private static void buildCharFilters(JsonNode el,
                                                         String analyzerName, CustomAnalyzer.Builder builder) throws IOException {
-        if (el == null || el.isJsonNull()) {
+        if (el == null || el.isNull()) {
             return;
         }
-        if (! el.isJsonArray()) {
+        if (! el.isArray()) {
             throw new IllegalArgumentException("Expecting array for charfilters, but got:"+el.toString() +
                     " for "+analyzerName);
         }
-        JsonArray jsonArray = (JsonArray)el;
-        List<CharFilterFactory> ret = new LinkedList<CharFilterFactory>();
-        for (JsonElement filterMap : jsonArray) {
-            if (!(filterMap instanceof JsonObject)) {
+
+        for (Iterator<JsonNode> it = el.elements(); it.hasNext(); ) {
+            JsonNode filterMap = it.next();
+            if (! filterMap.isObject()) {
                 throw new IllegalArgumentException("Expecting a map with \"factory\" string and \"params\" map in char filter factory;"+
                         " not: "+filterMap.toString() + " in "+analyzerName);
             }
-            JsonElement factoryEl = ((JsonObject)filterMap).get(FACTORY);
-            if (factoryEl == null || ! factoryEl.isJsonPrimitive()) {
+            JsonNode factoryEl = filterMap.get(FACTORY);
+            if (factoryEl == null || ! factoryEl.isTextual()) {
                 throw new IllegalArgumentException(
                         "Expecting value for factory in char filter factory builder in:"+analyzerName);
             }
-            String factoryName = factoryEl.getAsString();
+            String factoryName = factoryEl.asText();
             factoryName = factoryName.replaceAll("oala.", "org.apache.lucene.analysis.");
 
-            JsonElement paramsEl = ((JsonObject)filterMap).get(PARAMS);
+            JsonNode paramsEl = filterMap.get(PARAMS);
             Map<String, String> params = mapify(paramsEl);
             builder.addCharFilter(factoryName, params);
         }
     }
 
-    private static void buildTokenFilterFactories(JsonElement el,
+    private static void buildTokenFilterFactories(JsonNode el,
                                                   String analyzerName,
                                                   int maxTokens, CustomAnalyzer.Builder builder) throws IOException {
-        if (el == null || el.isJsonNull()) {
+        if (el == null || el.isNull()) {
             return;
         }
-        if (! el.isJsonArray()) {
+        if (! el.isArray()) {
             throw new IllegalArgumentException(
                     "Expecting array for tokenfilters, but got:"+el.toString() + " in "+analyzerName);
         }
-        JsonArray jsonArray = (JsonArray)el;
+
         List<TokenFilterFactory> ret = new LinkedList<>();
-        for (JsonElement filterMap : jsonArray) {
-            if (!(filterMap instanceof JsonObject)) {
+        for (Iterator<JsonNode> it = el.elements(); it.hasNext(); ) {
+            JsonNode filterMap = it.next();
+            if (! filterMap.isObject()) {
                 throw new IllegalArgumentException("Expecting a map with \"factory\" string and \"params\" map in token filter factory;"+
                         " not: "+filterMap.toString() + " in "+ analyzerName);
             }
-            JsonElement factoryEl = ((JsonObject)filterMap).get(FACTORY);
-            if (factoryEl == null || ! factoryEl.isJsonPrimitive()) {
+            JsonNode factoryEl = filterMap.get(FACTORY);
+            if (factoryEl == null || ! factoryEl.isTextual()) {
                 throw new IllegalArgumentException("Expecting value for factory in token filter factory builder in "+analyzerName);
             }
-            String factoryName = factoryEl.getAsString();
+            String factoryName = factoryEl.asText();
             factoryName = factoryName.startsWith("oala.") ?
                     factoryName.replaceFirst("oala.", "org.apache.lucene.analysis.") :
                     factoryName;
-            JsonElement paramsEl = ((JsonObject)filterMap).get(PARAMS);
+            JsonNode paramsEl = filterMap.get(PARAMS);
             Map<String, String> params = mapify(paramsEl);
             builder.addTokenFilter(factoryName, params);
         }
@@ -202,20 +178,21 @@ class AnalyzerDeserializer implements JsonDeserializer<Map<String, Analyzer>> {
         }
     }
 
-    private static  Map<String, String> mapify(JsonElement paramsEl) {
-        if (paramsEl == null || paramsEl.isJsonNull()) {
+    private static  Map<String, String> mapify(JsonNode paramsEl) {
+        if (paramsEl == null || paramsEl.isNull()) {
             return Collections.EMPTY_MAP;
         }
-        if (! paramsEl.isJsonObject()) {
+        if (! paramsEl.isObject()) {
             throw new IllegalArgumentException("Expecting map, not: "+paramsEl.toString());
         }
         Map<String, String> params = new HashMap<>();
-        for (Map.Entry<String,JsonElement> e : ((JsonObject)paramsEl).entrySet()) {
-            JsonElement value = e.getValue();
-            if (! value.isJsonPrimitive()) {
+        for (Iterator<Map.Entry<String, JsonNode>> it = paramsEl.fields(); it.hasNext(); ) {
+            Map.Entry<String, JsonNode> e = it.next();
+            JsonNode value = e.getValue();
+            if (value.isObject() || value.isArray() || value.isNull()) {
                 throw new IllegalArgumentException("Expecting parameter to have primitive value: "+value.toString());
             }
-            String v = e.getValue().getAsString();
+            String v = e.getValue().asText();
             params.put(e.getKey(), v);
         }
         return params;
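
With the custom Gson JsonDeserializer gone, AnalyzerDeserializer is now a static
utility over Jackson's tree model, and the entrySet() loops become fields()
iterators. A minimal sketch of that iteration, assuming a config shaped like the
"analyzers" object this class reads (the inline JSON is an illustrative stand-in
for lucene-analyzers.json, not the shipped file):

    import java.io.StringReader;
    import java.util.Iterator;
    import java.util.Map;

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class FieldsIterationSketch {
        public static void main(String[] args) throws Exception {
            // Top-level "analyzers" object whose values describe each analyzer.
            String json = "{\"analyzers\":{\"general\":{\"tokenizer\":"
                    + "{\"factory\":\"oala.standard.StandardTokenizerFactory\"}}}}";

            JsonNode root = new ObjectMapper().readTree(new StringReader(json));
            JsonNode analyzers = root.get("analyzers");

            // fields() is the Jackson counterpart of Gson's JsonObject.entrySet().
            for (Iterator<Map.Entry<String, JsonNode>> it = analyzers.fields(); it.hasNext(); ) {
                Map.Entry<String, JsonNode> e = it.next();
                System.out.println(e.getKey() + " -> " + e.getValue());
            }
        }
    }
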
diff --git a/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/tokens/AnalyzerManager.java b/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/tokens/AnalyzerManager.java
index 569b1a6..a03f4d2 100644
--- a/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/tokens/AnalyzerManager.java
+++ b/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/tokens/AnalyzerManager.java
@@ -16,16 +16,15 @@
  */
 package org.apache.tika.eval.core.tokens;
 
+import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.Reader;
 import java.nio.charset.StandardCharsets;
 import java.util.Map;
 
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.gson.JsonParseException;
 import org.apache.lucene.analysis.Analyzer;
+import org.apache.tika.exception.TikaException;
 
 public class AnalyzerManager {
 
@@ -42,20 +41,23 @@ public class AnalyzerManager {
     }
 
     public static AnalyzerManager newInstance(int maxTokens) {
-        InputStream is = AnalyzerManager.class.getClassLoader().getResourceAsStream("lucene-analyzers.json");
-        Reader reader = new InputStreamReader(is, StandardCharsets.UTF_8);
-        GsonBuilder builder = new GsonBuilder();
-        builder.registerTypeHierarchyAdapter(Map.class, new AnalyzerDeserializer(maxTokens));
-        Gson gson = builder.create();
-        Map<String, Analyzer> map = gson.fromJson(reader, Map.class);
+        Map<String, Analyzer> map;
+        try (InputStream is = AnalyzerManager.class.getClassLoader()
+                .getResourceAsStream("lucene-analyzers.json")) {
+            try (Reader reader = new InputStreamReader(is, StandardCharsets.UTF_8)) {
+                map = AnalyzerDeserializer.buildAnalyzers(reader, maxTokens);
+            }
+        } catch (IOException e) {
+            throw new RuntimeException("Can't find lucene-analyzers.json?!");
+        }
         Analyzer general = map.get(GENERAL);
         Analyzer alphaIdeo = map.get(ALPHA_IDEOGRAPH);
         Analyzer common = map.get(COMMON_TOKENS);
         if (general == null) {
-            throw new JsonParseException("Must specify "+GENERAL + " analyzer");
+            throw new IllegalStateException("Must specify "+GENERAL + " analyzer");
         }
         if (common == null) {
-            throw new JsonParseException("Must specify "+ COMMON_TOKENS + " analyzer");
+            throw new IllegalStateException("Must specify "+ COMMON_TOKENS + " analyzer");
         }
 
         return new AnalyzerManager(general, common);
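
AnalyzerManager now loads the config through try-with-resources and rethrows any
IOException unchecked. A standalone sketch of the same loading pattern, with an
added null check on the classpath lookup (the null check and error message are an
illustrative addition, not part of the patch):

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.io.Reader;
    import java.nio.charset.StandardCharsets;

    public class ResourceLoadSketch {

        static Reader openConfig(String name) throws IOException {
            InputStream is = ResourceLoadSketch.class.getClassLoader().getResourceAsStream(name);
            if (is == null) {
                // getResourceAsStream() returns null rather than throwing when the
                // resource is missing, so the sketch turns that into an IOException.
                throw new IOException("couldn't find " + name + " on the classpath");
            }
            return new InputStreamReader(is, StandardCharsets.UTF_8);
        }

        public static void main(String[] args) throws IOException {
            try (Reader reader = openConfig("lucene-analyzers.json")) {
                System.out.println("opened config, ready=" + reader.ready());
            }
        }
    }
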
diff --git a/tika-langdetect/tika-langdetect-lingo24/pom.xml b/tika-langdetect/tika-langdetect-lingo24/pom.xml
index 487a768..67e6aa6 100644
--- a/tika-langdetect/tika-langdetect-lingo24/pom.xml
+++ b/tika-langdetect/tika-langdetect-lingo24/pom.xml
@@ -60,9 +60,9 @@
             </exclusions>
         </dependency>
         <dependency>
-            <groupId>com.google.code.gson</groupId>
-            <artifactId>gson</artifactId>
-            <version>${gson.version}</version>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>${jackson.version}</version>
         </dependency>
 
         <!-- Test dependencies -->
diff --git a/tika-langdetect/tika-langdetect-lingo24/src/main/java/org/apache/tika/langdetect/lingo24/Lingo24LangDetector.java b/tika-langdetect/tika-langdetect-lingo24/src/main/java/org/apache/tika/langdetect/lingo24/Lingo24LangDetector.java
index ca8e31f..ef3f19a 100644
--- a/tika-langdetect/tika-langdetect-lingo24/src/main/java/org/apache/tika/langdetect/lingo24/Lingo24LangDetector.java
+++ b/tika-langdetect/tika-langdetect-lingo24/src/main/java/org/apache/tika/langdetect/lingo24/Lingo24LangDetector.java
@@ -16,11 +16,10 @@
  */
 package org.apache.tika.langdetect.lingo24;
 
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.cxf.jaxrs.client.WebClient;
-import org.apache.tika.exception.TikaException;
 import org.apache.tika.language.detect.LanguageConfidence;
 import org.apache.tika.language.detect.LanguageDetector;
 import org.apache.tika.language.detect.LanguageResult;
@@ -154,10 +153,15 @@ public class Lingo24LangDetector extends LanguageDetector {
         Response response = client.accept(MediaType.APPLICATION_JSON).form(form);
 
         String json = response.readEntity(String.class);
-        JsonElement element = new JsonParser().parse(json);
-        if (element.getAsJsonObject().get("success") != null &&
-                element.getAsJsonObject().get("success").getAsString().equals("true")) {
-            language = element.getAsJsonObject().get("lang").getAsString();
+        JsonNode element = null;
+        try {
+            element = new ObjectMapper().readTree(json);
+        } catch (JsonProcessingException e) {
+            LOG.warn("problem detecting ", e);
+        }
+        if (element.has("success") &&
+                element.get("success").asText().equals("true")) {
+            language = element.get("lang").asText();
         }
         return language;
     }
@@ -181,11 +185,13 @@ public class Lingo24LangDetector extends LanguageDetector {
                     .query("user_key", userKey).get();
 
             String json = response.readEntity(String.class);
-            JsonArray jsonArray = new JsonParser().parse(json).getAsJsonObject().get("source_langs").getAsJsonArray();
-            for (JsonElement jsonElement : jsonArray) {
-                languages.add(jsonElement.getAsJsonArray().get(0).getAsString());
+            JsonNode jsonArray = new ObjectMapper().readTree(json).get("source_langs");
+            for (JsonNode jsonElement : jsonArray) {
+                languages.add(jsonElement.get(0).asText());
             }
-        } catch (Throwable e) {
+        } catch (SecurityException e) {
+            throw e;
+        } catch (Exception e) {
             LOG.warn("problem detecting", e);
         } finally {
             if (_client != null) {
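
Unlike Gson's JsonParser, ObjectMapper.readTree(String) declares a checked
JsonProcessingException, which is why detectLanguage() now wraps the call in
try/catch. A minimal sketch of the same call with an explicit null guard on the
parsed node (the guard, response body, and values are illustrative, not part of
the patch):

    import com.fasterxml.jackson.core.JsonProcessingException;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class GuardedParseSketch {
        public static void main(String[] args) {
            String json = "{\"success\":\"true\",\"lang\":\"de\"}"; // illustrative response body

            JsonNode element = null;
            try {
                element = new ObjectMapper().readTree(json);
            } catch (JsonProcessingException e) {
                // parse failure is only logged in the detector, so element may stay null
            }

            String language = null;
            // Null-guard the node before reading "success" and "lang".
            if (element != null && element.has("success")
                    && "true".equals(element.get("success").asText())) {
                language = element.get("lang").asText();
            }
            System.out.println(language); // prints: de
        }
    }
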
diff --git a/tika-langdetect/tika-langdetect-mitll-text/pom.xml b/tika-langdetect/tika-langdetect-mitll-text/pom.xml
index 38040ee..673cfc2 100644
--- a/tika-langdetect/tika-langdetect-mitll-text/pom.xml
+++ b/tika-langdetect/tika-langdetect-mitll-text/pom.xml
@@ -44,6 +44,11 @@
             <version>${jaxb.version}</version>
         </dependency>
         <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+        <dependency>
             <groupId>org.apache.cxf</groupId>
             <artifactId>cxf-rt-rs-client</artifactId>
             <version>${cxf.version}</version>
@@ -58,11 +63,6 @@
                 </exclusion>
             </exclusions>
         </dependency>
-        <dependency>
-            <groupId>com.google.code.gson</groupId>
-            <artifactId>gson</artifactId>
-            <version>${gson.version}</version>
-        </dependency>
 
         <!-- Test dependencies -->
         <dependency>
diff --git a/tika-langdetect/tika-langdetect-mitll-text/src/main/java/org/apache/tika/langdetect/mitll/TextLangDetector.java b/tika-langdetect/tika-langdetect-mitll-text/src/main/java/org/apache/tika/langdetect/mitll/TextLangDetector.java
index 9a692ed..315f0bf 100644
--- a/tika-langdetect/tika-langdetect-mitll-text/src/main/java/org/apache/tika/langdetect/mitll/TextLangDetector.java
+++ b/tika-langdetect/tika-langdetect-mitll-text/src/main/java/org/apache/tika/langdetect/mitll/TextLangDetector.java
@@ -16,9 +16,8 @@
  */
 package org.apache.tika.langdetect.mitll;
 
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonParser;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.cxf.jaxrs.client.WebClient;
 import org.apache.tika.language.detect.LanguageConfidence;
 import org.apache.tika.language.detect.LanguageDetector;
@@ -111,9 +110,9 @@ public class TextLangDetector extends LanguageDetector {
                     .create(restHostUrlStr + TEXT_LID_PATH)
                     .get();
             String json = response.readEntity(String.class);
-            JsonArray jsonArray = new JsonParser().parse(json).getAsJsonObject().get("all_languages").getAsJsonArray();
-            for (JsonElement jsonElement : jsonArray) {
-                languages.add(jsonElement.toString());
+            JsonNode jsonArray = new ObjectMapper().readTree(json).get("all_languages");
+            for (JsonNode jsonElement : jsonArray) {
+                languages.add(jsonElement.asText());
             }
         } catch (Exception e) {
             LOG.warn("problem getting and parsing json", e);
@@ -128,7 +127,7 @@ public class TextLangDetector extends LanguageDetector {
                     .create(restHostUrlStr + TEXT_LID_PATH)
                     .put(content);
             String json = response.readEntity(String.class);
-            language = new JsonParser().parse(json).getAsJsonObject().get("language").getAsString();
+            language = new ObjectMapper().readTree(json).get("language").asText();
         } catch (Exception e) {
             LOG.warn("problem detecting", e);
         }
@@ -141,7 +140,7 @@ public class TextLangDetector extends LanguageDetector {
                     .create(TEXT_REST_HOST + TEXT_LID_PATH)
                     .get();
             String json = response.readEntity(String.class);
-            JsonArray jsonArray = new JsonParser().parse(json).getAsJsonObject().get("all_languages").getAsJsonArray();
+            JsonNode jsonArray = new ObjectMapper().readTree(json).get("all_languages");
             return jsonArray.size() != 0;
         } catch (Exception e) {
             LOG.warn("Can't run", e);