You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@metamodel.apache.org by ka...@apache.org on 2016/05/16 03:54:16 UTC
[41/42] metamodel git commit: Merge branch 'master' into 5.x
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/fixedwidth/src/main/java/org/apache/metamodel/fixedwidth/FixedWidthConfigurationReader.java
----------------------------------------------------------------------
diff --cc fixedwidth/src/main/java/org/apache/metamodel/fixedwidth/FixedWidthConfigurationReader.java
index 0000000,9154e5e..3c8d14c
mode 000000,100644..100644
--- a/fixedwidth/src/main/java/org/apache/metamodel/fixedwidth/FixedWidthConfigurationReader.java
+++ b/fixedwidth/src/main/java/org/apache/metamodel/fixedwidth/FixedWidthConfigurationReader.java
@@@ -1,0 -1,180 +1,180 @@@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.metamodel.fixedwidth;
-
-import java.io.BufferedReader;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.metamodel.csv.CsvConfiguration;
-import org.apache.metamodel.csv.CsvDataContext;
-import org.apache.metamodel.data.DataSet;
-import org.apache.metamodel.schema.Table;
-import org.apache.metamodel.util.Action;
-import org.apache.metamodel.util.Resource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Object capable of reading fixed width metadata from external sources and
- * thereby producing an appropriate {@link FixedWidthConfiguration} to use with
- * a {@link FixedWidthDataContext}.
- */
-public class FixedWidthConfigurationReader {
-
- private static final Logger logger = LoggerFactory.getLogger(FixedWidthConfigurationReader.class);
-
- // example: @1 COL1 $char1.
- private final Pattern PATTERN_SAS_INPUT_LINE = Pattern.compile("\\@(\\d+) (.+) .*?(\\d+)\\.");
-
- // example: COL1 "Record type"
- private final Pattern PATTERN_SAS_LABEL_LINE = Pattern.compile("(.+) \\\"(.+)\\\"");
-
- /**
- * Reads a {@link FixedWidthConfiguration} based on a SAS 'format file',
- * <a href=
- * "http://support.sas.com/documentation/cdl/en/etlug/67323/HTML/default/viewer.htm#p0h03yig7fp1qan1arghp3lwjqi6.htm">
- * described here</a>.
- *
- * @param encoding
- * @param resource
- * the format file resource
- * @param failOnInconsistentLineWidth
- * @return a {@link FixedWidthConfiguration} object to use
- */
- public FixedWidthConfiguration readFromSasFormatFile(String encoding, Resource resource,
- boolean failOnInconsistentLineWidth) {
- final List<FixedWidthColumnSpec> columnSpecs = new ArrayList<>();
-
- final CsvDataContext dataContext = new CsvDataContext(resource, new CsvConfiguration());
- final Table table = dataContext.getDefaultSchema().getTable(0);
- try (final DataSet dataSet = dataContext.query().from(table).select("Name", "BeginPosition", "EndPosition")
- .execute()) {
- while (dataSet.next()) {
- final String name = (String) dataSet.getRow().getValue(0);
- final int beginPosition = Integer.parseInt((String) dataSet.getRow().getValue(1));
- final int endPosition = Integer.parseInt((String) dataSet.getRow().getValue(2));
- final int width = 1 + endPosition - beginPosition;
- columnSpecs.add(new FixedWidthColumnSpec(name, width));
- }
- }
-
- return new FixedWidthConfiguration(encoding, columnSpecs, failOnInconsistentLineWidth);
- }
-
- /**
- * Reads a {@link FixedWidthConfiguration} based on a SAS INPUT declaration.
- * The reader method also optionally will look for a LABEL defintion for
- * column naming.
- *
- * @param encoding
- * @param resource
- * the format file resource
- * @param failOnInconsistentLineWidth
- * @return a {@link FixedWidthConfiguration} object to use
- */
- public FixedWidthConfiguration readFromSasInputDefinition(String encoding, Resource resource,
- boolean failOnInconsistentLineWidth) {
-
- final Map<String, Integer> inputWidthDeclarations = new LinkedHashMap<>();
- final Map<String, String> labelDeclarations = new HashMap<>();
-
- resource.read(new Action<InputStream>() {
-
- private boolean inInputSection = false;
- private boolean inLabelSection = false;
-
- @Override
- public void run(InputStream in) throws Exception {
- try (final BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
- for (String line = reader.readLine(); line != null; line = reader.readLine()) {
- processLine(line);
- }
- }
- }
-
- private void processLine(String line) {
- line = line.trim();
- if (line.isEmpty()) {
- return;
- }
- if (";".equals(line)) {
- inInputSection = false;
- inLabelSection = false;
- return;
- } else if ("INPUT".equals(line)) {
- inInputSection = true;
- return;
- } else if ("LABEL".equals(line)) {
- inLabelSection = true;
- return;
- }
-
- if (inInputSection) {
- final Matcher matcher = PATTERN_SAS_INPUT_LINE.matcher(line);
- if (matcher.matches()) {
- final String positionSpec = matcher.group(1);
- final String nameSpec = matcher.group(2);
- final int width = Integer.parseInt(matcher.group(3));
- logger.debug("Parsed INPUT line \"{}\": position={}, name={}, width={}", line, positionSpec,
- nameSpec, width);
- inputWidthDeclarations.put(nameSpec, width);
- } else {
- logger.debug("Failed to parse/recognize INPUT line \"{}\"", line);
- }
- } else if (inLabelSection) {
- final Matcher matcher = PATTERN_SAS_LABEL_LINE.matcher(line);
- if (matcher.matches()) {
- final String nameSpec = matcher.group(1);
- final String labelSpec = matcher.group(2);
- logger.debug("Parsed LABEL line \"{}\": name={}, label={}", line, nameSpec, labelSpec);
- labelDeclarations.put(nameSpec, labelSpec);
- } else {
- logger.debug("Failed to parse/recognize LABEL line \"{}\"", line);
- }
- }
-
- if (line.endsWith(";")) {
- inInputSection = false;
- inLabelSection = false;
- }
- }
- });
-
- final List<FixedWidthColumnSpec> columnSpecs = new ArrayList<>();
- for (Entry<String, Integer> entry : inputWidthDeclarations.entrySet()) {
- final String columnKey = entry.getKey();
- final Integer columnWidth = entry.getValue();
- final String columnLabel = labelDeclarations.get(columnKey);
- final String columnName = columnLabel == null ? columnKey : columnLabel;
- columnSpecs.add(new FixedWidthColumnSpec(columnName, columnWidth));
- }
-
- return new FixedWidthConfiguration(encoding, columnSpecs, failOnInconsistentLineWidth);
- }
-
-}
++/**
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements. See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership. The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License. You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied. See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++package org.apache.metamodel.fixedwidth;
++
++import java.io.BufferedReader;
++import java.io.InputStream;
++import java.io.InputStreamReader;
++import java.util.ArrayList;
++import java.util.HashMap;
++import java.util.LinkedHashMap;
++import java.util.List;
++import java.util.Map;
++import java.util.Map.Entry;
++import java.util.regex.Matcher;
++import java.util.regex.Pattern;
++
++import org.apache.metamodel.csv.CsvConfiguration;
++import org.apache.metamodel.csv.CsvDataContext;
++import org.apache.metamodel.data.DataSet;
++import org.apache.metamodel.schema.Table;
++import org.apache.metamodel.util.Action;
++import org.apache.metamodel.util.Resource;
++import org.slf4j.Logger;
++import org.slf4j.LoggerFactory;
++
++/**
++ * Object capable of reading fixed width metadata from external sources and
++ * thereby producing an appropriate {@link FixedWidthConfiguration} to use with
++ * a {@link FixedWidthDataContext}.
++ */
++public class FixedWidthConfigurationReader {
++
++ private static final Logger logger = LoggerFactory.getLogger(FixedWidthConfigurationReader.class);
++
++ // example: @1 COL1 $char1.
++ private final Pattern PATTERN_SAS_INPUT_LINE = Pattern.compile("\\@(\\d+) (.+) .*?(\\d+)\\.");
++
++ // example: COL1 "Record type"
++ private final Pattern PATTERN_SAS_LABEL_LINE = Pattern.compile("(.+) \\\"(.+)\\\"");
++
++ /**
++ * Reads a {@link FixedWidthConfiguration} based on a SAS 'format file',
++ * <a href=
++ * "http://support.sas.com/documentation/cdl/en/etlug/67323/HTML/default/viewer.htm#p0h03yig7fp1qan1arghp3lwjqi6.htm">
++ * described here</a>.
++ *
++ * @param encoding
++ * @param resource
++ * the format file resource
++ * @param failOnInconsistentLineWidth
++ * @return a {@link FixedWidthConfiguration} object to use
++ */
++ public FixedWidthConfiguration readFromSasFormatFile(String encoding, Resource resource,
++ boolean failOnInconsistentLineWidth) {
++ final List<FixedWidthColumnSpec> columnSpecs = new ArrayList<>();
++
++ final CsvDataContext dataContext = new CsvDataContext(resource, new CsvConfiguration());
++ final Table table = dataContext.getDefaultSchema().getTable(0);
++ try (final DataSet dataSet = dataContext.query().from(table).select("Name", "BeginPosition", "EndPosition")
++ .execute()) {
++ while (dataSet.next()) {
++ final String name = (String) dataSet.getRow().getValue(0);
++ final int beginPosition = Integer.parseInt((String) dataSet.getRow().getValue(1));
++ final int endPosition = Integer.parseInt((String) dataSet.getRow().getValue(2));
++ final int width = 1 + endPosition - beginPosition;
++ columnSpecs.add(new FixedWidthColumnSpec(name, width));
++ }
++ }
++
++ return new FixedWidthConfiguration(encoding, columnSpecs, failOnInconsistentLineWidth);
++ }
++
++ /**
++ * Reads a {@link FixedWidthConfiguration} based on a SAS INPUT declaration.
++ * The reader method also optionally will look for a LABEL defintion for
++ * column naming.
++ *
++ * @param encoding
++ * @param resource
++ * the format file resource
++ * @param failOnInconsistentLineWidth
++ * @return a {@link FixedWidthConfiguration} object to use
++ */
++ public FixedWidthConfiguration readFromSasInputDefinition(String encoding, Resource resource,
++ boolean failOnInconsistentLineWidth) {
++
++ final Map<String, Integer> inputWidthDeclarations = new LinkedHashMap<>();
++ final Map<String, String> labelDeclarations = new HashMap<>();
++
++ resource.read(new Action<InputStream>() {
++
++ private boolean inInputSection = false;
++ private boolean inLabelSection = false;
++
++ @Override
++ public void run(InputStream in) throws Exception {
++ try (final BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
++ for (String line = reader.readLine(); line != null; line = reader.readLine()) {
++ processLine(line);
++ }
++ }
++ }
++
++ private void processLine(String line) {
++ line = line.trim();
++ if (line.isEmpty()) {
++ return;
++ }
++ if (";".equals(line)) {
++ inInputSection = false;
++ inLabelSection = false;
++ return;
++ } else if ("INPUT".equals(line)) {
++ inInputSection = true;
++ return;
++ } else if ("LABEL".equals(line)) {
++ inLabelSection = true;
++ return;
++ }
++
++ if (inInputSection) {
++ final Matcher matcher = PATTERN_SAS_INPUT_LINE.matcher(line);
++ if (matcher.matches()) {
++ final String positionSpec = matcher.group(1);
++ final String nameSpec = matcher.group(2);
++ final int width = Integer.parseInt(matcher.group(3));
++ logger.debug("Parsed INPUT line \"{}\": position={}, name={}, width={}", line, positionSpec,
++ nameSpec, width);
++ inputWidthDeclarations.put(nameSpec, width);
++ } else {
++ logger.debug("Failed to parse/recognize INPUT line \"{}\"", line);
++ }
++ } else if (inLabelSection) {
++ final Matcher matcher = PATTERN_SAS_LABEL_LINE.matcher(line);
++ if (matcher.matches()) {
++ final String nameSpec = matcher.group(1);
++ final String labelSpec = matcher.group(2);
++ logger.debug("Parsed LABEL line \"{}\": name={}, label={}", line, nameSpec, labelSpec);
++ labelDeclarations.put(nameSpec, labelSpec);
++ } else {
++ logger.debug("Failed to parse/recognize LABEL line \"{}\"", line);
++ }
++ }
++
++ if (line.endsWith(";")) {
++ inInputSection = false;
++ inLabelSection = false;
++ }
++ }
++ });
++
++ final List<FixedWidthColumnSpec> columnSpecs = new ArrayList<>();
++ for (Entry<String, Integer> entry : inputWidthDeclarations.entrySet()) {
++ final String columnKey = entry.getKey();
++ final Integer columnWidth = entry.getValue();
++ final String columnLabel = labelDeclarations.get(columnKey);
++ final String columnName = columnLabel == null ? columnKey : columnLabel;
++ columnSpecs.add(new FixedWidthColumnSpec(columnName, columnWidth));
++ }
++
++ return new FixedWidthConfiguration(encoding, columnSpecs, failOnInconsistentLineWidth);
++ }
++
++}
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/fixedwidth/src/test/java/org/apache/metamodel/fixedwidth/FixedWidthConfigurationReaderTest.java
----------------------------------------------------------------------
diff --cc fixedwidth/src/test/java/org/apache/metamodel/fixedwidth/FixedWidthConfigurationReaderTest.java
index 0000000,eb57233..c34b294
mode 000000,100644..100644
--- a/fixedwidth/src/test/java/org/apache/metamodel/fixedwidth/FixedWidthConfigurationReaderTest.java
+++ b/fixedwidth/src/test/java/org/apache/metamodel/fixedwidth/FixedWidthConfigurationReaderTest.java
@@@ -1,0 -1,89 +1,89 @@@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.metamodel.fixedwidth;
-
-import static org.junit.Assert.*;
-
-import java.util.Arrays;
-
-import org.apache.metamodel.DataContext;
-import org.apache.metamodel.data.DataSet;
-import org.apache.metamodel.schema.Table;
-import org.apache.metamodel.util.FileResource;
-import org.apache.metamodel.util.Resource;
-import org.junit.Test;
-
-public class FixedWidthConfigurationReaderTest {
-
- private final FileResource dataResource = new FileResource("src/test/resources/metadata_spec1/data.txt");
-
- @Test
- public void testReadConfigurationFromSasFormatFile() throws Exception {
- final FixedWidthConfigurationReader reader = new FixedWidthConfigurationReader();
- final Resource resource = new FileResource("src/test/resources/metadata_spec1/sas-formatfile-metadata.txt");
- assertTrue(resource.isExists());
-
- final FixedWidthConfiguration configuration = reader.readFromSasFormatFile("UTF8", resource, false);
- assertEquals("[1, 20, 2]", Arrays.toString(configuration.getValueWidths()));
-
- final FixedWidthDataContext dataContext = new FixedWidthDataContext(dataResource, configuration);
-
- performAssertionsOnSpec1(dataContext);
- }
-
- @Test
- public void testReadConfigurationFromSasInputMetadata() throws Exception {
- final FixedWidthConfigurationReader reader = new FixedWidthConfigurationReader();
- final Resource resource = new FileResource("src/test/resources/metadata_spec1/sas-input-metadata.txt");
- assertTrue(resource.isExists());
-
- final FixedWidthConfiguration configuration = reader.readFromSasInputDefinition("UTF8", resource, false);
- assertEquals("[1, 20, 2]", Arrays.toString(configuration.getValueWidths()));
-
- final FixedWidthDataContext dataContext = new FixedWidthDataContext(dataResource, configuration);
-
- performAssertionsOnSpec1(dataContext);
- }
-
- /**
- * Shared assertions section once the 'metadata_spec1' {@link DataContext}
- * has been loaded.
- *
- * @param dataContext
- */
- private void performAssertionsOnSpec1(FixedWidthDataContext dataContext) {
- final Table table = dataContext.getDefaultSchema().getTable(0);
- final String[] columnNames = table.getColumnNames();
- assertEquals("[Record type, Description, Initials]", Arrays.toString(columnNames));
-
- try (final DataSet dataSet = dataContext.query().from(table).selectAll().execute()) {
- assertTrue(dataSet.next());
- assertEquals("Row[values=[P, Kasper Sorensen, KS]]", dataSet.getRow().toString());
- assertTrue(dataSet.next());
- assertEquals("Row[values=[C, Human Inference, HI]]", dataSet.getRow().toString());
- assertTrue(dataSet.next());
- assertEquals("Row[values=[P, Ankit Kumar, AK]]", dataSet.getRow().toString());
- assertTrue(dataSet.next());
- assertEquals("Row[values=[C, Stratio, S]]", dataSet.getRow().toString());
- assertTrue(dataSet.next());
- assertEquals("Row[values=[U, Unknown, ]]", dataSet.getRow().toString());
- assertFalse(dataSet.next());
- }
- }
-}
++/**
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements. See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership. The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License. You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied. See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++package org.apache.metamodel.fixedwidth;
++
++import static org.junit.Assert.*;
++
++import java.util.Arrays;
++
++import org.apache.metamodel.DataContext;
++import org.apache.metamodel.data.DataSet;
++import org.apache.metamodel.schema.Table;
++import org.apache.metamodel.util.FileResource;
++import org.apache.metamodel.util.Resource;
++import org.junit.Test;
++
++public class FixedWidthConfigurationReaderTest {
++
++ private final FileResource dataResource = new FileResource("src/test/resources/metadata_spec1/data.txt");
++
++ @Test
++ public void testReadConfigurationFromSasFormatFile() throws Exception {
++ final FixedWidthConfigurationReader reader = new FixedWidthConfigurationReader();
++ final Resource resource = new FileResource("src/test/resources/metadata_spec1/sas-formatfile-metadata.txt");
++ assertTrue(resource.isExists());
++
++ final FixedWidthConfiguration configuration = reader.readFromSasFormatFile("UTF8", resource, false);
++ assertEquals("[1, 20, 2]", Arrays.toString(configuration.getValueWidths()));
++
++ final FixedWidthDataContext dataContext = new FixedWidthDataContext(dataResource, configuration);
++
++ performAssertionsOnSpec1(dataContext);
++ }
++
++ @Test
++ public void testReadConfigurationFromSasInputMetadata() throws Exception {
++ final FixedWidthConfigurationReader reader = new FixedWidthConfigurationReader();
++ final Resource resource = new FileResource("src/test/resources/metadata_spec1/sas-input-metadata.txt");
++ assertTrue(resource.isExists());
++
++ final FixedWidthConfiguration configuration = reader.readFromSasInputDefinition("UTF8", resource, false);
++ assertEquals("[1, 20, 2]", Arrays.toString(configuration.getValueWidths()));
++
++ final FixedWidthDataContext dataContext = new FixedWidthDataContext(dataResource, configuration);
++
++ performAssertionsOnSpec1(dataContext);
++ }
++
++ /**
++ * Shared assertions section once the 'metadata_spec1' {@link DataContext}
++ * has been loaded.
++ *
++ * @param dataContext
++ */
++ private void performAssertionsOnSpec1(FixedWidthDataContext dataContext) {
++ final Table table = dataContext.getDefaultSchema().getTable(0);
++ final String[] columnNames = table.getColumnNames();
++ assertEquals("[Record type, Description, Initials]", Arrays.toString(columnNames));
++
++ try (final DataSet dataSet = dataContext.query().from(table).selectAll().execute()) {
++ assertTrue(dataSet.next());
++ assertEquals("Row[values=[P, Kasper Sorensen, KS]]", dataSet.getRow().toString());
++ assertTrue(dataSet.next());
++ assertEquals("Row[values=[C, Human Inference, HI]]", dataSet.getRow().toString());
++ assertTrue(dataSet.next());
++ assertEquals("Row[values=[P, Ankit Kumar, AK]]", dataSet.getRow().toString());
++ assertTrue(dataSet.next());
++ assertEquals("Row[values=[C, Stratio, S]]", dataSet.getRow().toString());
++ assertTrue(dataSet.next());
++ assertEquals("Row[values=[U, Unknown, ]]", dataSet.getRow().toString());
++ assertFalse(dataSet.next());
++ }
++ }
++}
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/fixedwidth/src/test/resources/metadata_spec1/data.txt
----------------------------------------------------------------------
diff --cc fixedwidth/src/test/resources/metadata_spec1/data.txt
index 0000000,785a539..ac055c9
mode 000000,100644..100644
--- a/fixedwidth/src/test/resources/metadata_spec1/data.txt
+++ b/fixedwidth/src/test/resources/metadata_spec1/data.txt
@@@ -1,0 -1,5 +1,5 @@@
-PKasper Sorensen KS
-CHuman Inference HI
-PAnkit Kumar AK
-CStratio S
-UUnknown
++PKasper Sorensen KS
++CHuman Inference HI
++PAnkit Kumar AK
++CStratio S
++UUnknown
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/fixedwidth/src/test/resources/metadata_spec1/sas-formatfile-metadata.txt
----------------------------------------------------------------------
diff --cc fixedwidth/src/test/resources/metadata_spec1/sas-formatfile-metadata.txt
index 0000000,9bbe411..38b0e04
mode 000000,100644..100644
--- a/fixedwidth/src/test/resources/metadata_spec1/sas-formatfile-metadata.txt
+++ b/fixedwidth/src/test/resources/metadata_spec1/sas-formatfile-metadata.txt
@@@ -1,0 -1,4 +1,4 @@@
-Name,SASColumnType,BeginPosition,EndPosition,ReadFlag,Desc,SASFormat,SASInformat
-Record type,C,1,1,y,Record Type,$char.,$char.
-Description,C,2,21,y,Description of record,$char.,$char.
-Initials,C,22,23,y,Initials of record,,
++Name,SASColumnType,BeginPosition,EndPosition,ReadFlag,Desc,SASFormat,SASInformat
++Record type,C,1,1,y,Record Type,$char.,$char.
++Description,C,2,21,y,Description of record,$char.,$char.
++Initials,C,22,23,y,Initials of record,,
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/fixedwidth/src/test/resources/metadata_spec1/sas-input-metadata.txt
----------------------------------------------------------------------
diff --cc fixedwidth/src/test/resources/metadata_spec1/sas-input-metadata.txt
index 0000000,f12e418..6839a9b
mode 000000,100644..100644
--- a/fixedwidth/src/test/resources/metadata_spec1/sas-input-metadata.txt
+++ b/fixedwidth/src/test/resources/metadata_spec1/sas-input-metadata.txt
@@@ -1,0 -1,19 +1,19 @@@
-INPUT
-
- @1 COL1 $char1.
-
- @2 COL2 $char20.
-
- @22 COL3 $char2.
-
-;
-
-LABEL
-
- COL1 "Record type"
-
- COL2 "Description"
-
- COL3 "Initials"
-
-;
++INPUT
++
++ @1 COL1 $char1.
++
++ @2 COL2 $char20.
++
++ @22 COL3 $char2.
++
++;
++
++LABEL
++
++ COL1 "Record type"
++
++ COL2 "Description"
++
++ COL3 "Initials"
++
++;
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/full/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/mongodb/common/pom.xml
----------------------------------------------------------------------
diff --cc mongodb/common/pom.xml
index 0000000,850d2f7..9675d9e
mode 000000,100644..100644
--- a/mongodb/common/pom.xml
+++ b/mongodb/common/pom.xml
@@@ -1,0 -1,67 +1,67 @@@
+ <?xml version="1.0" encoding="UTF-8" ?>
+ <!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+ -->
+ <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <artifactId>MetaModel-mongodb</artifactId>
+ <groupId>org.apache.metamodel</groupId>
- <version>4.5.3-SNAPSHOT</version>
++ <version>5.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>MetaModel-mongodb-common</artifactId>
+ <name>MetaModel module for MongoDB commons</name>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.metamodel</groupId>
+ <artifactId>MetaModel-core</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mongodb</groupId>
+ <artifactId>mongo-java-driver</artifactId>
+ <version>3.1.0</version>
+ <scope>provided</scope>
+ </dependency>
+
+ <!-- Test dependencies -->
+ <dependency>
+ <groupId>org.apache.metamodel</groupId>
+ <artifactId>MetaModel-jdbc</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.derby</groupId>
+ <artifactId>derby</artifactId>
+ <version>10.8.1.2</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-nop</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </project>
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/mongodb/mongo2/pom.xml
----------------------------------------------------------------------
diff --cc mongodb/mongo2/pom.xml
index 0000000,4905102..8cd2540
mode 000000,100644..100644
--- a/mongodb/mongo2/pom.xml
+++ b/mongodb/mongo2/pom.xml
@@@ -1,0 -1,70 +1,70 @@@
+ <?xml version="1.0" encoding="UTF-8" ?>
+ <!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+ -->
+ <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <artifactId>MetaModel-mongodb</artifactId>
+ <groupId>org.apache.metamodel</groupId>
- <version>4.5.3-SNAPSHOT</version>
++ <version>5.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>MetaModel-mongodb-mongo2</artifactId>
+ <name>MetaModel module for MongoDB 2.x</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.metamodel</groupId>
+ <artifactId>MetaModel-core</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.metamodel</groupId>
+ <artifactId>MetaModel-mongodb-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mongodb</groupId>
+ <artifactId>mongo-java-driver</artifactId>
+ <version>2.14.0</version>
+ </dependency>
+
+ <!-- Test dependencies -->
+ <dependency>
+ <groupId>org.apache.metamodel</groupId>
+ <artifactId>MetaModel-jdbc</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.derby</groupId>
+ <artifactId>derby</artifactId>
+ <version>10.8.1.2</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-nop</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </project>
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/mongodb/mongo2/src/main/java/org/apache/metamodel/mongodb/mongo2/MongoDbDataContext.java
----------------------------------------------------------------------
diff --cc mongodb/mongo2/src/main/java/org/apache/metamodel/mongodb/mongo2/MongoDbDataContext.java
index 0000000,cfeb836..d2095e9
mode 000000,100644..100644
--- a/mongodb/mongo2/src/main/java/org/apache/metamodel/mongodb/mongo2/MongoDbDataContext.java
+++ b/mongodb/mongo2/src/main/java/org/apache/metamodel/mongodb/mongo2/MongoDbDataContext.java
@@@ -1,0 -1,528 +1,530 @@@
+ /**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ package org.apache.metamodel.mongodb.mongo2;
+
+ import java.util.ArrayList;
+ import java.util.HashSet;
+ import java.util.List;
+ import java.util.Map.Entry;
+ import java.util.Set;
+ import java.util.SortedMap;
+ import java.util.TreeMap;
+ import java.util.regex.Pattern;
+
+ import org.apache.metamodel.DataContext;
+ import org.apache.metamodel.MetaModelException;
+ import org.apache.metamodel.QueryPostprocessDataContext;
+ import org.apache.metamodel.UpdateScript;
++import org.apache.metamodel.UpdateSummary;
+ import org.apache.metamodel.UpdateableDataContext;
+ import org.apache.metamodel.data.DataSet;
+ import org.apache.metamodel.data.DataSetHeader;
+ import org.apache.metamodel.data.InMemoryDataSet;
+ import org.apache.metamodel.data.Row;
+ import org.apache.metamodel.data.SimpleDataSetHeader;
+ import org.apache.metamodel.mongodb.common.MongoDBUtils;
+ import org.apache.metamodel.mongodb.common.MongoDbTableDef;
+ import org.apache.metamodel.query.FilterItem;
+ import org.apache.metamodel.query.FromItem;
+ import org.apache.metamodel.query.OperatorType;
+ import org.apache.metamodel.query.Query;
+ import org.apache.metamodel.query.SelectItem;
+ import org.apache.metamodel.schema.Column;
+ import org.apache.metamodel.schema.ColumnType;
+ import org.apache.metamodel.schema.ColumnTypeImpl;
+ import org.apache.metamodel.schema.MutableColumn;
+ import org.apache.metamodel.schema.MutableSchema;
+ import org.apache.metamodel.schema.MutableTable;
+ import org.apache.metamodel.schema.Schema;
+ import org.apache.metamodel.schema.Table;
+ import org.apache.metamodel.util.SimpleTableDef;
+ import org.bson.types.ObjectId;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+
+ import com.mongodb.BasicDBList;
+ import com.mongodb.BasicDBObject;
+ import com.mongodb.DB;
+ import com.mongodb.DBCollection;
+ import com.mongodb.DBCursor;
+ import com.mongodb.DBObject;
+ import com.mongodb.WriteConcern;
+
+ /**
+ * DataContext implementation for MongoDB.
+ *
+ * Since MongoDB has no schema, a virtual schema will be used in this
+ * DataContext. This implementation supports either automatic discovery of a
+ * schema or manual specification of a schema, through the
+ * {@link MongoDbTableDef} class.
+ */
+ public class MongoDbDataContext extends QueryPostprocessDataContext implements UpdateableDataContext {
+
+ private static final Logger logger = LoggerFactory.getLogger(MongoDbDataSet.class);
+
+ private final DB _mongoDb;
+ private final SimpleTableDef[] _tableDefs;
+ private WriteConcernAdvisor _writeConcernAdvisor;
+ private Schema _schema;
+
+ /**
+ * Constructor available for backwards compatibility
+ *
+ * @deprecated use {@link #MongoDbDataContext(DB, SimpleTableDef...)}
+ * instead
+ */
+ @Deprecated
+ public MongoDbDataContext(DB mongoDb, MongoDbTableDef... tableDefs) {
+ this(mongoDb, (SimpleTableDef[]) tableDefs);
+ }
+
+ /**
+ * Constructs a {@link MongoDbDataContext}. This constructor accepts a
+ * custom array of {@link MongoDbTableDef}s which allows the user to define
+ * his own view on the collections in the database.
+ *
+ * @param mongoDb
+ * the mongo db connection
+ * @param tableDefs
+ * an array of {@link MongoDbTableDef}s, which define the table
+ * and column model of the mongo db collections. (consider using
+ * {@link #detectSchema(DB)} or {@link #detectTable(DB, String)}
+ * ).
+ */
+ public MongoDbDataContext(DB mongoDb, SimpleTableDef... tableDefs) {
+ _mongoDb = mongoDb;
+ _tableDefs = tableDefs;
+ _schema = null;
+ }
+
+ /**
+ * Constructs a {@link MongoDbDataContext} and automatically detects the
+ * schema structure/view on all collections (see {@link #detectSchema(DB)}).
+ *
+ * @param mongoDb
+ * the mongo db connection
+ */
+ public MongoDbDataContext(DB mongoDb) {
+ this(mongoDb, detectSchema(mongoDb));
+ }
+
+ /**
+ * Performs an analysis of the available collections in a Mongo {@link DB}
+ * instance and tries to detect the table's structure based on the first
+ * 1000 documents in each collection.
+ *
+ * @param db
+ * the mongo db to inspect
+ * @return a mutable schema instance, useful for further fine tuning by the
+ * user.
+ * @see #detectTable(DB, String)
+ */
+ public static SimpleTableDef[] detectSchema(DB db) {
+ Set<String> collectionNames = db.getCollectionNames();
+ SimpleTableDef[] result = new SimpleTableDef[collectionNames.size()];
+ int i = 0;
+ for (String collectionName : collectionNames) {
+ SimpleTableDef table = detectTable(db, collectionName);
+ result[i] = table;
+ i++;
+ }
+ return result;
+ }
+
+ /**
+ * Performs an analysis of an available collection in a Mongo {@link DB}
+ * instance and tries to detect the table structure based on the first 1000
+ * documents in the collection.
+ *
+ * @param db
+ * the mongo DB
+ * @param collectionName
+ * the name of the collection
+ * @return a table definition for mongo db.
+ */
+ public static SimpleTableDef detectTable(DB db, String collectionName) {
+ final DBCollection collection = db.getCollection(collectionName);
+ final DBCursor cursor = collection.find().limit(1000);
+
+ final SortedMap<String, Set<Class<?>>> columnsAndTypes = new TreeMap<String, Set<Class<?>>>();
+ while (cursor.hasNext()) {
+ DBObject object = cursor.next();
+ Set<String> keysInObject = object.keySet();
+ for (String key : keysInObject) {
+ Set<Class<?>> types = columnsAndTypes.get(key);
+ if (types == null) {
+ types = new HashSet<Class<?>>();
+ columnsAndTypes.put(key, types);
+ }
+ Object value = object.get(key);
+ if (value != null) {
+ types.add(value.getClass());
+ }
+ }
+ }
+ cursor.close();
+
+ final String[] columnNames = new String[columnsAndTypes.size()];
+ final ColumnType[] columnTypes = new ColumnType[columnsAndTypes.size()];
+
+ int i = 0;
+ for (Entry<String, Set<Class<?>>> columnAndTypes : columnsAndTypes.entrySet()) {
+ final String columnName = columnAndTypes.getKey();
+ final Set<Class<?>> columnTypeSet = columnAndTypes.getValue();
+ final Class<?> columnType;
+ if (columnTypeSet.size() == 1) {
+ columnType = columnTypeSet.iterator().next();
+ } else {
+ columnType = Object.class;
+ }
+ columnNames[i] = columnName;
+ if (columnType == ObjectId.class) {
+ columnTypes[i] = ColumnType.ROWID;
+ } else {
+ columnTypes[i] = ColumnTypeImpl.convertColumnType(columnType);
+ }
+ i++;
+ }
+
+ return new SimpleTableDef(collectionName, columnNames, columnTypes);
+ }
+
+ @Override
+ protected Schema getMainSchema() throws MetaModelException {
+ if (_schema == null) {
+ MutableSchema schema = new MutableSchema(getMainSchemaName());
+ for (SimpleTableDef tableDef : _tableDefs) {
+
+ MutableTable table = tableDef.toTable().setSchema(schema);
+ Column[] rowIdColumns = table.getColumnsOfType(ColumnType.ROWID);
+ for (Column column : rowIdColumns) {
+ if (column instanceof MutableColumn) {
+ ((MutableColumn) column).setPrimaryKey(true);
+ }
+ }
+
+ schema.addTable(table);
+ }
+
+ _schema = schema;
+ }
+ return _schema;
+ }
+
+ @Override
+ protected String getMainSchemaName() throws MetaModelException {
+ return _mongoDb.getName();
+ }
+
+ @Override
+ protected Number executeCountQuery(Table table, List<FilterItem> whereItems, boolean functionApproximationAllowed) {
+ final DBCollection collection = _mongoDb.getCollection(table.getName());
+
+ final DBObject query = createMongoDbQuery(table, whereItems);
+
+ logger.info("Executing MongoDB 'count' query: {}", query);
+ final long count = collection.count(query);
+
+ return count;
+ }
+
+ @Override
+ protected Row executePrimaryKeyLookupQuery(Table table, List<SelectItem> selectItems, Column primaryKeyColumn,
+ Object keyValue) {
+ final DBCollection collection = _mongoDb.getCollection(table.getName());
+
+ List<FilterItem> whereItems = new ArrayList<FilterItem>();
+ SelectItem selectItem = new SelectItem(primaryKeyColumn);
+ FilterItem primaryKeyWhereItem = new FilterItem(selectItem, OperatorType.EQUALS_TO, keyValue);
+ whereItems.add(primaryKeyWhereItem);
+ final DBObject query = createMongoDbQuery(table, whereItems);
+ final DBObject resultDBObject = collection.findOne(query);
+
+ DataSetHeader header = new SimpleDataSetHeader(selectItems);
+
+ Row row = MongoDBUtils.toRow(resultDBObject, header);
+
+ return row;
+ }
+
+ @Override
+ public DataSet executeQuery(Query query) {
+ // Check for queries containing only simple selects and where clauses,
+ // or if it is a COUNT(*) query.
+
+ // if from clause only contains a main schema table
+ List<FromItem> fromItems = query.getFromClause().getItems();
+ if (fromItems.size() == 1 && fromItems.get(0).getTable() != null
+ && fromItems.get(0).getTable().getSchema() == _schema) {
+ final Table table = fromItems.get(0).getTable();
+
+ // if GROUP BY, HAVING and ORDER BY clauses are not specified
+ if (query.getGroupByClause().isEmpty() && query.getHavingClause().isEmpty()
+ && query.getOrderByClause().isEmpty()) {
+
+ final List<FilterItem> whereItems = query.getWhereClause().getItems();
+
+ // if all of the select items are "pure" column selection
+ boolean allSelectItemsAreColumns = true;
+ List<SelectItem> selectItems = query.getSelectClause().getItems();
+
+ // if it is a
+ // "SELECT [columns] FROM [table] WHERE [conditions]"
+ // query.
+ for (SelectItem selectItem : selectItems) {
+ if (selectItem.getFunction() != null || selectItem.getColumn() == null) {
+ allSelectItemsAreColumns = false;
+ break;
+ }
+ }
+
+ if (allSelectItemsAreColumns) {
+ logger.debug("Query can be expressed in full MongoDB, no post processing needed.");
+
+ // prepare for a non-post-processed query
+ Column[] columns = new Column[selectItems.size()];
+ for (int i = 0; i < columns.length; i++) {
+ columns[i] = selectItems.get(i).getColumn();
+ }
+
+ // checking if the query is a primary key lookup query
+ if (whereItems.size() == 1) {
+ final FilterItem whereItem = whereItems.get(0);
+ final SelectItem selectItem = whereItem.getSelectItem();
+ if (!whereItem.isCompoundFilter() && selectItem != null && selectItem.getColumn() != null) {
+ final Column column = selectItem.getColumn();
+ if (column.isPrimaryKey() && OperatorType.EQUALS_TO.equals(whereItem.getOperator())) {
+ logger.debug("Query is a primary key lookup query. Trying executePrimaryKeyLookupQuery(...)");
+ final Object operand = whereItem.getOperand();
+ final Row row = executePrimaryKeyLookupQuery(table, selectItems, column, operand);
+ if (row == null) {
+ logger.debug("DataContext did not return any primary key lookup query results. Proceeding "
+ + "with manual lookup.");
+ } else {
+ final DataSetHeader header = new SimpleDataSetHeader(selectItems);
+ return new InMemoryDataSet(header, row);
+ }
+ }
+ }
+ }
+
+ int firstRow = (query.getFirstRow() == null ? 1 : query.getFirstRow());
+ int maxRows = (query.getMaxRows() == null ? -1 : query.getMaxRows());
+
+ final DataSet dataSet = materializeMainSchemaTableInternal(table, columns, whereItems, firstRow,
+ maxRows, false);
+ return dataSet;
+ }
+ }
+ }
+
+ logger.debug("Query will be simplified for MongoDB and post processed.");
+ return super.executeQuery(query);
+ }
+
+ private DataSet materializeMainSchemaTableInternal(Table table, Column[] columns, List<FilterItem> whereItems,
+ int firstRow, int maxRows, boolean queryPostProcessed) {
+ final DBCollection collection = _mongoDb.getCollection(table.getName());
+
+ final DBObject query = createMongoDbQuery(table, whereItems);
+
+ logger.info("Executing MongoDB 'find' query: {}", query);
+ DBCursor cursor = collection.find(query);
+
+ if (maxRows > 0) {
+ cursor = cursor.limit(maxRows);
+ }
+ if (firstRow > 1) {
+ final int skip = firstRow - 1;
+ cursor = cursor.skip(skip);
+ }
+
+ return new MongoDbDataSet(cursor, columns, queryPostProcessed);
+ }
+
+ protected BasicDBObject createMongoDbQuery(Table table, List<FilterItem> whereItems) {
+ assert _schema == table.getSchema();
+
+ final BasicDBObject query = new BasicDBObject();
+ if (whereItems != null && !whereItems.isEmpty()) {
+ for (FilterItem item : whereItems) {
+ convertToCursorObject(query, item);
+ }
+ }
+
+ return query;
+ }
+
+ private void convertToCursorObject(BasicDBObject query, FilterItem item) {
+ if (item.isCompoundFilter()) {
+
+ BasicDBList orList = new BasicDBList();
+
+ final FilterItem[] childItems = item.getChildItems();
+ for (FilterItem childItem : childItems) {
+ BasicDBObject childObject = new BasicDBObject();
+ convertToCursorObject(childObject, childItem);
+ orList.add(childObject);
+ }
+
+ query.put("$or", orList);
+
+ } else {
+
+ final Column column = item.getSelectItem().getColumn();
+ final String columnName = column.getName();
+ final String operatorName = getOperatorName(item);
+
+ Object operand = item.getOperand();
+ if (ObjectId.isValid(String.valueOf(operand))) {
+ operand = new ObjectId(String.valueOf(operand));
+ }
+
+ final BasicDBObject existingFilterObject = (BasicDBObject) query.get(columnName);
+ if (existingFilterObject == null) {
+ if (operatorName == null) {
+ if (OperatorType.LIKE.equals(item.getOperator())) {
+ query.put(columnName, turnOperandIntoRegExp(operand));
+ } else {
+ query.put(columnName, operand);
+ }
+ } else {
+ query.put(columnName, new BasicDBObject(operatorName, operand));
+ }
+ } else {
+ if (operatorName == null) {
+ throw new IllegalStateException("Cannot retrieve records for a column with two EQUALS_TO operators");
+ } else {
+ existingFilterObject.append(operatorName, operand);
+ }
+ }
+ }
+ }
+
+ private String getOperatorName(FilterItem item) {
+ final OperatorType operator = item.getOperator();
+
+ if (OperatorType.EQUALS_TO.equals(operator)) {
+ return null;
+ }
+ if (OperatorType.LIKE.equals(operator)) {
+ return null;
+ }
+ if (OperatorType.LESS_THAN.equals(operator)) {
+ return "$lt";
+ }
+ if (OperatorType.LESS_THAN_OR_EQUAL.equals(operator)) {
+ return "$lte";
+ }
+ if (OperatorType.GREATER_THAN.equals(operator)) {
+ return "$gt";
+ }
+ if (OperatorType.GREATER_THAN_OR_EQUAL.equals(operator)) {
+ return "$gte";
+ }
+ if (OperatorType.DIFFERENT_FROM.equals(operator)) {
+ return "$ne";
+ }
+ if (OperatorType.IN.equals(operator)) {
+ return "$in";
+ }
+
+ throw new IllegalStateException("Unsupported operator type: " + operator);
+ }
+
+ private Pattern turnOperandIntoRegExp(Object operand) {
+ StringBuilder operandAsRegExp = new StringBuilder(replaceWildCardLikeChars(operand.toString()));
+ operandAsRegExp.insert(0, "^").append("$");
+ return Pattern.compile(operandAsRegExp.toString(), Pattern.CASE_INSENSITIVE);
+ }
+
+ private String replaceWildCardLikeChars(String operand) {
+ return operand.replaceAll("%", ".*");
+ }
+
+ @Override
+ protected DataSet materializeMainSchemaTable(Table table, Column[] columns, int maxRows) {
+ return materializeMainSchemaTableInternal(table, columns, null, 1, maxRows, true);
+ }
+
+ @Override
+ protected DataSet materializeMainSchemaTable(Table table, Column[] columns, int firstRow, int maxRows) {
+ return materializeMainSchemaTableInternal(table, columns, null, firstRow, maxRows, true);
+ }
+
+ /**
+ * Executes an update with a specific {@link WriteConcernAdvisor}.
+ */
- public void executeUpdate(UpdateScript update, WriteConcernAdvisor writeConcernAdvisor) {
++ public UpdateSummary executeUpdate(UpdateScript update, WriteConcernAdvisor writeConcernAdvisor) {
+ MongoDbUpdateCallback callback = new MongoDbUpdateCallback(this, writeConcernAdvisor);
+ try {
+ update.run(callback);
+ } finally {
+ callback.close();
+ }
++ return callback.getUpdateSummary();
+ }
+
+ /**
+ * Executes an update with a specific {@link WriteConcern}.
+ */
- public void executeUpdate(UpdateScript update, WriteConcern writeConcern) {
- executeUpdate(update, new SimpleWriteConcernAdvisor(writeConcern));
++ public UpdateSummary executeUpdate(UpdateScript update, WriteConcern writeConcern) {
++ return executeUpdate(update, new SimpleWriteConcernAdvisor(writeConcern));
+ }
+
+ @Override
- public void executeUpdate(UpdateScript update) {
- executeUpdate(update, getWriteConcernAdvisor());
++ public UpdateSummary executeUpdate(UpdateScript update) {
++ return executeUpdate(update, getWriteConcernAdvisor());
+ }
+
+ /**
+ * Gets the {@link WriteConcernAdvisor} to use on
+ * {@link #executeUpdate(UpdateScript)} calls.
+ */
+ public WriteConcernAdvisor getWriteConcernAdvisor() {
+ if (_writeConcernAdvisor == null) {
+ return new DefaultWriteConcernAdvisor();
+ }
+ return _writeConcernAdvisor;
+ }
+
+ /**
+ * Sets a global {@link WriteConcern} advisor to use on
+ * {@link #executeUpdate(UpdateScript)}.
+ */
+ public void setWriteConcernAdvisor(WriteConcernAdvisor writeConcernAdvisor) {
+ _writeConcernAdvisor = writeConcernAdvisor;
+ }
+
+ /**
+ * Gets the {@link DB} instance that this {@link DataContext} is backed by.
+ */
+ public DB getMongoDb() {
+ return _mongoDb;
+ }
+
+ protected void addTable(MutableTable table) {
+ if (_schema instanceof MutableSchema) {
+ MutableSchema mutableSchema = (MutableSchema) _schema;
+ mutableSchema.addTable(table);
+ } else {
+ throw new UnsupportedOperationException("Schema is not mutable");
+ }
+ }
+ }
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/mongodb/mongo3/pom.xml
----------------------------------------------------------------------
diff --cc mongodb/mongo3/pom.xml
index 0000000,ae53158..4ff4fd7
mode 000000,100644..100644
--- a/mongodb/mongo3/pom.xml
+++ b/mongodb/mongo3/pom.xml
@@@ -1,0 -1,70 +1,70 @@@
+ <?xml version="1.0" encoding="UTF-8" ?>
+ <!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+ -->
+ <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <artifactId>MetaModel-mongodb</artifactId>
+ <groupId>org.apache.metamodel</groupId>
- <version>4.5.3-SNAPSHOT</version>
++ <version>5.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>MetaModel-mongodb-mongo3</artifactId>
+ <name>MetaModel module for MongoDB 3.x</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.metamodel</groupId>
+ <artifactId>MetaModel-core</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.metamodel</groupId>
+ <artifactId>MetaModel-mongodb-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mongodb</groupId>
+ <artifactId>mongo-java-driver</artifactId>
+ <version>3.1.0</version>
+ </dependency>
+
+ <!-- Test dependencies -->
+ <dependency>
+ <groupId>org.apache.metamodel</groupId>
+ <artifactId>MetaModel-jdbc</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.derby</groupId>
+ <artifactId>derby</artifactId>
+ <version>10.8.1.2</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-nop</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ </project>
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/mongodb/mongo3/src/main/java/org/apache/metamodel/mongodb/mongo3/MongoDbDataContext.java
----------------------------------------------------------------------
diff --cc mongodb/mongo3/src/main/java/org/apache/metamodel/mongodb/mongo3/MongoDbDataContext.java
index 0000000,fbc9047..cf89f39
mode 000000,100644..100644
--- a/mongodb/mongo3/src/main/java/org/apache/metamodel/mongodb/mongo3/MongoDbDataContext.java
+++ b/mongodb/mongo3/src/main/java/org/apache/metamodel/mongodb/mongo3/MongoDbDataContext.java
@@@ -1,0 -1,545 +1,547 @@@
+ /**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ package org.apache.metamodel.mongodb.mongo3;
+
+ import java.util.ArrayList;
+ import java.util.Arrays;
+ import java.util.HashSet;
+ import java.util.List;
+ import java.util.Map.Entry;
+ import java.util.Set;
+ import java.util.SortedMap;
+ import java.util.TreeMap;
+ import java.util.regex.Pattern;
+
+ import org.apache.metamodel.DataContext;
+ import org.apache.metamodel.MetaModelException;
+ import org.apache.metamodel.QueryPostprocessDataContext;
+ import org.apache.metamodel.UpdateScript;
++import org.apache.metamodel.UpdateSummary;
+ import org.apache.metamodel.UpdateableDataContext;
+ import org.apache.metamodel.data.DataSet;
+ import org.apache.metamodel.data.DataSetHeader;
+ import org.apache.metamodel.data.InMemoryDataSet;
+ import org.apache.metamodel.data.Row;
+ import org.apache.metamodel.data.SimpleDataSetHeader;
+ import org.apache.metamodel.mongodb.common.MongoDBUtils;
+ import org.apache.metamodel.query.FilterItem;
+ import org.apache.metamodel.query.FromItem;
+ import org.apache.metamodel.query.OperatorType;
+ import org.apache.metamodel.query.Query;
+ import org.apache.metamodel.query.SelectItem;
+ import org.apache.metamodel.schema.Column;
+ import org.apache.metamodel.schema.ColumnType;
+ import org.apache.metamodel.schema.ColumnTypeImpl;
+ import org.apache.metamodel.schema.MutableColumn;
+ import org.apache.metamodel.schema.MutableSchema;
+ import org.apache.metamodel.schema.MutableTable;
+ import org.apache.metamodel.schema.Schema;
+ import org.apache.metamodel.schema.Table;
+ import org.apache.metamodel.util.SimpleTableDef;
+ import org.bson.Document;
+ import org.bson.types.ObjectId;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+
+ import com.mongodb.DB;
+ import com.mongodb.WriteConcern;
+ import com.mongodb.client.FindIterable;
+ import com.mongodb.client.MongoCollection;
+ import com.mongodb.client.MongoCursor;
+ import com.mongodb.client.MongoDatabase;
+ import com.mongodb.client.MongoIterable;
+
+ /**
+ * DataContext implementation for MongoDB.
+ *
+ * Since MongoDB has no schema, a virtual schema will be used in this
+ * DataContext. This implementation supports either automatic discovery of a
+ * schema or manual specification of a schema, through the
+ * {@link SimpleTableDef} class.
+ */
+ public class MongoDbDataContext extends QueryPostprocessDataContext implements UpdateableDataContext {
+
+ private static final Logger logger = LoggerFactory.getLogger(MongoDbDataSet.class);
+
+ private final MongoDatabase _mongoDb;
+ private final SimpleTableDef[] _tableDefs;
+ private WriteConcernAdvisor _writeConcernAdvisor;
+ private Schema _schema;
+
+ /**
+ * Constructs a {@link MongoDbDataContext}. This constructor accepts a
+ * custom array of {@link SimpleTableDef}s which allows the user to define
+ * his own view on the collections in the database.
+ *
+ * @param mongoDb
+ * the mongo db connection
+ * @param tableDefs
+ * an array of {@link SimpleTableDef}s, which define the table
+ * and column model of the mongo db collections. (consider using
+ * {@link #detectSchema(MongoDatabase)} or {@link #detectTable(MongoDatabase, String)}
+ * ).
+ */
+ public MongoDbDataContext(MongoDatabase mongoDb, SimpleTableDef... tableDefs) {
+ _mongoDb = mongoDb;
+ _tableDefs = tableDefs;
+ _schema = null;
+ }
+
+ /**
+ * Constructs a {@link MongoDbDataContext} and automatically detects the
+ * schema structure/view on all collections (see {@link #detectSchema(MongoDatabase)}).
+ *
+ * @param mongoDb
+ * the mongo db connection
+ */
+ public MongoDbDataContext(MongoDatabase mongoDb) {
+ this(mongoDb, detectSchema(mongoDb));
+ }
+
+ /**
+ * Performs an analysis of the available collections in a Mongo {@link DB}
+ * instance and tries to detect the table's structure based on the first
+ * 1000 documents in each collection.
+ *
+ * @param mongoDb
+ * the mongo db to inspect
+ * @return a mutable schema instance, useful for further fine tuning by the
+ * user.
+ * @see #detectTable(MongoDatabase, String)
+ */
+ public static SimpleTableDef[] detectSchema(MongoDatabase mongoDb) {
+ MongoIterable<String> collectionNames = mongoDb.listCollectionNames();
+ List<SimpleTableDef> result = new ArrayList<>();
+
+ for (String collectionName : collectionNames) {
+ SimpleTableDef table = detectTable(mongoDb, collectionName);
+ result.add(table);
+ }
+ return result.toArray(new SimpleTableDef[0]);
+ }
+
+ /**
+ * Performs an analysis of an available collection in a Mongo {@link DB}
+ * instance and tries to detect the table structure based on the first 1000
+ * documents in the collection.
+ *
+ * @param mongoDb
+ * the mongo DB
+ * @param collectionName
+ * the name of the collection
+ * @return a table definition for mongo db.
+ */
+ public static SimpleTableDef detectTable(MongoDatabase mongoDb, String collectionName) {
+
+ final MongoCollection<Document> collection = mongoDb.getCollection(collectionName);
+ final FindIterable<Document> iterable = collection.find().limit(1000);
+
+ final SortedMap<String, Set<Class<?>>> columnsAndTypes = new TreeMap<String, Set<Class<?>>>();
+ for (Document document : iterable) {
+ Set<String> keysInObject = document.keySet();
+ for (String key : keysInObject) {
+ Set<Class<?>> types = columnsAndTypes.get(key);
+ if (types == null) {
+ types = new HashSet<Class<?>>();
+ columnsAndTypes.put(key, types);
+ }
+ Object value = document.get(key);
+ if (value != null) {
+ types.add(value.getClass());
+ }
+ }
+ }
+
+ final String[] columnNames = new String[columnsAndTypes.size()];
+ final ColumnType[] columnTypes = new ColumnType[columnsAndTypes.size()];
+
+ int i = 0;
+ for (Entry<String, Set<Class<?>>> columnAndTypes : columnsAndTypes.entrySet()) {
+ final String columnName = columnAndTypes.getKey();
+ final Set<Class<?>> columnTypeSet = columnAndTypes.getValue();
+ final Class<?> columnType;
+ if (columnTypeSet.size() == 1) {
+ columnType = columnTypeSet.iterator().next();
+ } else {
+ columnType = Object.class;
+ }
+ columnNames[i] = columnName;
+ if (columnType == ObjectId.class) {
+ columnTypes[i] = ColumnType.ROWID;
+ } else {
+ columnTypes[i] = ColumnTypeImpl.convertColumnType(columnType);
+ }
+ i++;
+ }
+
+ return new SimpleTableDef(collectionName, columnNames, columnTypes);
+ }
+
+ @Override
+ protected Schema getMainSchema() throws MetaModelException {
+ if (_schema == null) {
+ MutableSchema schema = new MutableSchema(getMainSchemaName());
+ for (SimpleTableDef tableDef : _tableDefs) {
+
+ MutableTable table = tableDef.toTable().setSchema(schema);
+ Column[] rowIdColumns = table.getColumnsOfType(ColumnType.ROWID);
+ for (Column column : rowIdColumns) {
+ if (column instanceof MutableColumn) {
+ ((MutableColumn) column).setPrimaryKey(true);
+ }
+ }
+
+ schema.addTable(table);
+ }
+
+ _schema = schema;
+ }
+ return _schema;
+ }
+
+ @Override
+ protected String getMainSchemaName() throws MetaModelException {
+ return _mongoDb.getName();
+ }
+
+ @Override
+ protected Number executeCountQuery(Table table, List<FilterItem> whereItems, boolean functionApproximationAllowed) {
+ final MongoCollection<Document> collection = _mongoDb.getCollection(table.getName());
+
+ final Document query = createMongoDbQuery(table, whereItems);
+
+ logger.info("Executing MongoDB 'count' query: {}", query);
+ final long count = collection.count(query);
+
+ return count;
+ }
+
+ @Override
+ protected Row executePrimaryKeyLookupQuery(Table table, List<SelectItem> selectItems, Column primaryKeyColumn,
+ Object keyValue) {
+ final MongoCollection<Document> collection = _mongoDb.getCollection(table.getName());
+
+ List<FilterItem> whereItems = new ArrayList<FilterItem>();
+ SelectItem selectItem = new SelectItem(primaryKeyColumn);
+ FilterItem primaryKeyWhereItem = new FilterItem(selectItem, OperatorType.EQUALS_TO, keyValue);
+ whereItems.add(primaryKeyWhereItem);
+ final Document query = createMongoDbQuery(table, whereItems);
+ final Document resultDoc = collection.find(query).first();
+
+ DataSetHeader header = new SimpleDataSetHeader(selectItems);
+
+ Row row = MongoDBUtils.toRow(resultDoc, header);
+
+ return row;
+ }
+
+ @Override
+ public DataSet executeQuery(Query query) {
+ // Check for queries containing only simple selects and where clauses,
+ // or if it is a COUNT(*) query.
+
+ // if from clause only contains a main schema table
+ List<FromItem> fromItems = query.getFromClause().getItems();
+ if (fromItems.size() == 1 && fromItems.get(0).getTable() != null
+ && fromItems.get(0).getTable().getSchema() == _schema) {
+ final Table table = fromItems.get(0).getTable();
+
+ // if GROUP BY, HAVING and ORDER BY clauses are not specified
+ if (query.getGroupByClause().isEmpty() && query.getHavingClause().isEmpty()
+ && query.getOrderByClause().isEmpty()) {
+
+ final List<FilterItem> whereItems = query.getWhereClause().getItems();
+
+ // if all of the select items are "pure" column selection
+ boolean allSelectItemsAreColumns = true;
+ List<SelectItem> selectItems = query.getSelectClause().getItems();
+
+ // if it is a
+ // "SELECT [columns] FROM [table] WHERE [conditions]"
+ // query.
+ for (SelectItem selectItem : selectItems) {
+ if (selectItem.getFunction() != null || selectItem.getColumn() == null) {
+ allSelectItemsAreColumns = false;
+ break;
+ }
+ }
+
+ if (allSelectItemsAreColumns) {
+ logger.debug("Query can be expressed in full MongoDB, no post processing needed.");
+
+ // prepare for a non-post-processed query
+ Column[] columns = new Column[selectItems.size()];
+ for (int i = 0; i < columns.length; i++) {
+ columns[i] = selectItems.get(i).getColumn();
+ }
+
+ // checking if the query is a primary key lookup query
+ if (whereItems.size() == 1) {
+ final FilterItem whereItem = whereItems.get(0);
+ final SelectItem selectItem = whereItem.getSelectItem();
+ if (!whereItem.isCompoundFilter() && selectItem != null && selectItem.getColumn() != null) {
+ final Column column = selectItem.getColumn();
+ if (column.isPrimaryKey() && OperatorType.EQUALS_TO.equals(whereItem.getOperator())) {
+ logger.debug("Query is a primary key lookup query. Trying executePrimaryKeyLookupQuery(...)");
+ final Object operand = whereItem.getOperand();
+ final Row row = executePrimaryKeyLookupQuery(table, selectItems, column, operand);
+ if (row == null) {
+ logger.debug("DataContext did not return any primary key lookup query results. Proceeding "
+ + "with manual lookup.");
+ } else {
+ final DataSetHeader header = new SimpleDataSetHeader(selectItems);
+ return new InMemoryDataSet(header, row);
+ }
+ }
+ }
+ }
+
+ int firstRow = (query.getFirstRow() == null ? 1 : query.getFirstRow());
+ int maxRows = (query.getMaxRows() == null ? -1 : query.getMaxRows());
+
+ final DataSet dataSet = materializeMainSchemaTableInternal(table, columns, whereItems, firstRow,
+ maxRows, false);
+ return dataSet;
+ }
+ }
+ }
+
+ logger.debug("Query will be simplified for MongoDB and post processed.");
+ return super.executeQuery(query);
+ }
+
+ private DataSet materializeMainSchemaTableInternal(Table table, Column[] columns, List<FilterItem> whereItems,
+ int firstRow, int maxRows, boolean queryPostProcessed) {
+ final MongoCollection<Document> collection = _mongoDb.getCollection(table.getName());
+
+ final Document query = createMongoDbQuery(table, whereItems);
+
+ logger.info("Executing MongoDB 'find' query: {}", query);
+ FindIterable<Document> iterable = collection.find(query);
+
+ if (maxRows > 0) {
+ iterable = iterable.limit(maxRows);
+ }
+ if (firstRow > 1) {
+ final int skip = firstRow - 1;
+ iterable = iterable.skip(skip);
+ }
+
+ MongoCursor<Document> cursor = iterable.iterator();
+
+ return new MongoDbDataSet(cursor, columns, queryPostProcessed);
+ }
+
+ protected Document createMongoDbQuery(Table table, List<FilterItem> whereItems) {
+ assert _schema == table.getSchema();
+
+ final Document query = new Document();
+ if (whereItems != null && !whereItems.isEmpty()) {
+ for (FilterItem item : whereItems) {
+ convertToCursorObject(query, item);
+ }
+ }
+
+ return query;
+ }
+
+ private static Object convertArrayToList(Object arr) {
+ if (arr instanceof boolean[]) {
+ return Arrays.asList((boolean[])arr);
+ } else if (arr instanceof byte[]) {
+ return Arrays.asList((byte[])arr);
+ } else if (arr instanceof short[]) {
+ return Arrays.asList((short[])arr);
+ } else if (arr instanceof char[]) {
+ return Arrays.asList((char[])arr);
+ } else if (arr instanceof int[]) {
+ return Arrays.asList((int[])arr);
+ } else if (arr instanceof long[]) {
+ return Arrays.asList((long[])arr);
+ } else if (arr instanceof float[]) {
+ return Arrays.asList((float[])arr);
+ } else if (arr instanceof double[]) {
+ return Arrays.asList((double[])arr);
+ } else if (arr instanceof Object[]) {
+ return Arrays.asList((Object[])arr);
+ }
+ // It's not an array.
+ return null;
+ }
+
+ private void convertToCursorObject(Document query, FilterItem item) {
+ if (item.isCompoundFilter()) {
+
+ List<Document> orList = new ArrayList<Document>();
+
+ final FilterItem[] childItems = item.getChildItems();
+ for (FilterItem childItem : childItems) {
+ Document childDoc = new Document();
+ convertToCursorObject(childDoc, childItem);
+ orList.add(childDoc);
+ }
+
+ query.put("$or", orList);
+
+ } else {
+
+ final Column column = item.getSelectItem().getColumn();
+ final String columnName = column.getName();
+ final String operatorName = getOperatorName(item);
+
+ Object operand = item.getOperand();
+ if (ObjectId.isValid(String.valueOf(operand))) {
+ operand = new ObjectId(String.valueOf(operand));
+ } else if (operand != null && operand.getClass().isArray()){
+ operand = convertArrayToList(operand);
+ }
+
+ final Document existingFilterObject = (Document) query.get(columnName);
+ if (existingFilterObject == null) {
+ if (operatorName == null) {
+ if (OperatorType.LIKE.equals(item.getOperator())) {
+ query.put(columnName, turnOperandIntoRegExp(operand));
+ } else {
+ query.put(columnName, operand);
+ }
+ } else {
+ query.put(columnName, new Document(operatorName, operand));
+ }
+ } else {
+ if (operatorName == null) {
+ throw new IllegalStateException("Cannot retrieve records for a column with two EQUALS_TO operators");
+ } else {
+ existingFilterObject.append(operatorName, operand);
+ }
+ }
+ }
+ }
+
+ private String getOperatorName(FilterItem item) {
+ final OperatorType operator = item.getOperator();
+
+ if (OperatorType.EQUALS_TO.equals(operator)) {
+ return null;
+ }
+ if (OperatorType.LIKE.equals(operator)) {
+ return null;
+ }
+ if (OperatorType.LESS_THAN.equals(operator)) {
+ return "$lt";
+ }
+ if (OperatorType.LESS_THAN_OR_EQUAL.equals(operator)) {
+ return "$lte";
+ }
+ if (OperatorType.GREATER_THAN.equals(operator)) {
+ return "$gt";
+ }
+ if (OperatorType.GREATER_THAN_OR_EQUAL.equals(operator)) {
+ return "$gte";
+ }
+ if (OperatorType.DIFFERENT_FROM.equals(operator)) {
+ return "$ne";
+ }
+ if (OperatorType.IN.equals(operator)) {
+ return "$in";
+ }
+
+ throw new IllegalStateException("Unsupported operator type: " + operator);
+ }
+
+ private Pattern turnOperandIntoRegExp(Object operand) {
+ StringBuilder operandAsRegExp = new StringBuilder(replaceWildCardLikeChars(operand.toString()));
+ operandAsRegExp.insert(0, "^").append("$");
+ return Pattern.compile(operandAsRegExp.toString(), Pattern.CASE_INSENSITIVE);
+ }
+
+ private String replaceWildCardLikeChars(String operand) {
+ return operand.replaceAll("%", ".*");
+ }
+
    /**
     * Materializes a main schema table with a row limit but no offset.
     *
     * <p>Delegates to the internal materialization with no WHERE items and
     * firstRow=1; the resulting DataSet is flagged as post-processed.
     */
    @Override
    protected DataSet materializeMainSchemaTable(Table table, Column[] columns, int maxRows) {
        return materializeMainSchemaTableInternal(table, columns, null, 1, maxRows, true);
    }
+
    /**
     * Materializes a main schema table honoring both an offset (firstRow) and a
     * row limit (maxRows).
     *
     * <p>Delegates to the internal materialization with no WHERE items; the
     * resulting DataSet is flagged as post-processed.
     */
    @Override
    protected DataSet materializeMainSchemaTable(Table table, Column[] columns, int firstRow, int maxRows) {
        return materializeMainSchemaTableInternal(table, columns, null, firstRow, maxRows, true);
    }
+
+ /**
+ * Executes an update with a specific {@link WriteConcernAdvisor}.
+ */
- public void executeUpdate(UpdateScript update, WriteConcernAdvisor writeConcernAdvisor) {
++ public UpdateSummary executeUpdate(UpdateScript update, WriteConcernAdvisor writeConcernAdvisor) {
+ MongoDbUpdateCallback callback = new MongoDbUpdateCallback(this, writeConcernAdvisor);
+ try {
+ update.run(callback);
+ } finally {
+ callback.close();
+ }
++ return callback.getUpdateSummary();
+ }
+
    /**
     * Executes an update with a specific {@link WriteConcern}.
     *
     * @param update the update script to run
     * @param writeConcern the write concern applied to every write operation
     * @return a summary of the updates performed
     */
    public UpdateSummary executeUpdate(UpdateScript update, WriteConcern writeConcern) {
        return executeUpdate(update, new SimpleWriteConcernAdvisor(writeConcern));
    }
+
    /**
     * {@inheritDoc}
     *
     * <p>Delegates to {@link #executeUpdate(UpdateScript, WriteConcernAdvisor)}
     * using the advisor returned by {@link #getWriteConcernAdvisor()}.
     */
    @Override
    public UpdateSummary executeUpdate(UpdateScript update) {
        return executeUpdate(update, getWriteConcernAdvisor());
    }
+
+ /**
+ * Gets the {@link WriteConcernAdvisor} to use on
+ * {@link #executeUpdate(UpdateScript)} calls.
+ */
+ public WriteConcernAdvisor getWriteConcernAdvisor() {
+ if (_writeConcernAdvisor == null) {
+ return new DefaultWriteConcernAdvisor();
+ }
+ return _writeConcernAdvisor;
+ }
+
    /**
     * Sets a global {@link WriteConcern} advisor to use on
     * {@link #executeUpdate(UpdateScript)}.
     *
     * @param writeConcernAdvisor the advisor to use; {@code null} makes
     *        {@link #getWriteConcernAdvisor()} fall back to a default advisor
     */
    public void setWriteConcernAdvisor(WriteConcernAdvisor writeConcernAdvisor) {
        _writeConcernAdvisor = writeConcernAdvisor;
    }
+
    /**
     * Gets the {@link MongoDatabase} instance that this {@link DataContext} is
     * backed by.
     *
     * @return the backing MongoDB database
     */
    public MongoDatabase getMongoDb() {
        return _mongoDb;
    }
+
+ protected void addTable(MutableTable table) {
+ if (_schema instanceof MutableSchema) {
+ MutableSchema mutableSchema = (MutableSchema) _schema;
+ mutableSchema.addTable(table);
+ } else {
+ throw new UnsupportedOperationException("Schema is not mutable");
+ }
+ }
+ }
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/mongodb/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/metamodel/blob/6d50f0e3/pom.xml
----------------------------------------------------------------------