You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@flink.apache.org by GitBox <gi...@apache.org> on 2022/07/25 07:51:13 UTC

[GitHub] [flink-ml] zhipeng93 commented on a diff in pull request #132: [FLINK-28571] Add Chi-squared test as Transformer to ml.feature

zhipeng93 commented on code in PR #132:
URL: https://github.com/apache/flink-ml/pull/132#discussion_r928507111


##########
flink-ml-lib/src/main/java/org/apache/flink/ml/stats/chisqtest/ChiSqTest.java:
##########
@@ -0,0 +1,492 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.stats.chisqtest;
+
+import org.apache.flink.api.common.functions.MapPartitionFunction;
+import org.apache.flink.api.common.functions.RichFlatMapFunction;
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.api.java.tuple.Tuple4;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.AlgoOperator;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
+import org.apache.flink.streaming.api.operators.BoundedOneInput;
+import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
+import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.math3.distribution.ChiSquaredDistribution;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * Chi-square test of independence of variables in a contingency table. This Transformer computes
+ * the chi-square statistic, p-value, and dof (number of degrees of freedom) for every feature in
+ * the contingency table, which is constructed from the `observed` frequencies for each categorical
+ * value. All label
+ * and feature values must be categorical.
+ *
+ * <p>See: http://en.wikipedia.org/wiki/Chi-squared_test.
+ */
+public class ChiSqTest implements AlgoOperator<ChiSqTest>, ChiSqTestParams<ChiSqTest> {
+
+    final String bcCategoricalMarginsKey = "bcCategoricalMarginsKey";
+    final String bcLabelMarginsKey = "bcLabelMarginsKey";
+
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    public ChiSqTest() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public Table[] transform(Table... inputs) {
+
+        final String[] inputCols = getInputCols();

Review Comment:
   Could we check the size of the `inputs` here?



##########
flink-ml-lib/src/test/java/org/apache/flink/ml/stats/ChiSqTestTest.java:
##########
@@ -0,0 +1,205 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.stats;
+
+import org.apache.flink.api.common.restartstrategy.RestartStrategies;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.ml.stats.chisqtest.ChiSqTest;
+import org.apache.flink.ml.util.TestUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.ExecutionCheckpointingOptions;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+
+import org.apache.commons.collections.IteratorUtils;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+/** Tests {@link ChiSqTestTest}. */

Review Comment:
   nit: should be `Tests {@link ChiSqTest}.`



##########
flink-ml-lib/src/test/java/org/apache/flink/ml/stats/ChiSqTestTest.java:
##########
@@ -0,0 +1,205 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.stats;
+
+import org.apache.flink.api.common.restartstrategy.RestartStrategies;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.ml.stats.chisqtest.ChiSqTest;
+import org.apache.flink.ml.util.TestUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.ExecutionCheckpointingOptions;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+
+import org.apache.commons.collections.IteratorUtils;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+/** Tests {@link ChiSqTestTest}. */
+public class ChiSqTestTest extends AbstractTestBase {
+    @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();
+    private StreamTableEnvironment tEnv;
+    private Table inputTableWithDoubleLabel;
+    private Table inputTableWithIntegerLabel;
+    private Table inputTableWithStringLabel;
+
+    private final List<Row> samplesWithDoubleLabel =
+            Arrays.asList(
+                    Row.of(0., 5, 1.),
+                    Row.of(2., 6, 2.),
+                    Row.of(1., 7, 2.),
+                    Row.of(1., 5, 4.),
+                    Row.of(0., 5, 1.),
+                    Row.of(2., 6, 2.),
+                    Row.of(1., 7, 2.),
+                    Row.of(1., 5, 4.),
+                    Row.of(2., 5, 1.),
+                    Row.of(0., 5, 2.),
+                    Row.of(0., 5, 2.),
+                    Row.of(1., 9, 4.),
+                    Row.of(1., 9, 3.));
+
+    private final List<Row> expectedChiSqTestResultWithDoubleLabel =
+            Arrays.asList(
+                    Row.of("f1", 0.03419350755, 13.61904761905, 6),
+                    Row.of("f2", 0.24220177737, 7.94444444444, 6));
+
+    private final List<Row> samplesWithIntegerLabel =
+            Arrays.asList(
+                    Row.of(33, 5, "a"),
+                    Row.of(44, 6, "b"),
+                    Row.of(55, 7, "b"),
+                    Row.of(11, 5, "b"),
+                    Row.of(11, 5, "a"),
+                    Row.of(33, 6, "c"),
+                    Row.of(22, 7, "c"),
+                    Row.of(66, 5, "d"),
+                    Row.of(77, 5, "d"),
+                    Row.of(88, 5, "f"),
+                    Row.of(77, 5, "h"),
+                    Row.of(44, 9, "h"),
+                    Row.of(11, 9, "j"));
+
+    private final List<Row> expectedChiSqTestResultWithIntegerLabel =
+            Arrays.asList(
+                    Row.of("f1", 0.35745138256, 22.75, 21),
+                    Row.of("f2", 0.39934987096, 43.69444444444, 42));
+
+    private final List<Row> samplesWithStringLabel =
+            Arrays.asList(
+                    Row.of("v1", 11, 21.22),
+                    Row.of("v1", 33, 22.33),
+                    Row.of("v2", 22, 32.44),
+                    Row.of("v3", 11, 54.22),
+                    Row.of("v3", 33, 22.22),
+                    Row.of("v2", 22, 22.22),
+                    Row.of("v4", 55, 22.22),
+                    Row.of("v5", 11, 41.11),
+                    Row.of("v6", 55, 14.41),
+                    Row.of("v7", 13, 25.55),
+                    Row.of("v8", 14, 25.55),
+                    Row.of("v9", 14, 44.44),
+                    Row.of("v9", 14, 31.11));
+
+    private final List<Row> expectedChiSqTestResultWithStringLabel =
+            Arrays.asList(
+                    Row.of("f1", 0.06672255089, 54.16666666667, 40),
+                    Row.of("f2", 0.42335512313, 73.66666666667, 72));
+
+    @Before
+    public void before() {
+        Configuration config = new Configuration();
+        config.set(ExecutionCheckpointingOptions.ENABLE_CHECKPOINTS_AFTER_TASKS_FINISH, true);
+        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(config);
+        env.setParallelism(4);
+        env.enableCheckpointing(100);
+        env.setRestartStrategy(RestartStrategies.noRestart());
+        tEnv = StreamTableEnvironment.create(env);
+        inputTableWithDoubleLabel =
+                tEnv.fromDataStream(env.fromCollection(samplesWithDoubleLabel))
+                        .as("label", "f1", "f2");
+        inputTableWithIntegerLabel =
+                tEnv.fromDataStream(env.fromCollection(samplesWithIntegerLabel))
+                        .as("label", "f1", "f2");
+        inputTableWithStringLabel =
+                tEnv.fromDataStream(env.fromCollection(samplesWithStringLabel))
+                        .as("label", "f1", "f2");
+    }
+
+    private static void verifyPredictionResult(Table output, List<Row> expected) throws Exception {
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) output).getTableEnvironment();
+        DataStream<Row> outputDataStream = tEnv.toDataStream(output);
+
+        List<Row> result = IteratorUtils.toList(outputDataStream.executeAndCollect());
+
+        compareResultCollections(
+                expected,
+                result,
+                (row1, row2) -> {
+                    if (!row1.equals(row2)) {
+                        return 1;
+                    } else {
+                        return 0;
+                    }
+                });
+    }
+
+    @Test
+    public void testParam() {
+        ChiSqTest chiSqTest = new ChiSqTest();
+
+        chiSqTest.setInputCols("f1", "f2").setLabelCol("label");
+
+        assertArrayEquals(new String[] {"f1", "f2"}, chiSqTest.getInputCols());
+        assertEquals("label", chiSqTest.getLabelCol());
+    }
+
+    @Test
+    public void testOutputSchema() {
+        ChiSqTest chiSqTest = new ChiSqTest().setInputCols("f1", "f2").setLabelCol("label");
+
+        Table output = chiSqTest.transform(inputTableWithDoubleLabel)[0];
+        assertEquals(
+                Arrays.asList("column", "pValue", "statistic", "degreesOfFreedom"),
+                output.getResolvedSchema().getColumnNames());
+    }
+
+    @Test
+    public void testTransform() throws Exception {
+        ChiSqTest chiSqTest = new ChiSqTest().setInputCols("f1", "f2").setLabelCol("label");
+
+        Table output1 = chiSqTest.transform(inputTableWithDoubleLabel)[0];
+        verifyPredictionResult(output1, expectedChiSqTestResultWithDoubleLabel);
+
+        Table output2 = chiSqTest.transform(inputTableWithIntegerLabel)[0];
+        verifyPredictionResult(output2, expectedChiSqTestResultWithIntegerLabel);
+
+        Table output3 = chiSqTest.transform(inputTableWithStringLabel)[0];
+        verifyPredictionResult(output3, expectedChiSqTestResultWithStringLabel);
+    }
+
+    @Test
+    public void testSaveLoadAndTransform() throws Exception {
+        ChiSqTest chiSqTest = new ChiSqTest().setInputCols("f1", "f2").setLabelCol("label");
+
+        ChiSqTest loadedBucketizer =

Review Comment:
   nit: `loadedBucketizer` to `loadedChiSqTest`



##########
flink-ml-lib/src/main/java/org/apache/flink/ml/stats/chisqtest/ChiSqTest.java:
##########
@@ -0,0 +1,492 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.stats.chisqtest;
+
+import org.apache.flink.api.common.functions.MapPartitionFunction;
+import org.apache.flink.api.common.functions.RichFlatMapFunction;
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.api.java.tuple.Tuple4;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.AlgoOperator;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
+import org.apache.flink.streaming.api.operators.BoundedOneInput;
+import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
+import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.math3.distribution.ChiSquaredDistribution;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * Chi-square test of independence of variables in a contingency table. This Transformer computes
+ * the chi-square statistic, p-value, and dof (number of degrees of freedom) for every feature in
+ * the contingency table, which is constructed from the `observed` frequencies for each categorical
+ * value. All label
+ * and feature values must be categorical.
+ *
+ * <p>See: http://en.wikipedia.org/wiki/Chi-squared_test.
+ */
+public class ChiSqTest implements AlgoOperator<ChiSqTest>, ChiSqTestParams<ChiSqTest> {
+
+    final String bcCategoricalMarginsKey = "bcCategoricalMarginsKey";
+    final String bcLabelMarginsKey = "bcLabelMarginsKey";
+
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    public ChiSqTest() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public Table[] transform(Table... inputs) {
+
+        final String[] inputCols = getInputCols();
+        String labelCol = getLabelCol();
+
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) inputs[0]).getTableEnvironment();
+
+        SingleOutputStreamOperator<Tuple3<String, Object, Object>> colAndFeatureAndLabel =
+                tEnv.toDataStream(inputs[0])
+                        .flatMap(new ExtractColAndFeatureAndLabel(inputCols, labelCol));
+
+        // compute the observed frequencies
+        DataStream<Tuple4<String, Object, Object, Long>> observedFreq =
+                DataStreamUtils.mapPartition(
+                        colAndFeatureAndLabel.keyBy(Tuple3::hashCode),
+                        new GenerateObservedFrequencies());
+
+        SingleOutputStreamOperator<Tuple4<String, Object, Object, Long>> filledObservedFreq =
+                observedFreq
+                        .transform(
+                                "filledObservedFreq",
+                                Types.TUPLE(
+                                        Types.STRING,
+                                        Types.GENERIC(Object.class),
+                                        Types.GENERIC(Object.class),
+                                        Types.LONG),
+                                new FillZeroFunc())
+                        .setParallelism(1);
+
+        // return a DataStream of the marginal sums of the factors
+        DataStream<Tuple3<String, Object, Long>> categoricalMargins =
+                DataStreamUtils.mapPartition(
+                        observedFreq.keyBy(tuple -> new Tuple2<>(tuple.f0, tuple.f1).hashCode()),
+                        new MapPartitionFunction<
+                                Tuple4<String, Object, Object, Long>,
+                                Tuple3<String, Object, Long>>() {
+                            @Override
+                            public void mapPartition(
+                                    Iterable<Tuple4<String, Object, Object, Long>> iterable,
+                                    Collector<Tuple3<String, Object, Long>> out) {
+                                HashMap<Tuple2<String, Object>, Long> map = new HashMap<>();
+
+                                for (Tuple4<String, Object, Object, Long> tuple : iterable) {
+                                    Long observedFreq = tuple.f3;
+                                    Tuple2<String, Object> key = new Tuple2<>(tuple.f0, tuple.f1);
+
+                                    if (map.containsKey(key)) {
+                                        Long count = map.get(key);
+                                        map.put(key, count + observedFreq);
+                                    } else {
+                                        map.put(key, observedFreq);
+                                    }
+                                }
+
+                                for (Tuple2<String, Object> key : map.keySet()) {
+                                    Long categoricalMargin = map.get(key);
+                                    out.collect(new Tuple3<>(key.f0, key.f1, categoricalMargin));
+                                }
+                            }
+                        });
+
+        // return a DataStream of the marginal sums of the labels
+        DataStream<Tuple3<String, Object, Long>> labelMargins =

Review Comment:
   Same as above.



##########
flink-ml-lib/src/main/java/org/apache/flink/ml/stats/chisqtest/ChiSqTest.java:
##########
@@ -0,0 +1,492 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.stats.chisqtest;
+
+import org.apache.flink.api.common.functions.MapPartitionFunction;
+import org.apache.flink.api.common.functions.RichFlatMapFunction;
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.api.java.tuple.Tuple4;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.AlgoOperator;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
+import org.apache.flink.streaming.api.operators.BoundedOneInput;
+import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
+import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.math3.distribution.ChiSquaredDistribution;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * Chi-square test of independence of variables in a contingency table. This Transformer computes

Review Comment:
   Could you reformat the Javadoc as `An AlgoOperator that...` so that it is consistent with existing implementations?
   
   Also `This Transformer` -> `This AlgoOperator`



##########
flink-ml-lib/src/test/java/org/apache/flink/ml/stats/ChiSqTestTest.java:
##########
@@ -0,0 +1,205 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.stats;
+
+import org.apache.flink.api.common.restartstrategy.RestartStrategies;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.ml.stats.chisqtest.ChiSqTest;
+import org.apache.flink.ml.util.TestUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.ExecutionCheckpointingOptions;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+
+import org.apache.commons.collections.IteratorUtils;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+/** Tests {@link ChiSqTestTest}. */
+public class ChiSqTestTest extends AbstractTestBase {
+    @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();
+    private StreamTableEnvironment tEnv;
+    private Table inputTableWithDoubleLabel;
+    private Table inputTableWithIntegerLabel;
+    private Table inputTableWithStringLabel;
+
+    private final List<Row> samplesWithDoubleLabel =
+            Arrays.asList(
+                    Row.of(0., 5, 1.),
+                    Row.of(2., 6, 2.),
+                    Row.of(1., 7, 2.),
+                    Row.of(1., 5, 4.),
+                    Row.of(0., 5, 1.),
+                    Row.of(2., 6, 2.),
+                    Row.of(1., 7, 2.),
+                    Row.of(1., 5, 4.),
+                    Row.of(2., 5, 1.),
+                    Row.of(0., 5, 2.),
+                    Row.of(0., 5, 2.),
+                    Row.of(1., 9, 4.),
+                    Row.of(1., 9, 3.));
+
+    private final List<Row> expectedChiSqTestResultWithDoubleLabel =
+            Arrays.asList(
+                    Row.of("f1", 0.03419350755, 13.61904761905, 6),
+                    Row.of("f2", 0.24220177737, 7.94444444444, 6));
+
+    private final List<Row> samplesWithIntegerLabel =
+            Arrays.asList(
+                    Row.of(33, 5, "a"),
+                    Row.of(44, 6, "b"),
+                    Row.of(55, 7, "b"),
+                    Row.of(11, 5, "b"),
+                    Row.of(11, 5, "a"),
+                    Row.of(33, 6, "c"),
+                    Row.of(22, 7, "c"),
+                    Row.of(66, 5, "d"),
+                    Row.of(77, 5, "d"),
+                    Row.of(88, 5, "f"),
+                    Row.of(77, 5, "h"),
+                    Row.of(44, 9, "h"),
+                    Row.of(11, 9, "j"));
+
+    private final List<Row> expectedChiSqTestResultWithIntegerLabel =
+            Arrays.asList(
+                    Row.of("f1", 0.35745138256, 22.75, 21),
+                    Row.of("f2", 0.39934987096, 43.69444444444, 42));
+
+    private final List<Row> samplesWithStringLabel =
+            Arrays.asList(
+                    Row.of("v1", 11, 21.22),
+                    Row.of("v1", 33, 22.33),
+                    Row.of("v2", 22, 32.44),
+                    Row.of("v3", 11, 54.22),
+                    Row.of("v3", 33, 22.22),
+                    Row.of("v2", 22, 22.22),
+                    Row.of("v4", 55, 22.22),
+                    Row.of("v5", 11, 41.11),
+                    Row.of("v6", 55, 14.41),
+                    Row.of("v7", 13, 25.55),
+                    Row.of("v8", 14, 25.55),
+                    Row.of("v9", 14, 44.44),
+                    Row.of("v9", 14, 31.11));
+
+    private final List<Row> expectedChiSqTestResultWithStringLabel =
+            Arrays.asList(
+                    Row.of("f1", 0.06672255089, 54.16666666667, 40),
+                    Row.of("f2", 0.42335512313, 73.66666666667, 72));
+
+    @Before
+    public void before() {
+        Configuration config = new Configuration();
+        config.set(ExecutionCheckpointingOptions.ENABLE_CHECKPOINTS_AFTER_TASKS_FINISH, true);
+        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(config);
+        env.setParallelism(4);
+        env.enableCheckpointing(100);
+        env.setRestartStrategy(RestartStrategies.noRestart());
+        tEnv = StreamTableEnvironment.create(env);
+        inputTableWithDoubleLabel =
+                tEnv.fromDataStream(env.fromCollection(samplesWithDoubleLabel))
+                        .as("label", "f1", "f2");
+        inputTableWithIntegerLabel =
+                tEnv.fromDataStream(env.fromCollection(samplesWithIntegerLabel))
+                        .as("label", "f1", "f2");
+        inputTableWithStringLabel =
+                tEnv.fromDataStream(env.fromCollection(samplesWithStringLabel))
+                        .as("label", "f1", "f2");
+    }
+
+    private static void verifyPredictionResult(Table output, List<Row> expected) throws Exception {
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) output).getTableEnvironment();
+        DataStream<Row> outputDataStream = tEnv.toDataStream(output);
+
+        List<Row> result = IteratorUtils.toList(outputDataStream.executeAndCollect());
+
+        compareResultCollections(
+                expected,
+                result,
+                (row1, row2) -> {
+                    if (!row1.equals(row2)) {
+                        return 1;
+                    } else {
+                        return 0;
+                    }
+                });
+    }
+
+    @Test
+    public void testParam() {
+        ChiSqTest chiSqTest = new ChiSqTest();
+
+        chiSqTest.setInputCols("f1", "f2").setLabelCol("label");
+
+        assertArrayEquals(new String[] {"f1", "f2"}, chiSqTest.getInputCols());
+        assertEquals("label", chiSqTest.getLabelCol());
+    }
+
+    @Test
+    public void testOutputSchema() {
+        ChiSqTest chiSqTest = new ChiSqTest().setInputCols("f1", "f2").setLabelCol("label");
+
+        Table output = chiSqTest.transform(inputTableWithDoubleLabel)[0];
+        assertEquals(
+                Arrays.asList("column", "pValue", "statistic", "degreesOfFreedom"),
+                output.getResolvedSchema().getColumnNames());
+    }
+
+    @Test
+    public void testTransform() throws Exception {
+        ChiSqTest chiSqTest = new ChiSqTest().setInputCols("f1", "f2").setLabelCol("label");
+
+        Table output1 = chiSqTest.transform(inputTableWithDoubleLabel)[0];
+        verifyPredictionResult(output1, expectedChiSqTestResultWithDoubleLabel);
+
+        Table output2 = chiSqTest.transform(inputTableWithIntegerLabel)[0];
+        verifyPredictionResult(output2, expectedChiSqTestResultWithIntegerLabel);
+
+        Table output3 = chiSqTest.transform(inputTableWithStringLabel)[0];
+        verifyPredictionResult(output3, expectedChiSqTestResultWithStringLabel);
+    }
+
+    @Test
+    public void testSaveLoadAndTransform() throws Exception {
+        ChiSqTest chiSqTest = new ChiSqTest().setInputCols("f1", "f2").setLabelCol("label");
+
+        ChiSqTest loadedBucketizer =
+                TestUtils.saveAndReload(tEnv, chiSqTest, tempFolder.newFolder().getAbsolutePath());
+        Table output1 = loadedBucketizer.transform(inputTableWithDoubleLabel)[0];
+        verifyPredictionResult(output1, expectedChiSqTestResultWithDoubleLabel);
+
+        Table output2 = loadedBucketizer.transform(inputTableWithIntegerLabel)[0];

Review Comment:
   nit: I think one input for `testSaveLoadAndTransform` is enough since different inputs are already tested in `testTransform`.



##########
flink-ml-lib/src/main/java/org/apache/flink/ml/stats/chisqtest/ChiSqTest.java:
##########
@@ -0,0 +1,492 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.stats.chisqtest;
+
+import org.apache.flink.api.common.functions.MapPartitionFunction;
+import org.apache.flink.api.common.functions.RichFlatMapFunction;
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.api.java.tuple.Tuple4;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.AlgoOperator;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
+import org.apache.flink.streaming.api.operators.BoundedOneInput;
+import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
+import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.math3.distribution.ChiSquaredDistribution;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * Chi-square test of independence of variables in a contingency table. This Transformer computes
+ * the chi-square statistic, p-value, and dof (number of degrees of freedom) for every feature in
+ * the contingency table, which is constructed from the `observed` frequencies for each categorical
+ * value. All label and feature values must be categorical.
+ *
+ * <p>See: http://en.wikipedia.org/wiki/Chi-squared_test.
+ */
+public class ChiSqTest implements AlgoOperator<ChiSqTest>, ChiSqTestParams<ChiSqTest> {
+
+    final String bcCategoricalMarginsKey = "bcCategoricalMarginsKey";

Review Comment:
   Could `bcCategoricalMarginsKey` and `bcLabelMarginsKey` be two local variables rather than class variables?



##########
flink-ml-lib/src/main/java/org/apache/flink/ml/stats/chisqtest/ChiSqTest.java:
##########
@@ -0,0 +1,492 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.stats.chisqtest;
+
+import org.apache.flink.api.common.functions.MapPartitionFunction;
+import org.apache.flink.api.common.functions.RichFlatMapFunction;
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.api.java.tuple.Tuple4;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.AlgoOperator;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
+import org.apache.flink.streaming.api.operators.BoundedOneInput;
+import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
+import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.math3.distribution.ChiSquaredDistribution;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * Chi-square test of independence of variables in a contingency table. This Transformer computes
+ * the chi-square statistic,p-value,and dof(number of degrees of freedom) for every feature in the
+ * contingency table, which constructed from the `observed` for each categorical values. All label
+ * and feature values must be categorical.
+ *
+ * <p>See: http://en.wikipedia.org/wiki/Chi-squared_test.
+ */
+public class ChiSqTest implements AlgoOperator<ChiSqTest>, ChiSqTestParams<ChiSqTest> {
+
+    final String bcCategoricalMarginsKey = "bcCategoricalMarginsKey";
+    final String bcLabelMarginsKey = "bcLabelMarginsKey";
+
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    public ChiSqTest() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public Table[] transform(Table... inputs) {
+
+        final String[] inputCols = getInputCols();
+        String labelCol = getLabelCol();
+
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) inputs[0]).getTableEnvironment();
+
+        SingleOutputStreamOperator<Tuple3<String, Object, Object>> colAndFeatureAndLabel =
+                tEnv.toDataStream(inputs[0])
+                        .flatMap(new ExtractColAndFeatureAndLabel(inputCols, labelCol));
+
+        // compute the observed frequencies
+        DataStream<Tuple4<String, Object, Object, Long>> observedFreq =
+                DataStreamUtils.mapPartition(
+                        colAndFeatureAndLabel.keyBy(Tuple3::hashCode),
+                        new GenerateObservedFrequencies());
+
+        SingleOutputStreamOperator<Tuple4<String, Object, Object, Long>> filledObservedFreq =
+                observedFreq
+                        .transform(
+                                "filledObservedFreq",
+                                Types.TUPLE(
+                                        Types.STRING,
+                                        Types.GENERIC(Object.class),
+                                        Types.GENERIC(Object.class),
+                                        Types.LONG),
+                                new FillZeroFunc())
+                        .setParallelism(1);
+
+        // return a DataStream of the marginal sums of the factors
+        DataStream<Tuple3<String, Object, Long>> categoricalMargins =
+                DataStreamUtils.mapPartition(

Review Comment:
   How about using `keyBy + reduce`? We only use `mapPartition` when necessary.



##########
flink-ml-lib/src/main/java/org/apache/flink/ml/stats/chisqtest/ChiSqTest.java:
##########
@@ -0,0 +1,492 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.stats.chisqtest;
+
+import org.apache.flink.api.common.functions.MapPartitionFunction;
+import org.apache.flink.api.common.functions.RichFlatMapFunction;
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.api.java.tuple.Tuple4;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.AlgoOperator;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
+import org.apache.flink.streaming.api.operators.BoundedOneInput;
+import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
+import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.math3.distribution.ChiSquaredDistribution;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * Chi-square test of independence of variables in a contingency table. This Transformer computes
+ * the chi-square statistic,p-value,and dof(number of degrees of freedom) for every feature in the
+ * contingency table, which constructed from the `observed` for each categorical values. All label
+ * and feature values must be categorical.
+ *
+ * <p>See: http://en.wikipedia.org/wiki/Chi-squared_test.
+ */
+public class ChiSqTest implements AlgoOperator<ChiSqTest>, ChiSqTestParams<ChiSqTest> {
+
+    final String bcCategoricalMarginsKey = "bcCategoricalMarginsKey";
+    final String bcLabelMarginsKey = "bcLabelMarginsKey";
+
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    public ChiSqTest() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public Table[] transform(Table... inputs) {
+
+        final String[] inputCols = getInputCols();
+        String labelCol = getLabelCol();
+
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) inputs[0]).getTableEnvironment();
+
+        SingleOutputStreamOperator<Tuple3<String, Object, Object>> colAndFeatureAndLabel =
+                tEnv.toDataStream(inputs[0])
+                        .flatMap(new ExtractColAndFeatureAndLabel(inputCols, labelCol));
+
+        // compute the observed frequencies
+        DataStream<Tuple4<String, Object, Object, Long>> observedFreq =
+                DataStreamUtils.mapPartition(
+                        colAndFeatureAndLabel.keyBy(Tuple3::hashCode),
+                        new GenerateObservedFrequencies());

Review Comment:
   Given that we only need to access each input Tuple once, how about we use `transform()` and `BoundedOneInput#endInput()` to optimize the implementation following [1] here?
   
   Note that `mapPartition` caches the data in memory/disk and is often expensive. We could have a more efficient implementation here.
   
   
   [1] https://github.com/apache/flink-ml/blob/f9f802125d604f0155221804237fd4140e239602/flink-ml-lib/src/main/java/org/apache/flink/ml/feature/stringindexer/StringIndexer.java#L140



##########
flink-ml-lib/src/main/java/org/apache/flink/ml/stats/chisqtest/ChiSqTest.java:
##########
@@ -0,0 +1,492 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.stats.chisqtest;
+
+import org.apache.flink.api.common.functions.MapPartitionFunction;
+import org.apache.flink.api.common.functions.RichFlatMapFunction;
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.api.java.tuple.Tuple4;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.AlgoOperator;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
+import org.apache.flink.streaming.api.operators.BoundedOneInput;
+import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
+import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.math3.distribution.ChiSquaredDistribution;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * Chi-square test of independence of variables in a contingency table. This Transformer computes
+ * the chi-square statistic,p-value,and dof(number of degrees of freedom) for every feature in the
+ * contingency table, which constructed from the `observed` for each categorical values. All label
+ * and feature values must be categorical.
+ *
+ * <p>See: http://en.wikipedia.org/wiki/Chi-squared_test.
+ */
+public class ChiSqTest implements AlgoOperator<ChiSqTest>, ChiSqTestParams<ChiSqTest> {
+
+    final String bcCategoricalMarginsKey = "bcCategoricalMarginsKey";
+    final String bcLabelMarginsKey = "bcLabelMarginsKey";
+
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    public ChiSqTest() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    @Override
+    public Table[] transform(Table... inputs) {
+
+        final String[] inputCols = getInputCols();
+        String labelCol = getLabelCol();
+
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) inputs[0]).getTableEnvironment();
+
+        SingleOutputStreamOperator<Tuple3<String, Object, Object>> colAndFeatureAndLabel =
+                tEnv.toDataStream(inputs[0])
+                        .flatMap(new ExtractColAndFeatureAndLabel(inputCols, labelCol));
+
+        // compute the observed frequencies
+        DataStream<Tuple4<String, Object, Object, Long>> observedFreq =
+                DataStreamUtils.mapPartition(
+                        colAndFeatureAndLabel.keyBy(Tuple3::hashCode),
+                        new GenerateObservedFrequencies());
+
+        SingleOutputStreamOperator<Tuple4<String, Object, Object, Long>> filledObservedFreq =

Review Comment:
   How about we just compute the `distinct labels` and postpone the `fill` operation to Line#199, e.g., `DataStream<Tuple3<String, Double, Integer>> categoricalStatistics =...`?
   
   Using `parallelism=1` for computing all the data is usually not efficient.



##########
flink-ml-lib/src/main/java/org/apache/flink/ml/stats/chisqtest/ChiSqTest.java:
##########
@@ -0,0 +1,492 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.ml.stats.chisqtest;
+
+import org.apache.flink.api.common.functions.MapPartitionFunction;
+import org.apache.flink.api.common.functions.RichFlatMapFunction;
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.functions.RichMapPartitionFunction;
+import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.api.java.tuple.Tuple4;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.ml.api.AlgoOperator;
+import org.apache.flink.ml.common.broadcast.BroadcastUtils;
+import org.apache.flink.ml.common.datastream.DataStreamUtils;
+import org.apache.flink.ml.param.Param;
+import org.apache.flink.ml.util.ParamUtils;
+import org.apache.flink.ml.util.ReadWriteUtils;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
+import org.apache.flink.streaming.api.operators.BoundedOneInput;
+import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
+import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
+import org.apache.flink.table.api.Table;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.api.internal.TableImpl;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.math3.distribution.ChiSquaredDistribution;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * Chi-square test of independence of variables in a contingency table. This Transformer computes
+ * the chi-square statistic, p-value, and dof (number of degrees of freedom) for every feature in
+ * the contingency table, which is constructed from the `observed` frequencies for each categorical
+ * value. All label and feature values must be categorical.
+ *
+ * <p>See: http://en.wikipedia.org/wiki/Chi-squared_test.
+ */
+public class ChiSqTest implements AlgoOperator<ChiSqTest>, ChiSqTestParams<ChiSqTest> {
+
+    final String bcCategoricalMarginsKey = "bcCategoricalMarginsKey";
+    final String bcLabelMarginsKey = "bcLabelMarginsKey";
+
+    private final Map<Param<?>, Object> paramMap = new HashMap<>();
+
+    /** Constructs a ChiSqTest instance with all parameters set to their default values. */
+    public ChiSqTest() {
+        ParamUtils.initializeMapWithDefaultValues(paramMap, this);
+    }
+
+    /**
+     * Runs the chi-square test of independence between each input column and the label column of
+     * {@code inputs[0]}.
+     *
+     * <p>The returned table has the schema (column, pValue, statistic, degreesOfFreedom).
+     */
+    @Override
+    public Table[] transform(Table... inputs) {
+
+        final String[] inputCols = getInputCols();
+        String labelCol = getLabelCol();
+
+        StreamTableEnvironment tEnv =
+                (StreamTableEnvironment) ((TableImpl) inputs[0]).getTableEnvironment();
+
+        // Expand each input row into (columnName, featureValue, label) triples, one per column.
+        SingleOutputStreamOperator<Tuple3<String, Object, Object>> colAndFeatureAndLabel =
+                tEnv.toDataStream(inputs[0])
+                        .flatMap(new ExtractColAndFeatureAndLabel(inputCols, labelCol));
+
+        // compute the observed frequencies; the keyBy on the triple's hash routes identical
+        // triples to the same partition, so the per-partition counts are the totals
+        DataStream<Tuple4<String, Object, Object, Long>> observedFreq =
+                DataStreamUtils.mapPartition(
+                        colAndFeatureAndLabel.keyBy(Tuple3::hashCode),
+                        new GenerateObservedFrequencies());
+
+        // fill in zero counts for (column, featureValue, label) combinations that never occur;
+        // parallelism 1 because FillZeroFunc must observe the complete set of distinct labels
+        SingleOutputStreamOperator<Tuple4<String, Object, Object, Long>> filledObservedFreq =
+                observedFreq
+                        .transform(
+                                "filledObservedFreq",
+                                Types.TUPLE(
+                                        Types.STRING,
+                                        Types.GENERIC(Object.class),
+                                        Types.GENERIC(Object.class),
+                                        Types.LONG),
+                                new FillZeroFunc())
+                        .setParallelism(1);
+
+        // return a DataStream of the marginal sums of the factors, i.e. the total observed
+        // count per (column, featureValue) pair
+        DataStream<Tuple3<String, Object, Long>> categoricalMargins =
+                DataStreamUtils.mapPartition(
+                        observedFreq.keyBy(tuple -> new Tuple2<>(tuple.f0, tuple.f1).hashCode()),
+                        new MapPartitionFunction<
+                                Tuple4<String, Object, Object, Long>,
+                                Tuple3<String, Object, Long>>() {
+                            @Override
+                            public void mapPartition(
+                                    Iterable<Tuple4<String, Object, Object, Long>> iterable,
+                                    Collector<Tuple3<String, Object, Long>> out) {
+                                HashMap<Tuple2<String, Object>, Long> map = new HashMap<>();
+
+                                // Sum the observed frequencies over all labels for each
+                                // (column, featureValue) key.
+                                for (Tuple4<String, Object, Object, Long> tuple : iterable) {
+                                    Long observedFreq = tuple.f3;
+                                    Tuple2<String, Object> key = new Tuple2<>(tuple.f0, tuple.f1);
+
+                                    if (map.containsKey(key)) {
+                                        Long count = map.get(key);
+                                        map.put(key, count + observedFreq);
+                                    } else {
+                                        map.put(key, observedFreq);
+                                    }
+                                }
+
+                                for (Tuple2<String, Object> key : map.keySet()) {
+                                    Long categoricalMargin = map.get(key);
+                                    out.collect(new Tuple3<>(key.f0, key.f1, categoricalMargin));
+                                }
+                            }
+                        });
+
+        // return a DataStream of the marginal sums of the labels, i.e. the total observed
+        // count per (column, label) pair
+        DataStream<Tuple3<String, Object, Long>> labelMargins =
+                DataStreamUtils.mapPartition(
+                        observedFreq.keyBy(tuple -> new Tuple2<>(tuple.f0, tuple.f2).hashCode()),
+                        new MapPartitionFunction<
+                                Tuple4<String, Object, Object, Long>,
+                                Tuple3<String, Object, Long>>() {
+                            @Override
+                            public void mapPartition(
+                                    Iterable<Tuple4<String, Object, Object, Long>> iterable,
+                                    Collector<Tuple3<String, Object, Long>> out) {
+
+                                HashMap<Tuple2<String, Object>, Long> map = new HashMap<>();
+
+                                // Sum the observed frequencies over all feature values for each
+                                // (column, label) key.
+                                for (Tuple4<String, Object, Object, Long>
+                                        colAndFeatureAndLabelAndCount : iterable) {
+                                    Long observedFreq = colAndFeatureAndLabelAndCount.f3;
+                                    Tuple2<String, Object> key =
+                                            new Tuple2<>(
+                                                    colAndFeatureAndLabelAndCount.f0,
+                                                    colAndFeatureAndLabelAndCount.f2);
+
+                                    if (map.containsKey(key)) {
+                                        Long count = map.get(key);
+                                        map.put(key, count + observedFreq);
+                                    } else {
+                                        map.put(key, observedFreq);
+                                    }
+                                }
+
+                                for (Tuple2<String, Object> key : map.keySet()) {
+                                    Long labelMargin = map.get(key);
+                                    out.collect(new Tuple3<>(key.f0, key.f1, labelMargin));
+                                }
+                            }
+                        });
+
+        // ChiSqFunc combines the filled observed frequencies with both broadcast margin
+        // streams to compute the per-category chi-square statistics.
+        Function<List<DataStream<?>>, DataStream<Tuple3<String, Double, Integer>>> function =
+                dataStreams -> {
+                    DataStream stream = dataStreams.get(0);
+                    return stream.map(new ChiSqFunc(bcCategoricalMarginsKey, bcLabelMarginsKey));
+                };
+
+        HashMap<String, DataStream<?>> bcMap =
+                new HashMap<String, DataStream<?>>() {
+                    {
+                        put(bcCategoricalMarginsKey, categoricalMargins);
+                        put(bcLabelMarginsKey, labelMargins);
+                    }
+                };
+
+        DataStream<Tuple3<String, Double, Integer>> categoricalStatistics =
+                BroadcastUtils.withBroadcastStream(
+                        Collections.singletonList(filledObservedFreq), bcMap, function);
+
+        // aggregate the per-category statistics into the final result; parallelism 1 so that
+        // AggregateChiSqFunc sees all partial statistics
+        SingleOutputStreamOperator<Row> chiSqTestResult =
+                categoricalStatistics
+                        .transform(
+                                "chiSqTestResult",
+                                new RowTypeInfo(
+                                        new TypeInformation[] {
+                                            Types.STRING, Types.DOUBLE, Types.DOUBLE, Types.INT
+                                        },
+                                        new String[] {
+                                            "column", "pValue", "statistic", "degreesOfFreedom"
+                                        }),
+                                new AggregateChiSqFunc())
+                        .setParallelism(1);
+
+        return new Table[] {tEnv.fromDataStream(chiSqTestResult)};
+    }
+
+    /** Saves this operator's metadata (its params only; no model data) to the given path. */
+    @Override
+    public void save(String path) throws IOException {
+        ReadWriteUtils.saveMetadata(this, path);
+    }
+
+    /**
+     * Loads a ChiSqTest instance from the metadata previously saved at the given path.
+     *
+     * <p>Note: tEnv is not used by this loader; presumably the parameter exists to match the
+     * common load(tEnv, path) signature — confirm against the other stages in this project.
+     */
+    public static ChiSqTest load(StreamTableEnvironment tEnv, String path) throws IOException {
+        return ReadWriteUtils.loadStageParam(path);
+    }
+
+    /** Returns the map holding this operator's parameter values. */
+    @Override
+    public Map<Param<?>, Object> getParamMap() {
+        return paramMap;
+    }
+
+    /**
+     * Expands every input row into one (columnName, featureValue, labelValue) triple per input
+     * column, so that downstream operators can count frequencies for each column independently.
+     */
+    private static class ExtractColAndFeatureAndLabel
+            extends RichFlatMapFunction<Row, Tuple3<String, Object, Object>> {
+        // Names of the feature columns to test.
+        private final String[] inputCols;
+        // Name of the label column shared by all emitted triples.
+        private final String labelCol;
+
+        public ExtractColAndFeatureAndLabel(String[] inputCols, String labelCol) {
+            this.inputCols = inputCols;
+            this.labelCol = labelCol;
+        }
+
+        @Override
+        public void flatMap(Row row, Collector<Tuple3<String, Object, Object>> collector) {
+
+            Object label = row.getFieldAs(labelCol);
+
+            // Emit one triple per input column; the label value is the same for all of them.
+            for (String colName : inputCols) {
+                Object value = row.getField(colName);
+                collector.collect(new Tuple3<>(colName, value, label));
+            }
+        }
+    }
+
+    /**
+     * Computes a frequency table (DataStream) of the factors (categorical values). The returned
+     * DataStream contains the observed frequency (i.e. the number of occurrences) of each
+     * (column, featureValue, label) triple. The caller keys the input by the triple's hash, so
+     * identical triples arrive in the same partition and the emitted counts are totals.
+     */
+    private static class GenerateObservedFrequencies
+            extends RichMapPartitionFunction<
+                    Tuple3<String, Object, Object>, Tuple4<String, Object, Object, Long>> {
+
+        @Override
+        public void mapPartition(
+                Iterable<Tuple3<String, Object, Object>> iterable,
+                Collector<Tuple4<String, Object, Object, Long>> out) {
+
+            // Count the occurrences of each distinct (column, featureValue, label) triple.
+            HashMap<Tuple3<String, Object, Object>, Long> map = new HashMap<>();
+
+            for (Tuple3<String, Object, Object> key : iterable) {
+                if (map.containsKey(key)) {
+                    Long count = map.get(key);
+                    map.put(key, count + 1);
+                } else {
+                    map.put(key, 1L);
+                }
+            }
+
+            // Emit each triple together with its observed frequency.
+            for (Tuple3<String, Object, Object> key : map.keySet()) {
+                Long count = map.get(key);
+                out.collect(new Tuple4<>(key.f0, key.f1, key.f2, count));
+            }
+        }
+    }
+
+    /** Fills in zero frequencies for factors that are absent from the frequency table. */
+    private static class FillZeroFunc
+            extends AbstractStreamOperator<Tuple4<String, Object, Object, Long>>
+            implements OneInputStreamOperator<
+                            Tuple4<String, Object, Object, Long>,
+                            Tuple4<String, Object, Object, Long>>,
+                    BoundedOneInput {
+
+        // (column, categoricalValue) -> list of (label, count) pairs seen so far.
+        // TODO(review): this accumulation lives in plain fields and is not snapshotted into
+        // operator state, so it would be lost on failover — confirm whether state is needed here.
+        HashMap<Tuple2<String, Object>, ArrayList<Tuple2<Object, Long>>> values = new HashMap<>();
+        // All distinct labels observed across the whole input.
+        HashSet<Object> numLabels = new HashSet<>();
+
+        @Override
+        public void endInput() {
+            for (Map.Entry<Tuple2<String, Object>, ArrayList<Tuple2<Object, Long>>> entry :
+                    values.entrySet()) {
+                Tuple2<String, Object> categoricalKey = entry.getKey();
+                ArrayList<Tuple2<Object, Long>> labelAndCountList = entry.getValue();
+
+                // A HashSet makes the membership test below O(1); the original List.contains
+                // made this loop quadratic in the number of labels.
+                HashSet<Object> existingLabels =
+                        labelAndCountList.stream()
+                                .map(v -> v.f0)
+                                .collect(Collectors.toCollection(HashSet::new));
+
+                // Pad with a zero count for every label that never co-occurred with this
+                // categorical value, so the contingency table is complete.
+                for (Object label : numLabels) {
+                    if (!existingLabels.contains(label)) {
+                        labelAndCountList.add(new Tuple2<>(label, 0L));
+                    }
+                }
+
+                for (Tuple2<Object, Long> labelAndCount : labelAndCountList) {
+                    output.collect(
+                            new StreamRecord<>(
+                                    new Tuple4<>(
+                                            categoricalKey.f0,
+                                            categoricalKey.f1,
+                                            labelAndCount.f0,
+                                            labelAndCount.f1)));
+                }
+            }
+        }
+
+        @Override
+        public void processElement(StreamRecord<Tuple4<String, Object, Object, Long>> element) {
+            Tuple4<String, Object, Object, Long> colAndCategoryAndLabelAndCount =
+                    element.getValue();
+            Tuple2<String, Object> key =
+                    new Tuple2<>(
+                            colAndCategoryAndLabelAndCount.f0, colAndCategoryAndLabelAndCount.f1);
+            Tuple2<Object, Long> labelAndCount =
+                    new Tuple2<>(
+                            colAndCategoryAndLabelAndCount.f2, colAndCategoryAndLabelAndCount.f3);
+
+            // computeIfAbsent replaces the explicit null check + put with a single lookup.
+            values.computeIfAbsent(key, k -> new ArrayList<>()).add(labelAndCount);
+
+            numLabels.add(colAndCategoryAndLabelAndCount.f2);
+        }
+    }
+
+    /**
+     * Conducts Pearson's independence test on a contingency table that is constructed from the
+     * input `observed` frequencies, for each categorical value.
+     */
+    private static class ChiSqFunc
+            extends RichMapFunction<
+                    Tuple4<String, Object, Object, Long>, Tuple3<String, Double, Integer>> {
+
+        private final String bcCategoricalMarginsKey;
+        private final String bcLabelMarginsKey;
+        // (column, categoricalValue) -> total count of that categorical value in the column.
+        private final Map<Tuple2<String, Object>, Long> categoricalMargins = new HashMap<>();
+        // (column, label) -> total count of that label within the column.
+        private final Map<Tuple2<String, Object>, Long> labelMargins = new HashMap<>();
+
+        // Total number of input rows, derived from the label margins of a single column.
+        double sampleSize = 0;
+        // Number of distinct labels across the input.
+        int numLabels = 0;
+        // column -> number of distinct categorical values in that column.
+        HashMap<String, Integer> col2NumCategories = new HashMap<>();
+
+        public ChiSqFunc(String bcCategoricalMarginsKey, String bcLabelMarginsKey) {
+            this.bcCategoricalMarginsKey = bcCategoricalMarginsKey;
+            this.bcLabelMarginsKey = bcLabelMarginsKey;
+        }
+
+        @Override
+        public Tuple3<String, Double, Integer> map(Tuple4<String, Object, Object, Long> v) {
+            // Lazily initialize all margins from the broadcast variables on the first call.
+            if (categoricalMargins.isEmpty()) {
+                List<Tuple3<String, Object, Long>> categoricalMarginList =
+                        getRuntimeContext().getBroadcastVariable(bcCategoricalMarginsKey);
+                List<Tuple3<String, Object, Long>> labelMarginList =
+                        getRuntimeContext().getBroadcastVariable(bcLabelMarginsKey);
+
+                // A single pass fills both the per-column category counts and the margins
+                // (the original iterated categoricalMarginList twice).
+                for (Tuple3<String, Object, Long> colAndFeatureAndCount : categoricalMarginList) {
+                    col2NumCategories.merge(colAndFeatureAndCount.f0, 1, Integer::sum);
+                    categoricalMargins.put(
+                            new Tuple2<>(colAndFeatureAndCount.f0, colAndFeatureAndCount.f1),
+                            colAndFeatureAndCount.f2);
+                }
+
+                numLabels = (int) labelMarginList.stream().map(x -> x.f1).distinct().count();
+
+                // Every column sees every input row, so summing the label counts of any one
+                // column yields the total sample size. Use the first column encountered.
+                String firstCol = null;
+                for (Tuple3<String, Object, Long> colAndLabelAndCount : labelMarginList) {
+                    String col = colAndLabelAndCount.f0;
+                    if (firstCol == null) {
+                        firstCol = col;
+                    }
+                    if (firstCol.equals(col)) {
+                        sampleSize += colAndLabelAndCount.f2;
+                    }
+                    labelMargins.put(
+                            new Tuple2<>(col, colAndLabelAndCount.f1), colAndLabelAndCount.f2);
+                }
+                Preconditions.checkArgument(
+                        sampleSize > 0, "The sample size must be positive.");
+            }
+
+            String colName = v.f0;
+            // Degrees of freedom of the chi-squared distribution:
+            // (numCategories - 1) * (numLabels - 1).
+            int dof = (col2NumCategories.get(colName) - 1) * (numLabels - 1);
+
+            Long theCategoricalMargin = categoricalMargins.get(new Tuple2<>(v.f0, v.f1));
+            Long theLabelMargin = labelMargins.get(new Tuple2<>(v.f0, v.f2));
+            Long observed = v.f3;
+
+            // Widen to double before multiplying: margin * margin can overflow long for
+            // very large inputs, whereas the original multiplied two longs first.
+            double expected = (double) theLabelMargin * theCategoricalMargin / sampleSize;
+            double categoricalStatistic = pearsonFunc(observed, expected);
+
+            return new Tuple3<>(colName, categoricalStatistic, dof);
+        }
+
+        // Pearson's chi-squared test: http://en.wikipedia.org/wiki/Pearson%27s_chi-squared_test
+        private double pearsonFunc(double observed, double expected) {
+            double dev = observed - expected;
+            return dev * dev / expected;
+        }
+    }
+
+    /**
+     * This function computes the Pearson's chi-squared statistic, p-value, and the number of
+     * degrees of freedom for every feature across the input DataStream.
+     */
+    private static class AggregateChiSqFunc extends AbstractStreamOperator<Row>
+            implements OneInputStreamOperator<Tuple3<String, Double, Integer>, Row>,
+                    BoundedOneInput {
+
+        HashMap<String, Tuple2<Double, Integer>> col2Statistic = new HashMap<>();

Review Comment:
   If you are using `col2Statistic` here, we have to store it in `state` for failover.
   
   How about we replace the logic here with existing operators, such as `keyBy + reduce + map`?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscribe@flink.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org