You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by br...@apache.org on 2014/08/12 03:41:42 UTC
svn commit: r1617394 - in /hive/branches/spark: ./ data/conf/spark/ itests/
itests/qtest-spark/ itests/qtest/ ql/src/test/queries/clientpositive/
ql/src/test/results/clientpositive/ ql/src/test/results/clientpositive/spark/
Author: brock
Date: Tue Aug 12 01:41:41 2014
New Revision: 1617394
URL: http://svn.apache.org/r1617394
Log:
HIVE-7665 - Create TestSparkCliDriver to run test in spark local mode (Szehon via Brock) [Spark Branch]
Added:
hive/branches/spark/data/conf/spark/
hive/branches/spark/data/conf/spark/hive-site.xml
hive/branches/spark/itests/qtest-spark/
hive/branches/spark/itests/qtest-spark/pom.xml
hive/branches/spark/ql/src/test/queries/clientpositive/spark_test.q
hive/branches/spark/ql/src/test/results/clientpositive/spark/
hive/branches/spark/ql/src/test/results/clientpositive/spark/spark_test.q.out
hive/branches/spark/ql/src/test/results/clientpositive/spark_test.q.out
Modified:
hive/branches/spark/itests/pom.xml
hive/branches/spark/itests/qtest/testconfiguration.properties
hive/branches/spark/pom.xml
Added: hive/branches/spark/data/conf/spark/hive-site.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/data/conf/spark/hive-site.xml?rev=1617394&view=auto
==============================================================================
--- hive/branches/spark/data/conf/spark/hive-site.xml (added)
+++ hive/branches/spark/data/conf/spark/hive-site.xml Tue Aug 12 01:41:41 2014
@@ -0,0 +1,203 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<configuration>
+
+<!-- Hive Configuration can either be stored in this file or in the hadoop configuration files -->
+<!-- that are implied by Hadoop setup variables. -->
+<!-- Aside from Hadoop setup variables - this file is provided as a convenience so that Hive -->
+<!-- users do not have to edit hadoop configuration files (that may be managed as a centralized -->
+<!-- resource). -->
+
+<!-- Hive Execution Parameters -->
+<property>
+ <name>hadoop.tmp.dir</name>
+ <value>${test.tmp.dir}/hadoop-tmp</value>
+ <description>A base for other temporary directories.</description>
+</property>
+
+<property>
+ <name>hive.exec.scratchdir</name>
+ <value>${test.tmp.dir}/scratchdir</value>
+ <description>Scratch space for Hive jobs</description>
+</property>
+
+<property>
+ <name>hive.exec.local.scratchdir</name>
+ <value>${test.tmp.dir}/localscratchdir/</value>
+ <description>Local scratch space for Hive jobs</description>
+</property>
+
+<property>
+ <name>javax.jdo.option.ConnectionURL</name>
+ <value>jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true</value>
+</property>
+
+<property>
+ <name>hive.stats.dbconnectionstring</name>
+ <value>jdbc:derby:;databaseName=${test.tmp.dir}/TempStatsStore;create=true</value>
+</property>
+
+
+<property>
+ <name>javax.jdo.option.ConnectionDriverName</name>
+ <value>org.apache.derby.jdbc.EmbeddedDriver</value>
+</property>
+
+<property>
+ <name>javax.jdo.option.ConnectionUserName</name>
+ <value>APP</value>
+</property>
+
+<property>
+ <name>javax.jdo.option.ConnectionPassword</name>
+ <value>mine</value>
+</property>
+
+<property>
+ <!-- this should eventually be deprecated since the metastore should supply this -->
+ <name>hive.metastore.warehouse.dir</name>
+ <value>${test.warehouse.dir}</value>
+ <description></description>
+</property>
+
+<property>
+ <name>hive.metastore.metadb.dir</name>
+ <value>file://${test.tmp.dir}/metadb/</value>
+ <description>
+ Required by metastore server or if the uris argument below is not supplied
+ </description>
+</property>
+
+<property>
+ <name>test.log.dir</name>
+ <value>${test.tmp.dir}/log/</value>
+ <description></description>
+</property>
+
+<property>
+ <name>test.data.files</name>
+ <value>${hive.root}/data/files</value>
+ <description></description>
+</property>
+
+<property>
+ <name>hive.jar.path</name>
+ <value>${maven.local.repository}/org/apache/hive/hive-exec/${hive.version}/hive-exec-${hive.version}.jar</value>
+ <description></description>
+</property>
+
+<property>
+ <name>test.data.scripts</name>
+ <value>${hive.root}/data/scripts</value>
+ <description></description>
+</property>
+
+<property>
+ <name>hive.metastore.rawstore.impl</name>
+ <value>org.apache.hadoop.hive.metastore.ObjectStore</value>
+ <description>Name of the class that implements the org.apache.hadoop.hive.metastore.rawstore interface. This class is used to store and retrieve raw metadata objects such as tables and databases</description>
+</property>
+
+<property>
+ <name>hive.querylog.location</name>
+ <value>${test.tmp.dir}/tmp</value>
+ <description>Location of the structured hive logs</description>
+</property>
+
+<property>
+ <name>hive.exec.pre.hooks</name>
+ <value>org.apache.hadoop.hive.ql.hooks.PreExecutePrinter, org.apache.hadoop.hive.ql.hooks.EnforceReadOnlyTables</value>
+ <description>Pre Execute Hook for Tests</description>
+</property>
+
+<property>
+ <name>hive.exec.post.hooks</name>
+ <value>org.apache.hadoop.hive.ql.hooks.PostExecutePrinter</value>
+ <description>Post Execute Hook for Tests</description>
+</property>
+
+<property>
+ <name>hive.support.concurrency</name>
+ <value>false</value>
+ <description>Whether hive supports concurrency or not. A zookeeper instance must be up and running for the default hive lock manager to support read-write locks.</description>
+</property>
+
+<property>
+ <name>fs.pfile.impl</name>
+ <value>org.apache.hadoop.fs.ProxyLocalFileSystem</value>
+ <description>A proxy for local file system used for cross file system testing</description>
+</property>
+
+<property>
+ <name>hive.exec.mode.local.auto</name>
+ <value>false</value>
+ <description>
+ Let hive determine whether to run in local mode automatically
+ Disabling this for tests so that minimr is not affected
+ </description>
+</property>
+
+<property>
+ <name>hive.auto.convert.join</name>
+ <value>false</value>
+ <description>Whether Hive enables the optimization of converting a common join into a mapjoin based on the input file size</description>
+</property>
+
+<property>
+ <name>hive.ignore.mapjoin.hint</name>
+ <value>true</value>
+ <description>Whether Hive ignores the mapjoin hint</description>
+</property>
+
+<property>
+ <name>io.sort.mb</name>
+ <value>10</value>
+</property>
+
+<property>
+ <name>hive.input.format</name>
+ <value>org.apache.hadoop.hive.ql.io.CombineHiveInputFormat</value>
+ <description>The default input format, if it is not specified, the system assigns it. It is set to HiveInputFormat for hadoop versions 17, 18 and 19, whereas it is set to CombineHiveInputFormat for hadoop 20. The user can always overwrite it - if there is a bug in CombineHiveInputFormat, it can always be manually set to HiveInputFormat. </description>
+</property>
+
+<property>
+ <name>hive.default.rcfile.serde</name>
+ <value>org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe</value>
+ <description>The default SerDe hive will use for the rcfile format</description>
+</property>
+
+<property>
+ <name>hive.stats.dbclass</name>
+ <value>counter</value>
+ <description>The default storage that stores temporary hive statistics. Currently, the jdbc, hbase and counter types are supported</description>
+</property>
+
+<property>
+ <name>hive.execution.engine</name>
+ <value>spark</value>
+ <description>Chooses execution engine. Options are: mr (Map reduce, default), tez (hadoop 2 only), spark</description>
+</property>
+
+<property>
+ <name>spark.master</name>
+ <value>local[4]</value>
+</property>
+
+</configuration>
Modified: hive/branches/spark/itests/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/pom.xml?rev=1617394&r1=1617393&r2=1617394&view=diff
==============================================================================
--- hive/branches/spark/itests/pom.xml (original)
+++ hive/branches/spark/itests/pom.xml Tue Aug 12 01:41:41 2014
@@ -44,6 +44,7 @@
<profile>
<id>hadoop-2</id>
<modules>
+ <module>qtest-spark</module>
<module>hive-unit-hadoop2</module>
<module>hive-minikdc</module>
</modules>
Added: hive/branches/spark/itests/qtest-spark/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/qtest-spark/pom.xml?rev=1617394&view=auto
==============================================================================
--- hive/branches/spark/itests/qtest-spark/pom.xml (added)
+++ hive/branches/spark/itests/qtest-spark/pom.xml Tue Aug 12 01:41:41 2014
@@ -0,0 +1,365 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-it</artifactId>
+ <version>0.14.0-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <artifactId>hive-it-qfile-spark</artifactId>
+ <packaging>jar</packaging>
+ <name>Hive Integration - QFile Spark Tests</name>
+
+ <properties>
+ <hive.path.to.root>../..</hive.path.to.root>
+
+ <!-- The following are to match the latest in spark project, overriding hive's versions -->
+ <spark.jetty.version>8.1.14.v20131031</spark.jetty.version>
+ <spark.kryo.version>2.21</spark.kryo.version>
+ <qfile></qfile>
+ <qfile_regex></qfile_regex>
+ <run_disabled>false</run_disabled>
+ <clustermode></clustermode>
+ <execute.beeline.tests>false</execute.beeline.tests>
+ <active.hadoop.version>${hadoop-23.version}</active.hadoop.version>
+ <test.dfs.mkdir>-mkdir -p</test.dfs.mkdir>
+ </properties>
+
+ <dependencies>
+ <!-- dependencies are always listed in sorted order by groupId, artifactId -->
+ <!-- test intra-project spark -->
+ <dependency>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-core_${scala.binary.version}</artifactId>
+ <version>${spark.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ <version>${spark.jetty.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-security</artifactId>
+ <version>${spark.jetty.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-plus</artifactId>
+ <version>${spark.jetty.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
+ <version>${spark.jetty.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.esotericsoftware.kryo</groupId>
+ <artifactId>kryo</artifactId>
+ <version>${spark.kryo.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>${mockito-all.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <!-- test intra-project -->
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-ant</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-common</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-contrib</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${project.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-it-custom-serde</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-it-util</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-serde</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <!-- test inter-project -->
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-servlet</artifactId>
+ <version>${jersey.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-archives</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+ <version>${hadoop-23.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-hs</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-tests</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ <classifier>tests</classifier>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-client</artifactId>
+ <version>${hadoop-23.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-common</artifactId>
+ <version>${hbase.hadoop2.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-common</artifactId>
+ <version>${hbase.hadoop2.version}</version>
+ <scope>test</scope>
+ <classifier>tests</classifier>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-hadoop-compat</artifactId>
+ <version>${hbase.hadoop2.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-hadoop-compat</artifactId>
+ <version>${hbase.hadoop2.version}</version>
+ <scope>test</scope>
+ <classifier>tests</classifier>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-hadoop2-compat</artifactId>
+ <version>${hbase.hadoop2.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-hadoop2-compat</artifactId>
+ <version>${hbase.hadoop2.version}</version>
+ <scope>test</scope>
+ <classifier>tests</classifier>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-server</artifactId>
+ <version>${hbase.hadoop2.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-server</artifactId>
+ <version>${hbase.hadoop2.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>properties-maven-plugin</artifactId>
+ <version>1.0-alpha-2</version>
+ <executions>
+ <execution>
+ <phase>initialize</phase>
+ <goals>
+ <goal>read-project-properties</goal>
+ </goals>
+ <configuration>
+ <files>
+ <file>${basedir}/../qtest/testconfiguration.properties</file>
+ </files>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>generate-tests-sources</id>
+ <phase>generate-test-sources</phase>
+ <configuration>
+ <target>
+ <property name="test.classpath" refid="maven.test.classpath"/>
+ <echo message="${test.classpath}"/>
+ <taskdef resource="net/sf/antcontrib/antcontrib.properties"
+ classpathref="maven.plugin.classpath" />
+ <taskdef name="qtestgen" classname="org.apache.hadoop.hive.ant.QTestGenTask"
+ classpath="${test.classpath}" />
+ <mkdir dir="${project.build.directory}/qfile-results/clientpositive/spark" />
+ <qtestgen hiveRootDirectory="${basedir}/${hive.path.to.root}/"
+ outputDirectory="${project.build.directory}/generated-test-sources/java/org/apache/hadoop/hive/cli/"
+ templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
+ queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
+ queryFile="${qfile}"
+ includeQueryFile="${spark.query.files}"
+ runDisabled="${run_disabled}"
+ hiveConfDir="${basedir}/${hive.path.to.root}/data/conf/spark"
+ resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/spark"
+ className="TestSparkCliDriver"
+ logFile="${project.build.directory}/testsparkclidrivergen.log"
+ logDirectory="${project.build.directory}/qfile-results/clientpositive/spark"
+ initScript="q_test_init.sql"
+ cleanupScript="q_test_cleanup.sql"/>
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <version>${maven.build-helper.plugin.version}</version>
+ <executions>
+ <execution>
+ <id>add-test-sources</id>
+ <phase>generate-test-sources</phase>
+ <goals>
+ <goal>add-test-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>target/generated-test-sources/java</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
Modified: hive/branches/spark/itests/qtest/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/qtest/testconfiguration.properties?rev=1617394&r1=1617393&r2=1617394&view=diff
==============================================================================
--- hive/branches/spark/itests/qtest/testconfiguration.properties (original)
+++ hive/branches/spark/itests/qtest/testconfiguration.properties Tue Aug 12 01:41:41 2014
@@ -3,3 +3,4 @@ minimr.query.negative.files=cluster_task
minitez.query.files=tez_fsstat.q,mapjoin_decimal.q,tez_join_tests.q,tez_joins_explain.q,mrr.q,tez_dml.q,tez_insert_overwrite_local_directory_1.q,tez_union.q,bucket_map_join_tez1.q,bucket_map_join_tez2.q,tez_schema_evolution.q,tez_join_hash.q
minitez.query.files.shared=orc_merge1.q,orc_merge2.q,orc_merge3.q,orc_merge4.q,alter_merge_orc.q,alter_merge_2_orc.q,alter_merge_stats_orc.q,cross_product_check_1.q,cross_product_check_2.q,dynpart_sort_opt_vectorization.q,dynpart_sort_optimization.q,orc_analyze.q,join0.q,join1.q,auto_join0.q,auto_join1.q,bucket2.q,bucket3.q,bucket4.q,count.q,create_merge_compressed.q,cross_join.q,ctas.q,custom_input_output_format.q,disable_merge_for_bucketing.q,enforce_order.q,filter_join_breaktask.q,filter_join_breaktask2.q,groupby1.q,groupby2.q,groupby3.q,having.q,insert1.q,insert_into1.q,insert_into2.q,leftsemijoin.q,limit_pushdown.q,load_dyn_part1.q,load_dyn_part2.q,load_dyn_part3.q,mapjoin_mapjoin.q,mapreduce1.q,mapreduce2.q,merge1.q,merge2.q,metadata_only_queries.q,sample1.q,subquery_in.q,subquery_exists.q,vectorization_15.q,ptf.q,stats_counter.q,stats_noscan_1.q,stats_counter_partitioned.q,union2.q,union3.q,union4.q,union5.q,union6.q,union7.q,union8.q,union9.q,transform1.q,transform2.q,transf
orm_ppr1.q,transform_ppr2.q,script_env_var1.q,script_env_var2.q,script_pipe.q,scriptfile1.q,metadataonly1.q,temp_table.q,vectorized_ptf.q,optimize_nullscan.q,vector_cast_constant.q,vector_string_concat.q,vector_decimal_aggregate.q,vector_left_outer_join.q,vectorization_12.q,vectorization_13.q,vectorization_14.q,vectorization_9.q,vectorization_part_project.q,vectorization_short_regress.q,vectorized_mapjoin.q,vectorized_nested_mapjoin.q,vectorized_shufflejoin.q,vectorized_timestamp_funcs.q,vector_data_types.q
beeline.positive.exclude=add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rename.q,
exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_overwr
ite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_column.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q
+spark.query.files=spark_test.q
Modified: hive/branches/spark/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/pom.xml?rev=1617394&r1=1617393&r2=1617394&view=diff
==============================================================================
--- hive/branches/spark/pom.xml (original)
+++ hive/branches/spark/pom.xml Tue Aug 12 01:41:41 2014
@@ -139,7 +139,7 @@
in artifact name and given that zookeeper < 3.5
requires netty < 3.6.0 we force hadoops version
-->
- <netty.version>3.4.0.Final</netty.version>
+ <netty.version>3.7.0.Final</netty.version>
<parquet.version>1.5.0</parquet.version>
<pig.version>0.12.0</pig.version>
<protobuf.version>2.5.0</protobuf.version>
@@ -155,7 +155,7 @@
<wadl-resourcedoc-doclet.version>1.4</wadl-resourcedoc-doclet.version>
<velocity.version>1.5</velocity.version>
<xerces.version>2.9.1</xerces.version>
- <zookeeper.version>3.4.5</zookeeper.version>
+ <zookeeper.version>3.4.6</zookeeper.version>
<jpam.version>1.1</jpam.version>
<felix.version>2.4.0</felix.version>
</properties>
Added: hive/branches/spark/ql/src/test/queries/clientpositive/spark_test.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/spark_test.q?rev=1617394&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/spark_test.q (added)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/spark_test.q Tue Aug 12 01:41:41 2014
@@ -0,0 +1,4 @@
+-- SORT_QUERY_RESULTS
+
+select key from src;
+select key,avg(key) from src group by key;
Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/spark_test.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/spark_test.q.out?rev=1617394&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/spark_test.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/spark_test.q.out Tue Aug 12 01:41:41 2014
@@ -0,0 +1,829 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+select key from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+select key from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0
+0
+0
+10
+100
+100
+103
+103
+104
+104
+105
+11
+111
+113
+113
+114
+116
+118
+118
+119
+119
+119
+12
+12
+120
+120
+125
+125
+126
+128
+128
+128
+129
+129
+131
+133
+134
+134
+136
+137
+137
+138
+138
+138
+138
+143
+145
+146
+146
+149
+149
+15
+15
+150
+152
+152
+153
+155
+156
+157
+158
+160
+162
+163
+164
+164
+165
+165
+166
+167
+167
+167
+168
+169
+169
+169
+169
+17
+170
+172
+172
+174
+174
+175
+175
+176
+176
+177
+178
+179
+179
+18
+18
+180
+181
+183
+186
+187
+187
+187
+189
+19
+190
+191
+191
+192
+193
+193
+193
+194
+195
+195
+196
+197
+197
+199
+199
+199
+2
+20
+200
+200
+201
+202
+203
+203
+205
+205
+207
+207
+208
+208
+208
+209
+209
+213
+213
+214
+216
+216
+217
+217
+218
+219
+219
+221
+221
+222
+223
+223
+224
+224
+226
+228
+229
+229
+230
+230
+230
+230
+230
+233
+233
+235
+237
+237
+238
+238
+239
+239
+24
+24
+241
+242
+242
+244
+247
+248
+249
+252
+255
+255
+256
+256
+257
+258
+26
+26
+260
+262
+263
+265
+265
+266
+27
+272
+272
+273
+273
+273
+274
+275
+277
+277
+277
+277
+278
+278
+28
+280
+280
+281
+281
+282
+282
+283
+284
+285
+286
+287
+288
+288
+289
+291
+292
+296
+298
+298
+298
+30
+302
+305
+306
+307
+307
+308
+309
+309
+310
+311
+311
+311
+315
+316
+316
+316
+317
+317
+318
+318
+318
+321
+321
+322
+322
+323
+325
+325
+327
+327
+327
+33
+331
+331
+332
+333
+333
+335
+336
+338
+339
+34
+341
+342
+342
+344
+344
+345
+348
+348
+348
+348
+348
+35
+35
+35
+351
+353
+353
+356
+360
+362
+364
+365
+366
+367
+367
+368
+369
+369
+369
+37
+37
+373
+374
+375
+377
+378
+379
+382
+382
+384
+384
+384
+386
+389
+392
+393
+394
+395
+395
+396
+396
+396
+397
+397
+399
+399
+4
+400
+401
+401
+401
+401
+401
+402
+403
+403
+403
+404
+404
+406
+406
+406
+406
+407
+409
+409
+409
+41
+411
+413
+413
+414
+414
+417
+417
+417
+418
+419
+42
+42
+421
+424
+424
+427
+429
+429
+43
+430
+430
+430
+431
+431
+431
+432
+435
+436
+437
+438
+438
+438
+439
+439
+44
+443
+444
+446
+448
+449
+452
+453
+454
+454
+454
+455
+457
+458
+458
+459
+459
+460
+462
+462
+463
+463
+466
+466
+466
+467
+468
+468
+468
+468
+469
+469
+469
+469
+469
+47
+470
+472
+475
+477
+478
+478
+479
+480
+480
+480
+481
+482
+483
+484
+485
+487
+489
+489
+489
+489
+490
+491
+492
+492
+493
+494
+495
+496
+497
+498
+498
+498
+5
+5
+5
+51
+51
+53
+54
+57
+58
+58
+64
+65
+66
+67
+67
+69
+70
+70
+70
+72
+72
+74
+76
+76
+77
+78
+8
+80
+82
+83
+83
+84
+84
+85
+86
+87
+9
+90
+90
+90
+92
+95
+95
+96
+97
+97
+98
+98
+PREHOOK: query: select key,avg(key) from src group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select key,avg(key) from src group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0 0.0
+10 10.0
+100 100.0
+103 103.0
+104 104.0
+105 105.0
+11 11.0
+111 111.0
+113 113.0
+114 114.0
+116 116.0
+118 118.0
+119 119.0
+12 12.0
+120 120.0
+125 125.0
+126 126.0
+128 128.0
+129 129.0
+131 131.0
+133 133.0
+134 134.0
+136 136.0
+137 137.0
+138 138.0
+143 143.0
+145 145.0
+146 146.0
+149 149.0
+15 15.0
+150 150.0
+152 152.0
+153 153.0
+155 155.0
+156 156.0
+157 157.0
+158 158.0
+160 160.0
+162 162.0
+163 163.0
+164 164.0
+165 165.0
+166 166.0
+167 167.0
+168 168.0
+169 169.0
+17 17.0
+170 170.0
+172 172.0
+174 174.0
+175 175.0
+176 176.0
+177 177.0
+178 178.0
+179 179.0
+18 18.0
+180 180.0
+181 181.0
+183 183.0
+186 186.0
+187 187.0
+189 189.0
+19 19.0
+190 190.0
+191 191.0
+192 192.0
+193 193.0
+194 194.0
+195 195.0
+196 196.0
+197 197.0
+199 199.0
+2 2.0
+20 20.0
+200 200.0
+201 201.0
+202 202.0
+203 203.0
+205 205.0
+207 207.0
+208 208.0
+209 209.0
+213 213.0
+214 214.0
+216 216.0
+217 217.0
+218 218.0
+219 219.0
+221 221.0
+222 222.0
+223 223.0
+224 224.0
+226 226.0
+228 228.0
+229 229.0
+230 230.0
+233 233.0
+235 235.0
+237 237.0
+238 238.0
+239 239.0
+24 24.0
+241 241.0
+242 242.0
+244 244.0
+247 247.0
+248 248.0
+249 249.0
+252 252.0
+255 255.0
+256 256.0
+257 257.0
+258 258.0
+26 26.0
+260 260.0
+262 262.0
+263 263.0
+265 265.0
+266 266.0
+27 27.0
+272 272.0
+273 273.0
+274 274.0
+275 275.0
+277 277.0
+278 278.0
+28 28.0
+280 280.0
+281 281.0
+282 282.0
+283 283.0
+284 284.0
+285 285.0
+286 286.0
+287 287.0
+288 288.0
+289 289.0
+291 291.0
+292 292.0
+296 296.0
+298 298.0
+30 30.0
+302 302.0
+305 305.0
+306 306.0
+307 307.0
+308 308.0
+309 309.0
+310 310.0
+311 311.0
+315 315.0
+316 316.0
+317 317.0
+318 318.0
+321 321.0
+322 322.0
+323 323.0
+325 325.0
+327 327.0
+33 33.0
+331 331.0
+332 332.0
+333 333.0
+335 335.0
+336 336.0
+338 338.0
+339 339.0
+34 34.0
+341 341.0
+342 342.0
+344 344.0
+345 345.0
+348 348.0
+35 35.0
+351 351.0
+353 353.0
+356 356.0
+360 360.0
+362 362.0
+364 364.0
+365 365.0
+366 366.0
+367 367.0
+368 368.0
+369 369.0
+37 37.0
+373 373.0
+374 374.0
+375 375.0
+377 377.0
+378 378.0
+379 379.0
+382 382.0
+384 384.0
+386 386.0
+389 389.0
+392 392.0
+393 393.0
+394 394.0
+395 395.0
+396 396.0
+397 397.0
+399 399.0
+4 4.0
+400 400.0
+401 401.0
+402 402.0
+403 403.0
+404 404.0
+406 406.0
+407 407.0
+409 409.0
+41 41.0
+411 411.0
+413 413.0
+414 414.0
+417 417.0
+418 418.0
+419 419.0
+42 42.0
+421 421.0
+424 424.0
+427 427.0
+429 429.0
+43 43.0
+430 430.0
+431 431.0
+432 432.0
+435 435.0
+436 436.0
+437 437.0
+438 438.0
+439 439.0
+44 44.0
+443 443.0
+444 444.0
+446 446.0
+448 448.0
+449 449.0
+452 452.0
+453 453.0
+454 454.0
+455 455.0
+457 457.0
+458 458.0
+459 459.0
+460 460.0
+462 462.0
+463 463.0
+466 466.0
+467 467.0
+468 468.0
+469 469.0
+47 47.0
+470 470.0
+472 472.0
+475 475.0
+477 477.0
+478 478.0
+479 479.0
+480 480.0
+481 481.0
+482 482.0
+483 483.0
+484 484.0
+485 485.0
+487 487.0
+489 489.0
+490 490.0
+491 491.0
+492 492.0
+493 493.0
+494 494.0
+495 495.0
+496 496.0
+497 497.0
+498 498.0
+5 5.0
+51 51.0
+53 53.0
+54 54.0
+57 57.0
+58 58.0
+64 64.0
+65 65.0
+66 66.0
+67 67.0
+69 69.0
+70 70.0
+72 72.0
+74 74.0
+76 76.0
+77 77.0
+78 78.0
+8 8.0
+80 80.0
+82 82.0
+83 83.0
+84 84.0
+85 85.0
+86 86.0
+87 87.0
+9 9.0
+90 90.0
+92 92.0
+95 95.0
+96 96.0
+97 97.0
+98 98.0
Added: hive/branches/spark/ql/src/test/results/clientpositive/spark_test.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark_test.q.out?rev=1617394&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark_test.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark_test.q.out Tue Aug 12 01:41:41 2014
@@ -0,0 +1,829 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+select key from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+select key from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0
+0
+0
+10
+100
+100
+103
+103
+104
+104
+105
+11
+111
+113
+113
+114
+116
+118
+118
+119
+119
+119
+12
+12
+120
+120
+125
+125
+126
+128
+128
+128
+129
+129
+131
+133
+134
+134
+136
+137
+137
+138
+138
+138
+138
+143
+145
+146
+146
+149
+149
+15
+15
+150
+152
+152
+153
+155
+156
+157
+158
+160
+162
+163
+164
+164
+165
+165
+166
+167
+167
+167
+168
+169
+169
+169
+169
+17
+170
+172
+172
+174
+174
+175
+175
+176
+176
+177
+178
+179
+179
+18
+18
+180
+181
+183
+186
+187
+187
+187
+189
+19
+190
+191
+191
+192
+193
+193
+193
+194
+195
+195
+196
+197
+197
+199
+199
+199
+2
+20
+200
+200
+201
+202
+203
+203
+205
+205
+207
+207
+208
+208
+208
+209
+209
+213
+213
+214
+216
+216
+217
+217
+218
+219
+219
+221
+221
+222
+223
+223
+224
+224
+226
+228
+229
+229
+230
+230
+230
+230
+230
+233
+233
+235
+237
+237
+238
+238
+239
+239
+24
+24
+241
+242
+242
+244
+247
+248
+249
+252
+255
+255
+256
+256
+257
+258
+26
+26
+260
+262
+263
+265
+265
+266
+27
+272
+272
+273
+273
+273
+274
+275
+277
+277
+277
+277
+278
+278
+28
+280
+280
+281
+281
+282
+282
+283
+284
+285
+286
+287
+288
+288
+289
+291
+292
+296
+298
+298
+298
+30
+302
+305
+306
+307
+307
+308
+309
+309
+310
+311
+311
+311
+315
+316
+316
+316
+317
+317
+318
+318
+318
+321
+321
+322
+322
+323
+325
+325
+327
+327
+327
+33
+331
+331
+332
+333
+333
+335
+336
+338
+339
+34
+341
+342
+342
+344
+344
+345
+348
+348
+348
+348
+348
+35
+35
+35
+351
+353
+353
+356
+360
+362
+364
+365
+366
+367
+367
+368
+369
+369
+369
+37
+37
+373
+374
+375
+377
+378
+379
+382
+382
+384
+384
+384
+386
+389
+392
+393
+394
+395
+395
+396
+396
+396
+397
+397
+399
+399
+4
+400
+401
+401
+401
+401
+401
+402
+403
+403
+403
+404
+404
+406
+406
+406
+406
+407
+409
+409
+409
+41
+411
+413
+413
+414
+414
+417
+417
+417
+418
+419
+42
+42
+421
+424
+424
+427
+429
+429
+43
+430
+430
+430
+431
+431
+431
+432
+435
+436
+437
+438
+438
+438
+439
+439
+44
+443
+444
+446
+448
+449
+452
+453
+454
+454
+454
+455
+457
+458
+458
+459
+459
+460
+462
+462
+463
+463
+466
+466
+466
+467
+468
+468
+468
+468
+469
+469
+469
+469
+469
+47
+470
+472
+475
+477
+478
+478
+479
+480
+480
+480
+481
+482
+483
+484
+485
+487
+489
+489
+489
+489
+490
+491
+492
+492
+493
+494
+495
+496
+497
+498
+498
+498
+5
+5
+5
+51
+51
+53
+54
+57
+58
+58
+64
+65
+66
+67
+67
+69
+70
+70
+70
+72
+72
+74
+76
+76
+77
+78
+8
+80
+82
+83
+83
+84
+84
+85
+86
+87
+9
+90
+90
+90
+92
+95
+95
+96
+97
+97
+98
+98
+PREHOOK: query: select key,avg(key) from src group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select key,avg(key) from src group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0 0.0
+10 10.0
+100 100.0
+103 103.0
+104 104.0
+105 105.0
+11 11.0
+111 111.0
+113 113.0
+114 114.0
+116 116.0
+118 118.0
+119 119.0
+12 12.0
+120 120.0
+125 125.0
+126 126.0
+128 128.0
+129 129.0
+131 131.0
+133 133.0
+134 134.0
+136 136.0
+137 137.0
+138 138.0
+143 143.0
+145 145.0
+146 146.0
+149 149.0
+15 15.0
+150 150.0
+152 152.0
+153 153.0
+155 155.0
+156 156.0
+157 157.0
+158 158.0
+160 160.0
+162 162.0
+163 163.0
+164 164.0
+165 165.0
+166 166.0
+167 167.0
+168 168.0
+169 169.0
+17 17.0
+170 170.0
+172 172.0
+174 174.0
+175 175.0
+176 176.0
+177 177.0
+178 178.0
+179 179.0
+18 18.0
+180 180.0
+181 181.0
+183 183.0
+186 186.0
+187 187.0
+189 189.0
+19 19.0
+190 190.0
+191 191.0
+192 192.0
+193 193.0
+194 194.0
+195 195.0
+196 196.0
+197 197.0
+199 199.0
+2 2.0
+20 20.0
+200 200.0
+201 201.0
+202 202.0
+203 203.0
+205 205.0
+207 207.0
+208 208.0
+209 209.0
+213 213.0
+214 214.0
+216 216.0
+217 217.0
+218 218.0
+219 219.0
+221 221.0
+222 222.0
+223 223.0
+224 224.0
+226 226.0
+228 228.0
+229 229.0
+230 230.0
+233 233.0
+235 235.0
+237 237.0
+238 238.0
+239 239.0
+24 24.0
+241 241.0
+242 242.0
+244 244.0
+247 247.0
+248 248.0
+249 249.0
+252 252.0
+255 255.0
+256 256.0
+257 257.0
+258 258.0
+26 26.0
+260 260.0
+262 262.0
+263 263.0
+265 265.0
+266 266.0
+27 27.0
+272 272.0
+273 273.0
+274 274.0
+275 275.0
+277 277.0
+278 278.0
+28 28.0
+280 280.0
+281 281.0
+282 282.0
+283 283.0
+284 284.0
+285 285.0
+286 286.0
+287 287.0
+288 288.0
+289 289.0
+291 291.0
+292 292.0
+296 296.0
+298 298.0
+30 30.0
+302 302.0
+305 305.0
+306 306.0
+307 307.0
+308 308.0
+309 309.0
+310 310.0
+311 311.0
+315 315.0
+316 316.0
+317 317.0
+318 318.0
+321 321.0
+322 322.0
+323 323.0
+325 325.0
+327 327.0
+33 33.0
+331 331.0
+332 332.0
+333 333.0
+335 335.0
+336 336.0
+338 338.0
+339 339.0
+34 34.0
+341 341.0
+342 342.0
+344 344.0
+345 345.0
+348 348.0
+35 35.0
+351 351.0
+353 353.0
+356 356.0
+360 360.0
+362 362.0
+364 364.0
+365 365.0
+366 366.0
+367 367.0
+368 368.0
+369 369.0
+37 37.0
+373 373.0
+374 374.0
+375 375.0
+377 377.0
+378 378.0
+379 379.0
+382 382.0
+384 384.0
+386 386.0
+389 389.0
+392 392.0
+393 393.0
+394 394.0
+395 395.0
+396 396.0
+397 397.0
+399 399.0
+4 4.0
+400 400.0
+401 401.0
+402 402.0
+403 403.0
+404 404.0
+406 406.0
+407 407.0
+409 409.0
+41 41.0
+411 411.0
+413 413.0
+414 414.0
+417 417.0
+418 418.0
+419 419.0
+42 42.0
+421 421.0
+424 424.0
+427 427.0
+429 429.0
+43 43.0
+430 430.0
+431 431.0
+432 432.0
+435 435.0
+436 436.0
+437 437.0
+438 438.0
+439 439.0
+44 44.0
+443 443.0
+444 444.0
+446 446.0
+448 448.0
+449 449.0
+452 452.0
+453 453.0
+454 454.0
+455 455.0
+457 457.0
+458 458.0
+459 459.0
+460 460.0
+462 462.0
+463 463.0
+466 466.0
+467 467.0
+468 468.0
+469 469.0
+47 47.0
+470 470.0
+472 472.0
+475 475.0
+477 477.0
+478 478.0
+479 479.0
+480 480.0
+481 481.0
+482 482.0
+483 483.0
+484 484.0
+485 485.0
+487 487.0
+489 489.0
+490 490.0
+491 491.0
+492 492.0
+493 493.0
+494 494.0
+495 495.0
+496 496.0
+497 497.0
+498 498.0
+5 5.0
+51 51.0
+53 53.0
+54 54.0
+57 57.0
+58 58.0
+64 64.0
+65 65.0
+66 66.0
+67 67.0
+69 69.0
+70 70.0
+72 72.0
+74 74.0
+76 76.0
+77 77.0
+78 78.0
+8 8.0
+80 80.0
+82 82.0
+83 83.0
+84 84.0
+85 85.0
+86 86.0
+87 87.0
+9 9.0
+90 90.0
+92 92.0
+95 95.0
+96 96.0
+97 97.0
+98 98.0