You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@phoenix.apache.org by ja...@apache.org on 2014/04/24 07:59:36 UTC
svn commit: r1589596 - in /incubator/phoenix: phoenix-docs/src/main/org/h2/
phoenix-docs/src/main/org/h2/jdbc/ phoenix-docs/src/main/org/h2/jdbcx/
phoenix-docs/src/main/org/h2/tools/ site/publish/
site/publish/presentations/ site/source/src/site/markdo...
Author: jamestaylor
Date: Thu Apr 24 05:59:35 2014
New Revision: 1589596
URL: http://svn.apache.org/r1589596
Log:
Fix formatting on Pig Integration page, tweak join documentation for memory calculation
Modified:
incubator/phoenix/phoenix-docs/src/main/org/h2/Driver.java
incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java
incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java
incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java
incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcResultSet.java
incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcStatement.java
incubator/phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcConnectionPool.java
incubator/phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcDataSource.java
incubator/phoenix/phoenix-docs/src/main/org/h2/tools/SimpleResultSet.java
incubator/phoenix/site/publish/joins.html
incubator/phoenix/site/publish/pig_integration.html
incubator/phoenix/site/publish/presentations/ApacheCon16x9.pdf
incubator/phoenix/site/source/src/site/markdown/pig_integration.md
incubator/phoenix/site/source/src/site/resources/presentations/ApacheCon16x9.pdf
incubator/phoenix/site/source/src/site/xhtml/joins.xhtml
Modified: incubator/phoenix/phoenix-docs/src/main/org/h2/Driver.java
URL: http://svn.apache.org/viewvc/incubator/phoenix/phoenix-docs/src/main/org/h2/Driver.java?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/phoenix-docs/src/main/org/h2/Driver.java (original)
+++ incubator/phoenix/phoenix-docs/src/main/org/h2/Driver.java Thu Apr 24 05:59:35 2014
@@ -17,7 +17,7 @@ import org.h2.message.DbException;
import org.h2.message.TraceSystem;
import org.h2.upgrade.DbUpgrade;
-//## Java 1.7 ##
+/*## Java 1.7 ##
import java.util.logging.Logger;
//*/
@@ -138,7 +138,7 @@ public class Driver implements java.sql.
/**
* [Not supported]
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public Logger getParentLogger() {
return null;
}
Modified: incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java
URL: http://svn.apache.org/viewvc/incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java (original)
+++ incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java Thu Apr 24 05:59:35 2014
@@ -1440,7 +1440,7 @@ public class JdbcCallableStatement exten
* @param parameterIndex the parameter index (1, 2, ...)
* @param type the class of the returned value
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public <T> T getObject(int parameterIndex, Class<T> type) {
return null;
}
@@ -1452,7 +1452,7 @@ public class JdbcCallableStatement exten
* @param parameterName the parameter name
* @param type the class of the returned value
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public <T> T getObject(String parameterName, Class<T> type) {
return null;
}
Modified: incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java
URL: http://svn.apache.org/viewvc/incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java (original)
+++ incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java Thu Apr 24 05:59:35 2014
@@ -49,7 +49,7 @@ import java.sql.SQLXML;
import java.sql.SQLClientInfoException;
//*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
import java.util.concurrent.Executor;
//*/
@@ -1678,7 +1678,7 @@ public class JdbcConnection extends Trac
*
* @param schema the schema
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public void setSchema(String schema) {
// not supported
}
@@ -1687,7 +1687,7 @@ public class JdbcConnection extends Trac
/**
* [Not supported]
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public String getSchema() {
return null;
}
@@ -1698,7 +1698,7 @@ public class JdbcConnection extends Trac
*
* @param executor the executor used by this method
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public void abort(Executor executor) {
// not supported
}
@@ -1710,7 +1710,7 @@ public class JdbcConnection extends Trac
* @param executor the executor used by this method
* @param milliseconds the TCP connection timeout
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public void setNetworkTimeout(Executor executor, int milliseconds) {
// not supported
}
@@ -1719,7 +1719,7 @@ public class JdbcConnection extends Trac
/**
* [Not supported]
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public int getNetworkTimeout() {
return 0;
}
Modified: incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java
URL: http://svn.apache.org/viewvc/incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java (original)
+++ incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java Thu Apr 24 05:59:35 2014
@@ -2885,7 +2885,7 @@ public class JdbcDatabaseMetaData extend
/**
* [Not supported]
*/
- //## Java 1.7 ##
+ /*## Java 1.7 ##
public boolean generatedKeyAlwaysReturned() {
return true;
}
@@ -2902,7 +2902,7 @@ public class JdbcDatabaseMetaData extend
* @param columnNamePattern null (to get all objects) or a column name
* (uppercase for unquoted names)
*/
- //## Java 1.7 ##
+ /*## Java 1.7 ##
public ResultSet getPseudoColumns(String catalog, String schemaPattern,
String tableNamePattern, String columnNamePattern) {
return null;
Modified: incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcResultSet.java
URL: http://svn.apache.org/viewvc/incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcResultSet.java?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcResultSet.java (original)
+++ incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcResultSet.java Thu Apr 24 05:59:35 2014
@@ -3431,7 +3431,7 @@ public class JdbcResultSet extends Trace
* @param columnIndex the column index (1, 2, ...)
* @param type the class of the returned value
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public <T> T getObject(int columnIndex, Class<T> type) {
return null;
}
@@ -3443,7 +3443,7 @@ public class JdbcResultSet extends Trace
* @param columnName the column name
* @param type the class of the returned value
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public <T> T getObject(String columnName, Class<T> type) {
return null;
}
Modified: incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcStatement.java
URL: http://svn.apache.org/viewvc/incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcStatement.java?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcStatement.java (original)
+++ incubator/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcStatement.java Thu Apr 24 05:59:35 2014
@@ -876,7 +876,7 @@ public class JdbcStatement extends Trace
/**
* [Not supported]
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public void closeOnCompletion() {
// not supported
}
@@ -885,7 +885,7 @@ public class JdbcStatement extends Trace
/**
* [Not supported]
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public boolean isCloseOnCompletion() {
return true;
}
Modified: incubator/phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcConnectionPool.java
URL: http://svn.apache.org/viewvc/incubator/phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcConnectionPool.java?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcConnectionPool.java (original)
+++ incubator/phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcConnectionPool.java Thu Apr 24 05:59:35 2014
@@ -35,7 +35,7 @@ import org.h2.util.New;
import org.h2.message.DbException;
//*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
import java.util.logging.Logger;
//*/
@@ -330,7 +330,7 @@ public class JdbcConnectionPool implemen
/**
* [Not supported]
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public Logger getParentLogger() {
return null;
}
Modified: incubator/phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcDataSource.java
URL: http://svn.apache.org/viewvc/incubator/phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcDataSource.java?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcDataSource.java (original)
+++ incubator/phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcDataSource.java Thu Apr 24 05:59:35 2014
@@ -26,7 +26,7 @@ import org.h2.jdbc.JdbcConnection;
import org.h2.message.TraceObject;
import org.h2.util.StringUtils;
-//## Java 1.7 ##
+/*## Java 1.7 ##
import java.util.logging.Logger;
//*/
@@ -381,7 +381,7 @@ public class JdbcDataSource extends Trac
/**
* [Not supported]
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public Logger getParentLogger() {
return null;
}
Modified: incubator/phoenix/phoenix-docs/src/main/org/h2/tools/SimpleResultSet.java
URL: http://svn.apache.org/viewvc/incubator/phoenix/phoenix-docs/src/main/org/h2/tools/SimpleResultSet.java?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/phoenix-docs/src/main/org/h2/tools/SimpleResultSet.java (original)
+++ incubator/phoenix/phoenix-docs/src/main/org/h2/tools/SimpleResultSet.java Thu Apr 24 05:59:35 2014
@@ -853,7 +853,7 @@ public class SimpleResultSet implements
* @param columnIndex the column index (1, 2, ...)
* @param type the class of the returned value
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public <T> T getObject(int columnIndex, Class<T> type) {
return null;
}
@@ -865,7 +865,7 @@ public class SimpleResultSet implements
* @param columnName the column name
* @param type the class of the returned value
*/
-//## Java 1.7 ##
+/*## Java 1.7 ##
public <T> T getObject(String columnName, Class<T> type) {
return null;
}
Modified: incubator/phoenix/site/publish/joins.html
URL: http://svn.apache.org/viewvc/incubator/phoenix/site/publish/joins.html?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/site/publish/joins.html (original)
+++ incubator/phoenix/site/publish/joins.html Thu Apr 24 05:59:35 2014
@@ -422,9 +422,9 @@ CLIENT MERGE SORT
</ul></li>
<li> <p>phoenix.query.maxGlobalMemoryPercentage</p>
<ul>
- <li>Percentage of total heap memory (i.e. Runtime.getRuntime().totalMemory()) that all threads may use.</li>
+ <li>Percentage of total heap memory (i.e. Runtime.getRuntime().maxMemory()) that all threads may use.</li>
<li>The summed size of all living caches must be smaller than this global memory pool size. Otherwise, you would get an <tt>InsufficientMemoryException</tt>.</li>
- <li><b>Default: 20</b></li>
+ <li><b>Default: 15</b></li>
</ul></li>
<li> <p>phoenix.coprocessor.maxServerCacheTimeToLiveMs</p>
<ul>
Modified: incubator/phoenix/site/publish/pig_integration.html
URL: http://svn.apache.org/viewvc/incubator/phoenix/site/publish/pig_integration.html?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/site/publish/pig_integration.html (original)
+++ incubator/phoenix/site/publish/pig_integration.html Thu Apr 24 05:59:35 2014
@@ -128,7 +128,6 @@
<p>The StoreFunc allows users to write data in Phoenix-encoded format to HBase tables using Pig scripts. This is a nice way to bulk upload data from a MapReduce job in parallel to a Phoenix table in HBase. All you need to specify is the endpoint address, HBase table name and a batch size. For example:</p>
<div class="source">
<pre>A = load 'testdata' as (a:chararray, b:chararray, c:chararray, d:chararray, e: datetime);
-
STORE A into 'hbase://CORE.ENTITY_HISTORY' using
org.apache.phoenix.pig.PhoenixHBaseStorage('localhost','-batchSize 5000');
</pre>
@@ -145,37 +144,52 @@ STORE A into 'hbase://CORE.ENTITY_HISTOR
<h2 id="Pig_Loader">Pig Loader</h2>
<p>A Pig data loader allows users to read data from Phoenix backed HBase tables within a Pig script. </p>
<p>The Load func provides two alternative ways to load data.</p>
- <div class="source">
- <pre> 1. Given a Table Name
- A = load 'hbase://table/HIRES' using org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
- The above loads the data for all the columns in HIRES table.
-
- To restrict the list of columns , you can specify the column names as part of LOAD as below
- A = load 'hbase://table/HIRES/ID,NAME' using org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
- Here, only data for ID and NAME columns are returned.
-
- 2. Given a Query
- A = load 'hbase://query/SELECT ID,NAME FROM HIRES WHERE AGE > 50' using org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
- The above query loads data of all those rows whose AGE column has a value > 50 . The LOAD func merely executes the given SQL query and returns the results.
-
- Though there is a provision to provide a query as part of LOAD, it is more restrictive to the following
- a) Should be a SELECT query only.
- b) Shouldn't contain any GROUP BY , ORDER BY , LIMIT , DISTINCT clauses within the query.
- c) Shouldn't contain any of AGGREGATE functions.
+ <ol style="list-style-type: decimal">
+ <li> <p>Given a table name, the following will load the data for all the columns in the HIRES table:</p>
+ <div class="source">
+ <pre>A = load 'hbase://table/HIRES' using org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
</pre>
- </div>
+ </div>To restrict the list of columns, you may specify the column names as part of LOAD as shown below:
+ <div class="source">
+ <pre>A = load 'hbase://table/HIRES/ID,NAME' using org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
+</pre>
+ </div> <p>Here, only data for ID and NAME columns are returned.</p></li>
+ <li> <p>Given a query, the following loads data for all those rows whose AGE column has a value of greater than 50:</p>
+ <div class="source">
+ <pre>A = load 'hbase://query/SELECT ID,NAME FROM HIRES WHERE AGE > 50' using org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
+</pre>
+ </div>The LOAD func merely executes the given SQL query and returns the results. Though there is a provision to provide a query as part of LOAD, it is restricted to the following:
+ <ul>
+ <li>Only a SELECT query is allowed. No DML statements such as UPSERT or DELETE.</li>
+ <li>The query may not contain any GROUP BY, ORDER BY, LIMIT, or DISTINCT clauses.</li>
+ <li>The query may not contain any AGGREGATE functions.</li>
+ </ul></li>
+ </ol>
<p>In both the cases, the zookeeper quorum should be passed to the PhoenixHBaseLoader as an argument to the constructor. </p>
<p>The Loadfunc makes best effort to map Phoenix Data Types to Pig datatype. You can have a look at org.apache.phoenix.pig.util.TypeUtil to see how each of Phoenix data type is mapped to Pig data type.</p>
- <p>####TODO 1. Support for ARRAY data type. 2. Usage of String, Date functions within the provided SQL Query.</p>
- <p>####Example : <b>Goal:</b> Determine the number of users by a CLIENT ID.</p>
- <p><b>Ddl:</b> CREATE TABLE HIRES( CLIENTID INTEGER NOT NULL, EMPID INTEGER NOT NULL, NAME VARCHAR CONSTRAINT pk PRIMARY KEY(CLIENTID,EMPID));</p>
- <p><b>Pig Script:</b> </p>
- <div class="source">
- <pre> raw = LOAD 'hbase://table/HIRES USING org.apache.phoenix.pig.PhoenixHBaseLoader('localhost')';
- grpd = GROUP raw BY CLIENTID;
- cnt = FOREACH grpd GENERATE group AS CLIENT,COUNT(raw);
- DUMP cnt;
+ <div class="section">
+ <h3 id="Example">Example</h3>
+ <p>Determine the number of users by a CLIENT ID</p>
+ <p><b>Ddl</b></p>
+ <div class="source">
+ <pre>CREATE TABLE HIRES( CLIENTID INTEGER NOT NULL, EMPID INTEGER NOT NULL, NAME VARCHAR CONSTRAINT pk PRIMARY KEY(CLIENTID,EMPID));
</pre>
+ </div>
+ <p><b>Pig Script</b> </p>
+ <div class="source">
+ <pre>raw = LOAD 'hbase://table/HIRES' USING org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
+grpd = GROUP raw BY CLIENTID;
+cnt = FOREACH grpd GENERATE group AS CLIENT,COUNT(raw);
+DUMP cnt;
+</pre>
+ </div>
+ </div>
+ <div class="section">
+ <h3 id="Future_Work">Future Work</h3>
+ <ol style="list-style-type: decimal">
+ <li>Support for ARRAY data type.</li>
+ <li>Usage of expressions within the SELECT clause when providing a full query.</li>
+ </ol>
</div>
</div>
</div>
Modified: incubator/phoenix/site/publish/presentations/ApacheCon16x9.pdf
URL: http://svn.apache.org/viewvc/incubator/phoenix/site/publish/presentations/ApacheCon16x9.pdf?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
Binary files - no diff available.
Modified: incubator/phoenix/site/source/src/site/markdown/pig_integration.md
URL: http://svn.apache.org/viewvc/incubator/phoenix/site/source/src/site/markdown/pig_integration.md?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/site/source/src/site/markdown/pig_integration.md (original)
+++ incubator/phoenix/site/source/src/site/markdown/pig_integration.md Thu Apr 24 05:59:35 2014
@@ -7,7 +7,6 @@ Pig integration may be divided into two
The StoreFunc allows users to write data in Phoenix-encoded format to HBase tables using Pig scripts. This is a nice way to bulk upload data from a MapReduce job in parallel to a Phoenix table in HBase. All you need to specify is the endpoint address, HBase table name and a batch size. For example:
A = load 'testdata' as (a:chararray, b:chararray, c:chararray, d:chararray, e: datetime);
-
STORE A into 'hbase://CORE.ENTITY_HISTORY' using
org.apache.phoenix.pig.PhoenixHBaseStorage('localhost','-batchSize 5000');
@@ -25,39 +24,43 @@ A Pig data loader allows users to read d
The Load func provides two alternative ways to load data.
- 1. Given a Table Name
- A = load 'hbase://table/HIRES' using org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
- The above loads the data for all the columns in HIRES table.
-
- To restrict the list of columns , you can specify the column names as part of LOAD as below
- A = load 'hbase://table/HIRES/ID,NAME' using org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
- Here, only data for ID and NAME columns are returned.
-
- 2. Given a Query
- A = load 'hbase://query/SELECT ID,NAME FROM HIRES WHERE AGE > 50' using org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
- The above query loads data of all those rows whose AGE column has a value > 50 . The LOAD func merely executes the given SQL query and returns the results.
-
- Though there is a provision to provide a query as part of LOAD, it is more restrictive to the following
- a) Should be a SELECT query only.
- b) Shouldn't contain any GROUP BY , ORDER BY , LIMIT , DISTINCT clauses within the query.
- c) Shouldn't contain any of AGGREGATE functions.
+1. Given a table name, the following will load the data for all the columns in the HIRES table:
+
+ A = load 'hbase://table/HIRES' using org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
+ To restrict the list of columns, you may specify the column names as part of LOAD as shown below:
+
+ A = load 'hbase://table/HIRES/ID,NAME' using org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
+
+ Here, only data for ID and NAME columns are returned.
+
+2. Given a query, the following loads data for all those rows whose AGE column has a value of greater than 50:
+
+ A = load 'hbase://query/SELECT ID,NAME FROM HIRES WHERE AGE > 50' using org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
+ The LOAD func merely executes the given SQL query and returns the results. Though there is a provision to provide a query as part of LOAD, it is restricted to the following:
+
+ * Only a SELECT query is allowed. No DML statements such as UPSERT or DELETE.
+ * The query may not contain any GROUP BY, ORDER BY, LIMIT, or DISTINCT clauses.
+ * The query may not contain any AGGREGATE functions.
- In both the cases, the zookeeper quorum should be passed to the PhoenixHBaseLoader as an argument to the constructor.
+In both cases, the ZooKeeper quorum should be passed to the PhoenixHBaseLoader as an argument to the constructor.
- The Loadfunc makes best effort to map Phoenix Data Types to Pig datatype. You can have a look at org.apache.phoenix.pig.util.TypeUtil to see how each of Phoenix data type is mapped to Pig data type.
+The LoadFunc makes a best effort to map Phoenix data types to Pig data types. You can have a look at org.apache.phoenix.pig.util.TypeUtil to see how each Phoenix data type is mapped to a Pig data type.
- ####TODO
- 1. Support for ARRAY data type.
- 2. Usage of String, Date functions within the provided SQL Query.
-
- ####Example :
- **Goal:** Determine the number of users by a CLIENT ID.
+###Example
+Determine the number of users by a CLIENT ID
- **Ddl:** CREATE TABLE HIRES( CLIENTID INTEGER NOT NULL, EMPID INTEGER NOT NULL, NAME VARCHAR CONSTRAINT pk PRIMARY KEY(CLIENTID,EMPID));
+**Ddl**
+
+ CREATE TABLE HIRES( CLIENTID INTEGER NOT NULL, EMPID INTEGER NOT NULL, NAME VARCHAR CONSTRAINT pk PRIMARY KEY(CLIENTID,EMPID));
- **Pig Script:**
+**Pig Script**
- raw = LOAD 'hbase://table/HIRES USING org.apache.phoenix.pig.PhoenixHBaseLoader('localhost')';
- grpd = GROUP raw BY CLIENTID;
- cnt = FOREACH grpd GENERATE group AS CLIENT,COUNT(raw);
- DUMP cnt;
\ No newline at end of file
+    raw = LOAD 'hbase://table/HIRES' USING org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');
+ grpd = GROUP raw BY CLIENTID;
+ cnt = FOREACH grpd GENERATE group AS CLIENT,COUNT(raw);
+ DUMP cnt;
+
+###Future Work
+ 1. Support for ARRAY data type.
+ 2. Usage of expressions within the SELECT clause when providing a full query.
+
Modified: incubator/phoenix/site/source/src/site/resources/presentations/ApacheCon16x9.pdf
URL: http://svn.apache.org/viewvc/incubator/phoenix/site/source/src/site/resources/presentations/ApacheCon16x9.pdf?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
Binary files - no diff available.
Modified: incubator/phoenix/site/source/src/site/xhtml/joins.xhtml
URL: http://svn.apache.org/viewvc/incubator/phoenix/site/source/src/site/xhtml/joins.xhtml?rev=1589596&r1=1589595&r2=1589596&view=diff
==============================================================================
--- incubator/phoenix/site/source/src/site/xhtml/joins.xhtml (original)
+++ incubator/phoenix/site/source/src/site/xhtml/joins.xhtml Thu Apr 24 05:59:35 2014
@@ -301,9 +301,9 @@ CLIENT MERGE SORT
</ul></li>
<li> <p>phoenix.query.maxGlobalMemoryPercentage</p>
<ul>
- <li>Percentage of total heap memory (i.e. Runtime.getRuntime().totalMemory()) that all threads may use.</li>
+ <li>Percentage of total heap memory (i.e. Runtime.getRuntime().maxMemory()) that all threads may use.</li>
<li>The summed size of all living caches must be smaller than this global memory pool size. Otherwise, you would get an <tt>InsufficientMemoryException</tt>.</li>
- <li><b>Default: 20</b></li>
+ <li><b>Default: 15</b></li>
</ul></li>
<li> <p>phoenix.coprocessor.maxServerCacheTimeToLiveMs</p>
<ul>