Posted to commits@phoenix.apache.org by ya...@apache.org on 2021/02/15 21:23:12 UTC

[phoenix] branch 4.16 updated (91739be -> 47a52bc)

This is an automated email from the ASF dual-hosted git repository.

yanxinyi pushed a change to branch 4.16
in repository https://gitbox.apache.org/repos/asf/phoenix.git.


    from 91739be  PHOENIX-6380 phoenix-client-embedded depends on logging classes
     new f6b030f  PHOENIX-6344: CASCADE on ALTER should NOOP when there are no secondary indexes (#1135)
     new 47a52bc  PHOENIX-6343 : Phoenix allows duplicate column names when one of them is a primary key (#1118)

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../phoenix/end2end/AlterAddCascadeIndexIT.java    |  83 +++++++++----
 .../org/apache/phoenix/end2end/CreateTableIT.java  | 131 +++++++++++++++++++++
 .../monitoring/PhoenixLoggingMetricsIT.java        |  17 +--
 .../phoenix/coprocessor/AddColumnMutator.java      |  24 ++++
 .../org/apache/phoenix/schema/MetaDataClient.java  |  35 +++++-
 .../apache/phoenix/compile/QueryOptimizerTest.java |   5 +-
 6 files changed, 254 insertions(+), 41 deletions(-)


[phoenix] 01/02: PHOENIX-6344: CASCADE on ALTER should NOOP when there are no secondary indexes (#1135)

Posted by ya...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

yanxinyi pushed a commit to branch 4.16
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit f6b030f751385d74704b1b32ba8fcd056da6b8b8
Author: Swaroopa Kadam <sw...@gmail.com>
AuthorDate: Tue Feb 9 13:52:29 2021 -0800

    PHOENIX-6344: CASCADE on ALTER should NOOP when there are no secondary indexes (#1135)
---
 .../phoenix/end2end/AlterAddCascadeIndexIT.java    | 83 +++++++++++++++-------
 .../org/apache/phoenix/schema/MetaDataClient.java  | 12 +++-
 2 files changed, 68 insertions(+), 27 deletions(-)

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterAddCascadeIndexIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterAddCascadeIndexIT.java
index def4814..dc35013 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterAddCascadeIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterAddCascadeIndexIT.java
@@ -64,15 +64,15 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
     public ExpectedException exception = ExpectedException.none();
     private static Connection conn;
     private Properties prop;
-    private boolean isViewIndex, mutable;
+    private boolean isViewScenario, mutable;
     private String phoenixObjectName;
     private String indexNames;
     private final String tableDDLOptions;
     String fullIndexNameOne, fullIndexNameTwo, fullTableName;
 
 
-    public AlterAddCascadeIndexIT(boolean isViewIndex, boolean mutable) {
-        this.isViewIndex = isViewIndex;
+    public AlterAddCascadeIndexIT(boolean isViewScenario, boolean mutable) {
+        this.isViewScenario = isViewScenario;
         StringBuilder optionBuilder = new StringBuilder("COLUMN_ENCODED_BYTES=0");
         if (!mutable) {
 
@@ -110,7 +110,7 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
                 "      population BIGINT,\n" +
                 "      CONSTRAINT my_pk PRIMARY KEY (state, city)) " + tableDDLOptions);
 
-        if(isViewIndex) {
+        if(isViewScenario) {
             conn.createStatement().execute("CREATE VIEW IF NOT EXISTS " + fullViewName +
                     " (city_area INTEGER, avg_fam_size INTEGER) AS " +
                     "SELECT * FROM "+fullTableName+" WHERE state = 'CA'");
@@ -136,10 +136,10 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
     // Test with ALTER TABLE/VIEW CASCADE INDEX ALL with upserting into new column
     @Test
     public void testAlterDBOAddCascadeIndexAllUpsert() throws Exception {
-        String query = "ALTER " +(isViewIndex? "VIEW " : "TABLE ") + phoenixObjectName + " ADD new_column_3 VARCHAR(64) CASCADE INDEX ALL";
+        String query = "ALTER " +(isViewScenario ? "VIEW " : "TABLE ") + phoenixObjectName + " ADD new_column_3 VARCHAR(64) CASCADE INDEX ALL";
         conn.createStatement().execute(query);
         PreparedStatement ps;
-        if(isViewIndex) {
+        if(isViewScenario) {
             ps = conn.prepareStatement("UPSERT INTO " + phoenixObjectName +
                     "(state,city,population,city_area,avg_fam_size,new_column_3) " +
                     "VALUES('CA','Santa Barbara',912332,1300,4,'test_column')");
@@ -151,7 +151,7 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
         ps.executeUpdate();
         ColumnInfo [] columnArray =  {new ColumnInfo("new_column_3", PVarchar.INSTANCE.getSqlType(), 64)};
         ColumnInfo [] columnIndexArray =  {new ColumnInfo("0:new_column_3", PVarchar.INSTANCE.getSqlType(), 64)};
-        if(isViewIndex) {
+        if(isViewScenario) {
             assertDBODefinition(conn, phoenixObjectName, PTableType.VIEW, 6, columnArray, false);
             assertDBODefinition(conn, fullIndexNameOne, PTableType.INDEX, 5, columnIndexArray, false);
             assertDBODefinition(conn, fullIndexNameTwo, PTableType.INDEX, 5, columnIndexArray, false);
@@ -172,16 +172,51 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
 
     }
 
+    // Test with ALTER TABLE/VIEW CASCADE INDEX ALL with no indexes
+    @Test
+    public void testAlterDBOAddCascadeIndexAll_noIndexes() throws Exception {
+        String schemaName = "S_"+generateUniqueName();
+        String tableName = "T_"+generateUniqueName();
+        String viewName = "V_"+generateUniqueName();
+
+        String fullViewName = SchemaUtil.getQualifiedTableName(schemaName, viewName);
+        String fullTableName = SchemaUtil.getQualifiedTableName(schemaName, tableName);
+
+        conn.createStatement().execute("CREATE TABLE IF NOT EXISTS " + fullTableName + " (\n" +
+                "      state CHAR(2) NOT NULL,\n" +
+                "      city VARCHAR NOT NULL,\n" +
+                "      population BIGINT,\n" +
+                "      CONSTRAINT my_pk PRIMARY KEY (state, city)) " + tableDDLOptions);
+
+        if(isViewScenario) {
+            conn.createStatement().execute("CREATE VIEW IF NOT EXISTS " + fullViewName
+                    + " (city_area INTEGER, avg_fam_size INTEGER) AS " + "SELECT * FROM "
+                    + fullTableName + " WHERE state = 'CA'");
+        }
+
+        String query = "ALTER " +(isViewScenario ? "VIEW " : "TABLE ") + (isViewScenario ? fullViewName : fullTableName)
+                + " ADD new_column_3 VARCHAR(64) CASCADE INDEX ALL";
+        conn.createStatement().execute(query);
+
+        ColumnInfo [] columnArray =  {new ColumnInfo("new_column_3", PVarchar.INSTANCE.getSqlType(), 64)};
+        if(isViewScenario) {
+            assertDBODefinition(conn, fullViewName, PTableType.VIEW, 6, columnArray, false);
+        } else {
+            assertDBODefinition(conn, fullTableName, PTableType.TABLE, 4, columnArray, false);
+        }
+
+    }
+
     // Test with CASCADE INDEX <index_name>
     @Test
     public void testAlterDBOAddCascadeIndex() throws Exception {
         ColumnInfo [] columnArray =  {new ColumnInfo("new_column_1", PFloat.INSTANCE.getSqlType())};
         ColumnInfo [] columnIndexArray =  {new ColumnInfo("0:new_column_1", PDecimal.INSTANCE.getSqlType())};
 
-        String query = "ALTER " + (isViewIndex? "VIEW " : "TABLE ")
+        String query = "ALTER " + (isViewScenario ? "VIEW " : "TABLE ")
                 + phoenixObjectName + " ADD new_column_1 FLOAT CASCADE INDEX " + indexNames.split(",")[0];
         conn.createStatement().execute(query);
-        if(isViewIndex) {
+        if(isViewScenario) {
             assertDBODefinition(conn, phoenixObjectName, PTableType.VIEW, 6, columnArray, false);
             assertDBODefinition(conn, fullIndexNameOne, PTableType.INDEX, 5, columnIndexArray, false);
             assertDBODefinition(conn, fullIndexNameTwo, PTableType.INDEX, 4, columnIndexArray, true);
@@ -199,10 +234,10 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
                 new ColumnInfo("new_column_2", PDouble.INSTANCE.getSqlType())};
         ColumnInfo [] columnIndexArray =  {new ColumnInfo("0:new_column_1", PDecimal.INSTANCE.getSqlType()),
                 new ColumnInfo("0:new_column_2", PDecimal.INSTANCE.getSqlType())};
-        String query = "ALTER " + (isViewIndex ? "VIEW " : "TABLE ") + phoenixObjectName
+        String query = "ALTER " + (isViewScenario ? "VIEW " : "TABLE ") + phoenixObjectName
                 + " ADD new_column_1 FLOAT, new_column_2 DOUBLE CASCADE INDEX " + indexNames.split(",")[0];
         conn.createStatement().execute(query);
-        if(isViewIndex) {
+        if(isViewScenario) {
             assertDBODefinition(conn, phoenixObjectName, PTableType.VIEW, 7, columnArray, false);
             assertDBODefinition(conn, fullIndexNameOne, PTableType.INDEX, 6, columnIndexArray, false);
             assertDBODefinition(conn, fullIndexNameTwo, PTableType.INDEX, 4, columnIndexArray, true);
@@ -219,10 +254,10 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
     public void testAlterDBOAddCascadeIndexes() throws Exception {
         ColumnInfo [] columnArray = {new ColumnInfo("new_column_1", PDouble.INSTANCE.getSqlType())};
         ColumnInfo [] columnIndexArray = {new ColumnInfo("0:new_column_1", PDecimal.INSTANCE.getSqlType())};
-        String query = "ALTER " + (isViewIndex ? "VIEW " : "TABLE ")
+        String query = "ALTER " + (isViewScenario ? "VIEW " : "TABLE ")
                 + phoenixObjectName + " ADD new_column_1 DOUBLE CASCADE INDEX " + indexNames;
         conn.createStatement().execute(query);
-        if(isViewIndex) {
+        if(isViewScenario) {
             assertDBODefinition(conn, phoenixObjectName, PTableType.VIEW, 6, columnArray, false);
             assertDBODefinition(conn, fullIndexNameOne, PTableType.INDEX, 5, columnIndexArray, false);
             assertDBODefinition(conn, fullIndexNameTwo, PTableType.INDEX, 5, columnIndexArray, false);
@@ -241,10 +276,10 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
         ColumnInfo [] columIndexArray =  {new ColumnInfo("0:new_column_1", PDecimal.INSTANCE.getSqlType()),
                 new ColumnInfo("0:new_column_2", PDecimal.INSTANCE.getSqlType())};
 
-        String query = "ALTER " + (isViewIndex ? "VIEW " : "TABLE ")
+        String query = "ALTER " + (isViewScenario ? "VIEW " : "TABLE ")
                 + phoenixObjectName + " ADD new_column_1 FLOAT, new_column_2 DOUBLE CASCADE INDEX " + indexNames;
         conn.createStatement().execute(query);
-        if(isViewIndex) {
+        if(isViewScenario) {
             assertDBODefinition(conn, phoenixObjectName, PTableType.VIEW, 7, columnArray, false);
             assertDBODefinition(conn, fullIndexNameOne, PTableType.INDEX, 6, columIndexArray, false);
             assertDBODefinition(conn, fullIndexNameTwo, PTableType.INDEX, 6, columIndexArray, false);
@@ -260,14 +295,14 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
     @Test
     public void testAlterDBOException() throws SQLException {
 
-        String query = "ALTER " + (isViewIndex ? "VIEW " : "TABLE ") + phoenixObjectName + " ADD new_column VARCHAR ALL";
+        String query = "ALTER " + (isViewScenario ? "VIEW " : "TABLE ") + phoenixObjectName + " ADD new_column VARCHAR ALL";
         try {
             conn.createStatement().execute(query);
         } catch (Exception e) {
             assertTrue(e.getMessage().contains(SYNTAX_ERROR));
         }
 
-        query = "ALTER " + (isViewIndex ? "VIEW " : "TABLE ") + phoenixObjectName
+        query = "ALTER " + (isViewScenario ? "VIEW " : "TABLE ") + phoenixObjectName
                 + " ADD new_column VARCHAR CASCADE " + indexNames.split(",")[0];
         try {
             conn.createStatement().execute(query);
@@ -275,7 +310,7 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
             assertTrue(e.getMessage().contains(SYNTAX_ERROR));
         }
 
-        query = "ALTER " + (isViewIndex ? "VIEW " : "TABLE ") + phoenixObjectName
+        query = "ALTER " + (isViewScenario ? "VIEW " : "TABLE ") + phoenixObjectName
                 + " ADD new_column VARCHAR INDEX " + indexNames.split(",")[0];
         try {
             conn.createStatement().execute(query);
@@ -283,7 +318,7 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
             assertTrue(e.getMessage().contains(SYNTAX_ERROR));
         }
 
-        query = "ALTER " + (isViewIndex ? "VIEW " : "TABLE ")
+        query = "ALTER " + (isViewScenario ? "VIEW " : "TABLE ")
                 + phoenixObjectName + " ADD new_column_1 DOUBLE CASCADE INDEX INCORRECT_NAME";
         try {
             conn.createStatement().execute(query);
@@ -294,13 +329,13 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
         String localIndex = generateUniqueName();
         String createLocalIndex = "CREATE LOCAL INDEX " + localIndex + " ON "
                 + phoenixObjectName + "(avg_fam_size) INCLUDE (population)";
-        if(!isViewIndex) {
+        if(!isViewScenario) {
             createLocalIndex = "CREATE LOCAL INDEX " + localIndex + " ON "
                     + phoenixObjectName + "(state, population)";
 
         }
         conn.createStatement().execute(createLocalIndex);
-        query = "ALTER " + (isViewIndex ? "VIEW " : "TABLE ")
+        query = "ALTER " + (isViewScenario ? "VIEW " : "TABLE ")
                 + phoenixObjectName + " ADD new_column_1 DOUBLE CASCADE INDEX "+localIndex;
         try {
             conn.createStatement().execute(query);
@@ -309,7 +344,7 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
                     .NOT_SUPPORTED_CASCADE_FEATURE_LOCAL_INDEX.getMessage()));
         }
 
-        query = "ALTER " + (isViewIndex ? "VIEW " : "TABLE ")
+        query = "ALTER " + (isViewScenario ? "VIEW " : "TABLE ")
                 + phoenixObjectName + " ADD new_column_2 DOUBLE CASCADE INDEX "+localIndex + "," + indexNames;
         try {
             conn.createStatement().execute(query);
@@ -323,7 +358,7 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
     // Exception for incorrect index name
     @Test
     public void testAlterDBOIncorrectIndexNameCombination() throws Exception {
-        String query = "ALTER " + (isViewIndex ? "VIEW " : "TABLE ")
+        String query = "ALTER " + (isViewScenario ? "VIEW " : "TABLE ")
                 + phoenixObjectName + " ADD new_column_1 DOUBLE CASCADE INDEX INCORRECT_NAME, "+ indexNames;
         try {
             conn.createStatement().execute(query);
@@ -332,7 +367,7 @@ public class AlterAddCascadeIndexIT extends ParallelStatsDisabledIT {
         }
         ColumnInfo [] columnArray =  {new ColumnInfo("new_column_1", PFloat.INSTANCE.getSqlType())};
         ColumnInfo [] columnIndexArray =  {new ColumnInfo("0:new_column_1", PDecimal.INSTANCE.getSqlType())};
-        if(isViewIndex) {
+        if(isViewScenario) {
             assertDBODefinition(conn, phoenixObjectName, PTableType.VIEW, 5, columnArray, true);
             assertDBODefinition(conn, fullIndexNameOne, PTableType.INDEX, 4, columnIndexArray, true);
             assertDBODefinition(conn, fullIndexNameTwo, PTableType.INDEX, 4, columnIndexArray, true);
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
index 919e072..6b54b53 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
@@ -3765,8 +3765,13 @@ public class MetaDataClient {
         // if cascade keyword is passed and indexes are provided either implicitly or explicitly
         if (cascade && (indexes == null || !indexes.isEmpty())) {
             indexesPTable = getIndexesPTableForCascade(indexes, table);
-            for (PTable index : indexesPTable) {
-                indexToColumnSizeMap.put(index, index.getColumns().size());
+            if(indexesPTable.size() == 0) {
+                // go back to regular behavior of altering the table/view
+                cascade = false;
+            } else {
+                for (PTable index : indexesPTable) {
+                    indexToColumnSizeMap.put(index, index.getColumns().size());
+                }
             }
         }
         boolean wasAutoCommit = connection.getAutoCommit();
@@ -4232,7 +4237,8 @@ public class MetaDataClient {
                 // this if clause ensures we only get the indexes that
                 // are only created on the view itself.
                 if (index.getIndexType().equals(IndexType.LOCAL)
-                        || (isView && index.getTableName().toString().contains("#"))) {
+                        || (isView && index.getTableName().toString().contains(
+                        QueryConstants.CHILD_VIEW_INDEX_NAME_SEPARATOR))) {
                     indexesPTable.remove(index);
                 }
             }
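
Editor's note (not part of the commit): the MetaDataClient hunk above is the core of PHOENIX-6344 - when CASCADE INDEX ALL (or an explicit index list) resolves to zero secondary indexes, the cascade flag is cleared and the statement proceeds as a plain column add. A minimal JDBC sketch of the resulting behavior, assuming a hypothetical table NO_INDEX_TABLE with no secondary indexes and an illustrative connection URL:

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class CascadeNoIndexExample {
        public static void main(String[] args) throws Exception {
            // Connection URL is illustrative; adjust for your cluster.
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
                conn.createStatement().execute(
                        "CREATE TABLE IF NOT EXISTS NO_INDEX_TABLE ("
                        + " state CHAR(2) NOT NULL, city VARCHAR NOT NULL, population BIGINT,"
                        + " CONSTRAINT my_pk PRIMARY KEY (state, city))");
                // With this fix, CASCADE INDEX ALL against a table (or view) that has
                // no secondary indexes simply adds the column; the cascade part is a NOOP.
                conn.createStatement().execute(
                        "ALTER TABLE NO_INDEX_TABLE ADD new_column_3 VARCHAR(64) CASCADE INDEX ALL");
            }
        }
    }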


[phoenix] 02/02: PHOENIX-6343 : Phoenix allows duplicate column names when one of them is a primary key (#1118)

Posted by ya...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

yanxinyi pushed a commit to branch 4.16
in repository https://gitbox.apache.org/repos/asf/phoenix.git

commit 47a52bc40dae54746f590a718dafd8606520b8d6
Author: Viraj Jasani <vj...@apache.org>
AuthorDate: Sun Feb 14 12:48:00 2021 +0530

    PHOENIX-6343 : Phoenix allows duplicate column names when one of them is a primary key (#1118)
    
    Signed-off-by: Geoffrey Jacoby <gj...@apache.org>
---
 .../org/apache/phoenix/end2end/CreateTableIT.java  | 131 +++++++++++++++++++++
 .../monitoring/PhoenixLoggingMetricsIT.java        |  17 +--
 .../phoenix/coprocessor/AddColumnMutator.java      |  24 ++++
 .../org/apache/phoenix/schema/MetaDataClient.java  |  23 +++-
 .../apache/phoenix/compile/QueryOptimizerTest.java |   5 +-
 5 files changed, 186 insertions(+), 14 deletions(-)

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
index 3a79b65..b69c02b 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
@@ -52,6 +52,7 @@ import org.apache.phoenix.query.BaseTest;
 import org.apache.phoenix.query.KeyRange;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
+import org.apache.phoenix.schema.ColumnAlreadyExistsException;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.ImmutableStorageScheme;
 import org.apache.phoenix.schema.PTable.QualifierEncodingScheme;
@@ -91,6 +92,136 @@ public class CreateTableIT extends ParallelStatsDisabledIT {
     }
 
     @Test
+    public void testCreateAlterTableWithDuplicateColumn() throws Exception {
+        Properties props = new Properties();
+        int failureCount = 0;
+        int expectedExecCount = 0;
+        String tableName = generateUniqueName();
+        String viewName = generateUniqueName();
+        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+            try {
+                // Case 1 - Adding a column as "Default_CF.Column" in
+                // CREATE TABLE query where "Column" is same as single PK Column
+                conn.createStatement().execute(String.format("CREATE TABLE %s"
+                        + " (name VARCHAR NOT NULL PRIMARY KEY, city VARCHAR,"
+                        + " name1 VARCHAR, name VARCHAR)", tableName));
+                fail("Should have failed with ColumnAlreadyExistsException");
+            } catch (ColumnAlreadyExistsException e) {
+                // expected
+                failureCount++;
+            }
+            try {
+                // Case 2 - Adding a column as "Default_CF.Column" in CREATE
+                // TABLE query where "Default_CF.Column" already exists as non-Pk column
+                conn.createStatement().execute(String.format("CREATE TABLE %s"
+                        + " (name VARCHAR NOT NULL PRIMARY KEY, city VARCHAR,"
+                        + " name1 VARCHAR, name1 VARCHAR)", tableName));
+                fail("Should have failed with ColumnAlreadyExistsException");
+            } catch (ColumnAlreadyExistsException e) {
+                // expected
+                failureCount++;
+            }
+            try {
+                // Case 3 - Adding a column as "Default_CF.Column" in CREATE
+                // TABLE query where "Column" is same as single PK Column.
+                // The only diff from Case 1 is that both PK Column and
+                // "Default_CF.Column" are of different DataType
+                conn.createStatement().execute(String.format("CREATE TABLE %s"
+                        + " (key1 VARCHAR NOT NULL PRIMARY KEY, city VARCHAR,"
+                        + " name1 VARCHAR, key1 INTEGER)", tableName));
+                fail("Should have failed with ColumnAlreadyExistsException");
+            } catch (ColumnAlreadyExistsException e) {
+                // expected
+                failureCount++;
+            }
+            try {
+                // Case 4 - Adding a column as "Default_CF.Column" in
+                // CREATE TABLE query where "Column" is same as one of the
+                // PK Column from Composite PK Columns
+                conn.createStatement().execute(String.format("CREATE TABLE %s"
+                        + " (name VARCHAR NOT NULL, name1 VARCHAR NOT NULL,"
+                        + " name2 VARCHAR, name VARCHAR"
+                        + " CONSTRAINT PK PRIMARY KEY(name, name1))", tableName));
+                fail("Should have failed with ColumnAlreadyExistsException");
+            } catch (ColumnAlreadyExistsException e) {
+                // expected
+                failureCount++;
+            }
+            try {
+                // Case 5 - Adding a column as "Default_CF.Column" in
+                // CREATE VIEW query where "Column" is same as one of the
+                // PK Column from Composite PK Columns derived from parent table
+                conn.createStatement().execute(String.format("CREATE TABLE %s"
+                        + " (name VARCHAR NOT NULL, name1 VARCHAR NOT NULL,"
+                        + " name2 VARCHAR, name3 VARCHAR"
+                        + " CONSTRAINT PK PRIMARY KEY(name, name1))", tableName));
+                expectedExecCount++;
+                conn.createStatement().execute(String.format("CREATE VIEW %s"
+                        + " (name1 CHAR(5)) AS SELECT * FROM %s", viewName, tableName));
+                fail("Should have failed with ColumnAlreadyExistsException");
+            } catch (ColumnAlreadyExistsException e) {
+                // expected
+                failureCount++;
+            }
+            try {
+                // Case 6 - Adding a column as "Default_CF.Column" in
+                // ALTER TABLE query where "Column" is same as one of the
+                // PK Column from Composite PK Columns
+                conn.createStatement().execute(
+                        String.format("DROP TABLE %s", tableName));
+                conn.createStatement().execute(String.format("CREATE TABLE %s"
+                        + " (name VARCHAR NOT NULL, name1 VARCHAR NOT NULL,"
+                        + " name2 VARCHAR, name3 VARCHAR"
+                        + " CONSTRAINT PK PRIMARY KEY(name, name1))", tableName));
+                expectedExecCount++;
+                conn.createStatement().execute(
+                        String.format("ALTER TABLE %s ADD name1 INTEGER", tableName));
+                fail("Should have failed with ColumnAlreadyExistsException");
+            } catch (ColumnAlreadyExistsException e) {
+                // expected
+                failureCount++;
+            }
+            try {
+                // Case 7 - Adding a column as "Default_CF.Column" in
+                // ALTER TABLE query where "Column" is same as single PK Column
+                conn.createStatement().execute(
+                        String.format("DROP TABLE %s", tableName));
+                conn.createStatement().execute(String.format("CREATE TABLE %s"
+                        + " (name VARCHAR NOT NULL PRIMARY KEY, city VARCHAR,"
+                        + " name1 VARCHAR, name2 VARCHAR)", tableName));
+                expectedExecCount++;
+                conn.createStatement().execute(
+                        String.format("ALTER TABLE %s ADD name VARCHAR", tableName));
+                fail("Should have failed with ColumnAlreadyExistsException");
+            } catch (ColumnAlreadyExistsException e) {
+                // expected
+                failureCount++;
+            }
+            assertEquals(7, failureCount);
+            assertEquals(3, expectedExecCount);
+
+            conn.createStatement().execute(
+                    String.format("DROP TABLE %s", tableName));
+            // Case 8 - Adding a column as "Non_Default_CF.Column" in
+            // CREATE TABLE query where "Column" is same as single PK Column
+            // Hence, we allow creating such column
+            conn.createStatement().execute(String.format("CREATE TABLE %s"
+                    + " (name VARCHAR NOT NULL PRIMARY KEY, city VARCHAR,"
+                    + " name1 VARCHAR, a.name VARCHAR)", tableName));
+            conn.createStatement().execute(
+                    String.format("DROP TABLE %s", tableName));
+            // Case 9 - Adding a column as "Non_Default_CF.Column" in
+            // ALTER TABLE query where "Column" is same as single PK Column
+            // Hence, we allow creating such column
+            conn.createStatement().execute(String.format("CREATE TABLE %s"
+                    + " (name VARCHAR NOT NULL PRIMARY KEY, city VARCHAR,"
+                    + " name1 VARCHAR, name2 VARCHAR)", tableName));
+            conn.createStatement().execute(
+                    String.format("ALTER TABLE %s ADD a.name VARCHAR", tableName));
+        }
+    }
+
+    @Test
     public void testCreateTable() throws Exception {
         String schemaName = "TEST";
         String tableName = schemaName + generateUniqueName();
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/monitoring/PhoenixLoggingMetricsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/monitoring/PhoenixLoggingMetricsIT.java
index 483d341..b2413ea 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/monitoring/PhoenixLoggingMetricsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/monitoring/PhoenixLoggingMetricsIT.java
@@ -31,6 +31,7 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.Map;
 
+import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 
 public class PhoenixLoggingMetricsIT extends BasePhoenixMetricsIT {
@@ -74,28 +75,28 @@ public class PhoenixLoggingMetricsIT extends BasePhoenixMetricsIT {
         String tableName3 = generateUniqueName();
 
         String create = "CREATE TABLE " + tableName3 + " (K INTEGER PRIMARY KEY)";
-        assertTrue(executeAndGetResultSet(create) == null);
+        assertNull(executeAndGetResultSet(create));
 
         String upsert = "UPSERT INTO " + tableName3 + " VALUES (42)";
-        assertTrue(executeAndGetResultSet(upsert) == null);
+        assertNull(executeAndGetResultSet(upsert));
 
         String select = "SELECT * FROM " + tableName3;
         assertTrue(executeAndGetResultSet(select) instanceof LoggingPhoenixResultSet);
 
-        String createView = "CREATE VIEW TEST_VIEW (K INTEGER) AS SELECT * FROM " + tableName3;
-        assertTrue(executeAndGetResultSet(createView) == null);
+        String createView = "CREATE VIEW TEST_VIEW (K1 INTEGER) AS SELECT * FROM " + tableName3;
+        assertNull(executeAndGetResultSet(createView));
 
         String createIndex = "CREATE INDEX TEST_INDEX ON " + tableName3 + " (K)";
-        assertTrue(executeAndGetResultSet(createIndex) == null);
+        assertNull(executeAndGetResultSet(createIndex));
 
         String dropIndex = "DROP INDEX TEST_INDEX ON " + tableName3;
-        assertTrue(executeAndGetResultSet(dropIndex) == null);
+        assertNull(executeAndGetResultSet(dropIndex));
 
         String dropView = "DROP VIEW TEST_VIEW";
-        assertTrue(executeAndGetResultSet(dropView) == null);
+        assertNull(executeAndGetResultSet(dropView));
 
         String dropTable = "DROP TABLE " + tableName3;
-        assertTrue(executeAndGetResultSet(dropTable) == null);
+        assertNull(executeAndGetResultSet(dropTable));
     }
 
     @Test
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/AddColumnMutator.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/AddColumnMutator.java
index 7e74c3d..cf6d098 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/AddColumnMutator.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/AddColumnMutator.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
+import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.ColumnFamilyNotFoundException;
 import org.apache.phoenix.schema.ColumnNotFoundException;
 import org.apache.phoenix.schema.PColumn;
@@ -333,6 +334,11 @@ public class AddColumnMutator implements ColumnMutator {
                                 EnvironmentEdgeManager.currentTimeMillis(), null);
                     }
                     if (familyName!=null && familyName.length > 0) {
+                        MetaDataMutationResult result =
+                                compareWithPkColumns(colName, table, familyName);
+                        if (result != null) {
+                            return result;
+                        }
                         PColumnFamily family =
                                 table.getColumnFamily(familyName);
                         family.getPColumnForColumnNameBytes(colName);
@@ -434,6 +440,24 @@ public class AddColumnMutator implements ColumnMutator {
         return null;
     }
 
+    private MetaDataMutationResult compareWithPkColumns(byte[] colName,
+            PTable table, byte[] familyName) {
+        // check if column is matching with any of pk columns if given
+        // column belongs to default CF
+        if (Bytes.compareTo(familyName, QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES) == 0
+                && colName != null && colName.length > 0) {
+            for (PColumn pColumn : table.getPKColumns()) {
+                if (Bytes.compareTo(
+                        pColumn.getName().getBytes(), colName) == 0) {
+                    return new MetaDataProtocol.MetaDataMutationResult(
+                            MetaDataProtocol.MutationCode.COLUMN_ALREADY_EXISTS,
+                            EnvironmentEdgeManager.currentTimeMillis(), table);
+                }
+            }
+        }
+        return null;
+    }
+
     @Override
     public List<Pair<PTable, PColumn>> getTableAndDroppedColumnPairs() {
         return Collections.emptyList();
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
index 6b54b53..448a8fc 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
@@ -2635,6 +2635,10 @@ public class MetaDataClient {
             // Keep track of all columns that are newly added to a view
             Set<Integer> viewNewColumnPositions =
                     Sets.newHashSetWithExpectedSize(colDefs.size());
+            Set<String> pkColumnNames = new HashSet<>();
+            for (PColumn pColumn : pkColumns) {
+                pkColumnNames.add(pColumn.getName().toString());
+            }
             for (ColumnDef colDef : colDefs) {
                 rowTimeStampColumnAlreadyFound = checkAndValidateRowTimestampCol(colDef, pkConstraint, rowTimeStampColumnAlreadyFound, tableType);
                 if (colDef.isPK()) { // i.e. the column is declared as CREATE TABLE COLNAME DATATYPE PRIMARY KEY...
@@ -2698,11 +2702,16 @@ public class MetaDataClient {
                         throw new ColumnAlreadyExistsException(schemaName, tableName, column.getName().getString());
                     }
                 }
-                if (columns.put(column, column) != null) {
-                    throw new ColumnAlreadyExistsException(schemaName, tableName, column.getName().getString());
+                // check for duplicate column
+                if (isDuplicateColumn(columns, pkColumnNames, column)) {
+                    throw new ColumnAlreadyExistsException(schemaName, tableName,
+                            column.getName().getString());
                 } else if (tableType == VIEW) {
                     viewNewColumnPositions.add(column.getPosition());
                 }
+                if (isPkColumn) {
+                    pkColumnNames.add(column.getName().toString());
+                }
                 if ((colDef.getDataType() == PVarbinary.INSTANCE || colDef.getDataType().isArrayType())
                         && SchemaUtil.isPKColumn(column)
                         && pkColumnsIterator.hasNext()) {
@@ -3180,6 +3189,16 @@ public class MetaDataClient {
         }
     }
 
+    private boolean isDuplicateColumn(LinkedHashMap<PColumn, PColumn> columns,
+            Set<String> pkColumnNames, PColumn column) {
+        // either column name is same within same CF or column name within
+        // default CF is same as any of PK column
+        return columns.put(column, column) != null
+                || (column.getFamilyName() != null
+                && DEFAULT_COLUMN_FAMILY.equals(column.getFamilyName().toString())
+                && pkColumnNames.contains(column.getName().toString()));
+    }
+
     private void verifyChangeDetectionTableType(PTableType tableType, Boolean isChangeDetectionEnabledProp) throws SQLException {
         if (isChangeDetectionEnabledProp != null && isChangeDetectionEnabledProp) {
             if (tableType != TABLE && tableType != VIEW) {
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryOptimizerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryOptimizerTest.java
index 5810fed..7b137e9 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryOptimizerTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryOptimizerTest.java
@@ -65,10 +65,7 @@ import com.google.common.base.Splitter;
 public class QueryOptimizerTest extends BaseConnectionlessQueryTest {
     
     public static final String SCHEMA_NAME = "";
-    public static final String DATA_TABLE_NAME = "T";
-    public static final String INDEX_TABLE_NAME = "I";
     public static final String DATA_TABLE_FULL_NAME = SchemaUtil.getTableName(SCHEMA_NAME, "T");
-    public static final String INDEX_TABLE_FULL_NAME = SchemaUtil.getTableName(SCHEMA_NAME, "I");
 
     public QueryOptimizerTest() {
     }
@@ -515,7 +512,7 @@ public class QueryOptimizerTest extends BaseConnectionlessQueryTest {
             
             // create a tenant specific view if multi-tenant
             if (multitenant) {
-                conn.createStatement().execute("CREATE VIEW ABC_VIEW (ORGANIZATION_ID VARCHAR) AS SELECT * FROM XYZ.ABC");
+                conn.createStatement().execute("CREATE VIEW ABC_VIEW (ORG_ID VARCHAR) AS SELECT * FROM XYZ.ABC");
             }
             
             String expectedColNames = multitenant ? addQuotes(null, "DEC,A_STRING_ARRAY") : addQuotes(null,"ORGANIZATION_ID,DEC,A_STRING_ARRAY");
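
Editor's note (not part of the commit): PHOENIX-6343 rejects a default-column-family column whose name collides with a primary key column, while still allowing the same name in an explicit, non-default column family (see isDuplicateColumn in MetaDataClient and compareWithPkColumns in AddColumnMutator above). A minimal sketch, assuming a hypothetical table DUP_COL_EXAMPLE and an illustrative connection URL:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import org.apache.phoenix.schema.ColumnAlreadyExistsException;

    public class DuplicateColumnExample {
        public static void main(String[] args) throws Exception {
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
                try {
                    // A default-CF column that shadows the PK column is now rejected.
                    conn.createStatement().execute(
                            "CREATE TABLE DUP_COL_EXAMPLE ("
                            + " name VARCHAR NOT NULL PRIMARY KEY, city VARCHAR, name VARCHAR)");
                } catch (ColumnAlreadyExistsException e) {
                    // expected after this change
                }
                // A column in an explicit, non-default column family ("a") may still
                // reuse the PK column name.
                conn.createStatement().execute(
                        "CREATE TABLE DUP_COL_EXAMPLE ("
                        + " name VARCHAR NOT NULL PRIMARY KEY, city VARCHAR, a.name VARCHAR)");
            }
        }
    }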