You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ha...@apache.org on 2013/03/02 23:38:08 UTC
svn commit: r1451954 [4/27] - in /hive/branches/ptf-windowing: ./
cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/conf/
common/src/java/org/apache/hadoop/hive/conf/ conf/
contrib/src/test/results/clientpositive/ data/conf/ data/files/ hbase-ha...
Modified: hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php (original)
+++ hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php Sat Mar 2 22:37:59 2013
@@ -65,6 +65,7 @@ interface ThriftHiveMetastoreIf extends
public function alter_partitions($db_name, $tbl_name, $new_parts);
public function alter_partition_with_environment_context($db_name, $tbl_name, \metastore\Partition $new_part, \metastore\EnvironmentContext $environment_context);
public function rename_partition($db_name, $tbl_name, $part_vals, \metastore\Partition $new_part);
+ public function partition_name_has_valid_characters($part_vals, $throw_exception);
public function get_config_value($name, $defaultValue);
public function partition_name_to_vals($part_name);
public function partition_name_to_spec($part_name);
@@ -2980,6 +2981,61 @@ class ThriftHiveMetastoreClient extends
return;
}
+ public function partition_name_has_valid_characters($part_vals, $throw_exception)
+ {
+ $this->send_partition_name_has_valid_characters($part_vals, $throw_exception);
+ return $this->recv_partition_name_has_valid_characters();
+ }
+
+ public function send_partition_name_has_valid_characters($part_vals, $throw_exception)
+ {
+ $args = new \metastore\ThriftHiveMetastore_partition_name_has_valid_characters_args();
+ $args->part_vals = $part_vals;
+ $args->throw_exception = $throw_exception;
+ $bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary');
+ if ($bin_accel)
+ {
+ thrift_protocol_write_binary($this->output_, 'partition_name_has_valid_characters', TMessageType::CALL, $args, $this->seqid_, $this->output_->isStrictWrite());
+ }
+ else
+ {
+ $this->output_->writeMessageBegin('partition_name_has_valid_characters', TMessageType::CALL, $this->seqid_);
+ $args->write($this->output_);
+ $this->output_->writeMessageEnd();
+ $this->output_->getTransport()->flush();
+ }
+ }
+
+ public function recv_partition_name_has_valid_characters()
+ {
+ $bin_accel = ($this->input_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_read_binary');
+ if ($bin_accel) $result = thrift_protocol_read_binary($this->input_, '\metastore\ThriftHiveMetastore_partition_name_has_valid_characters_result', $this->input_->isStrictRead());
+ else
+ {
+ $rseqid = 0;
+ $fname = null;
+ $mtype = 0;
+
+ $this->input_->readMessageBegin($fname, $mtype, $rseqid);
+ if ($mtype == TMessageType::EXCEPTION) {
+ $x = new TApplicationException();
+ $x->read($this->input_);
+ $this->input_->readMessageEnd();
+ throw $x;
+ }
+ $result = new \metastore\ThriftHiveMetastore_partition_name_has_valid_characters_result();
+ $result->read($this->input_);
+ $this->input_->readMessageEnd();
+ }
+ if ($result->success !== null) {
+ return $result->success;
+ }
+ if ($result->o1 !== null) {
+ throw $result->o1;
+ }
+ throw new \Exception("partition_name_has_valid_characters failed: unknown result");
+ }
+
public function get_config_value($name, $defaultValue)
{
$this->send_get_config_value($name, $defaultValue);
@@ -16829,6 +16885,218 @@ class ThriftHiveMetastore_rename_partiti
}
+class ThriftHiveMetastore_partition_name_has_valid_characters_args {
+ static $_TSPEC;
+
+ public $part_vals = null;
+ public $throw_exception = null;
+
+ public function __construct($vals=null) {
+ if (!isset(self::$_TSPEC)) {
+ self::$_TSPEC = array(
+ 1 => array(
+ 'var' => 'part_vals',
+ 'type' => TType::LST,
+ 'etype' => TType::STRING,
+ 'elem' => array(
+ 'type' => TType::STRING,
+ ),
+ ),
+ 2 => array(
+ 'var' => 'throw_exception',
+ 'type' => TType::BOOL,
+ ),
+ );
+ }
+ if (is_array($vals)) {
+ if (isset($vals['part_vals'])) {
+ $this->part_vals = $vals['part_vals'];
+ }
+ if (isset($vals['throw_exception'])) {
+ $this->throw_exception = $vals['throw_exception'];
+ }
+ }
+ }
+
+ public function getName() {
+ return 'ThriftHiveMetastore_partition_name_has_valid_characters_args';
+ }
+
+ public function read($input)
+ {
+ $xfer = 0;
+ $fname = null;
+ $ftype = 0;
+ $fid = 0;
+ $xfer += $input->readStructBegin($fname);
+ while (true)
+ {
+ $xfer += $input->readFieldBegin($fname, $ftype, $fid);
+ if ($ftype == TType::STOP) {
+ break;
+ }
+ switch ($fid)
+ {
+ case 1:
+ if ($ftype == TType::LST) {
+ $this->part_vals = array();
+ $_size467 = 0;
+ $_etype470 = 0;
+ $xfer += $input->readListBegin($_etype470, $_size467);
+ for ($_i471 = 0; $_i471 < $_size467; ++$_i471)
+ {
+ $elem472 = null;
+ $xfer += $input->readString($elem472);
+ $this->part_vals []= $elem472;
+ }
+ $xfer += $input->readListEnd();
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
+ case 2:
+ if ($ftype == TType::BOOL) {
+ $xfer += $input->readBool($this->throw_exception);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
+ default:
+ $xfer += $input->skip($ftype);
+ break;
+ }
+ $xfer += $input->readFieldEnd();
+ }
+ $xfer += $input->readStructEnd();
+ return $xfer;
+ }
+
+ public function write($output) {
+ $xfer = 0;
+ $xfer += $output->writeStructBegin('ThriftHiveMetastore_partition_name_has_valid_characters_args');
+ if ($this->part_vals !== null) {
+ if (!is_array($this->part_vals)) {
+ throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
+ }
+ $xfer += $output->writeFieldBegin('part_vals', TType::LST, 1);
+ {
+ $output->writeListBegin(TType::STRING, count($this->part_vals));
+ {
+ foreach ($this->part_vals as $iter473)
+ {
+ $xfer += $output->writeString($iter473);
+ }
+ }
+ $output->writeListEnd();
+ }
+ $xfer += $output->writeFieldEnd();
+ }
+ if ($this->throw_exception !== null) {
+ $xfer += $output->writeFieldBegin('throw_exception', TType::BOOL, 2);
+ $xfer += $output->writeBool($this->throw_exception);
+ $xfer += $output->writeFieldEnd();
+ }
+ $xfer += $output->writeFieldStop();
+ $xfer += $output->writeStructEnd();
+ return $xfer;
+ }
+
+}
+
+class ThriftHiveMetastore_partition_name_has_valid_characters_result {
+ static $_TSPEC;
+
+ public $success = null;
+ public $o1 = null;
+
+ public function __construct($vals=null) {
+ if (!isset(self::$_TSPEC)) {
+ self::$_TSPEC = array(
+ 0 => array(
+ 'var' => 'success',
+ 'type' => TType::BOOL,
+ ),
+ 1 => array(
+ 'var' => 'o1',
+ 'type' => TType::STRUCT,
+ 'class' => '\metastore\MetaException',
+ ),
+ );
+ }
+ if (is_array($vals)) {
+ if (isset($vals['success'])) {
+ $this->success = $vals['success'];
+ }
+ if (isset($vals['o1'])) {
+ $this->o1 = $vals['o1'];
+ }
+ }
+ }
+
+ public function getName() {
+ return 'ThriftHiveMetastore_partition_name_has_valid_characters_result';
+ }
+
+ public function read($input)
+ {
+ $xfer = 0;
+ $fname = null;
+ $ftype = 0;
+ $fid = 0;
+ $xfer += $input->readStructBegin($fname);
+ while (true)
+ {
+ $xfer += $input->readFieldBegin($fname, $ftype, $fid);
+ if ($ftype == TType::STOP) {
+ break;
+ }
+ switch ($fid)
+ {
+ case 0:
+ if ($ftype == TType::BOOL) {
+ $xfer += $input->readBool($this->success);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
+ case 1:
+ if ($ftype == TType::STRUCT) {
+ $this->o1 = new \metastore\MetaException();
+ $xfer += $this->o1->read($input);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
+ default:
+ $xfer += $input->skip($ftype);
+ break;
+ }
+ $xfer += $input->readFieldEnd();
+ }
+ $xfer += $input->readStructEnd();
+ return $xfer;
+ }
+
+ public function write($output) {
+ $xfer = 0;
+ $xfer += $output->writeStructBegin('ThriftHiveMetastore_partition_name_has_valid_characters_result');
+ if ($this->success !== null) {
+ $xfer += $output->writeFieldBegin('success', TType::BOOL, 0);
+ $xfer += $output->writeBool($this->success);
+ $xfer += $output->writeFieldEnd();
+ }
+ if ($this->o1 !== null) {
+ $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
+ $xfer += $this->o1->write($output);
+ $xfer += $output->writeFieldEnd();
+ }
+ $xfer += $output->writeFieldStop();
+ $xfer += $output->writeStructEnd();
+ return $xfer;
+ }
+
+}
+
class ThriftHiveMetastore_get_config_value_args {
static $_TSPEC;
@@ -17143,14 +17411,14 @@ class ThriftHiveMetastore_partition_name
case 0:
if ($ftype == TType::LST) {
$this->success = array();
- $_size467 = 0;
- $_etype470 = 0;
- $xfer += $input->readListBegin($_etype470, $_size467);
- for ($_i471 = 0; $_i471 < $_size467; ++$_i471)
+ $_size474 = 0;
+ $_etype477 = 0;
+ $xfer += $input->readListBegin($_etype477, $_size474);
+ for ($_i478 = 0; $_i478 < $_size474; ++$_i478)
{
- $elem472 = null;
- $xfer += $input->readString($elem472);
- $this->success []= $elem472;
+ $elem479 = null;
+ $xfer += $input->readString($elem479);
+ $this->success []= $elem479;
}
$xfer += $input->readListEnd();
} else {
@@ -17186,9 +17454,9 @@ class ThriftHiveMetastore_partition_name
{
$output->writeListBegin(TType::STRING, count($this->success));
{
- foreach ($this->success as $iter473)
+ foreach ($this->success as $iter480)
{
- $xfer += $output->writeString($iter473);
+ $xfer += $output->writeString($iter480);
}
}
$output->writeListEnd();
@@ -17339,17 +17607,17 @@ class ThriftHiveMetastore_partition_name
case 0:
if ($ftype == TType::MAP) {
$this->success = array();
- $_size474 = 0;
- $_ktype475 = 0;
- $_vtype476 = 0;
- $xfer += $input->readMapBegin($_ktype475, $_vtype476, $_size474);
- for ($_i478 = 0; $_i478 < $_size474; ++$_i478)
+ $_size481 = 0;
+ $_ktype482 = 0;
+ $_vtype483 = 0;
+ $xfer += $input->readMapBegin($_ktype482, $_vtype483, $_size481);
+ for ($_i485 = 0; $_i485 < $_size481; ++$_i485)
{
- $key479 = '';
- $val480 = '';
- $xfer += $input->readString($key479);
- $xfer += $input->readString($val480);
- $this->success[$key479] = $val480;
+ $key486 = '';
+ $val487 = '';
+ $xfer += $input->readString($key486);
+ $xfer += $input->readString($val487);
+ $this->success[$key486] = $val487;
}
$xfer += $input->readMapEnd();
} else {
@@ -17385,10 +17653,10 @@ class ThriftHiveMetastore_partition_name
{
$output->writeMapBegin(TType::STRING, TType::STRING, count($this->success));
{
- foreach ($this->success as $kiter481 => $viter482)
+ foreach ($this->success as $kiter488 => $viter489)
{
- $xfer += $output->writeString($kiter481);
- $xfer += $output->writeString($viter482);
+ $xfer += $output->writeString($kiter488);
+ $xfer += $output->writeString($viter489);
}
}
$output->writeMapEnd();
@@ -17496,17 +17764,17 @@ class ThriftHiveMetastore_markPartitionF
case 3:
if ($ftype == TType::MAP) {
$this->part_vals = array();
- $_size483 = 0;
- $_ktype484 = 0;
- $_vtype485 = 0;
- $xfer += $input->readMapBegin($_ktype484, $_vtype485, $_size483);
- for ($_i487 = 0; $_i487 < $_size483; ++$_i487)
+ $_size490 = 0;
+ $_ktype491 = 0;
+ $_vtype492 = 0;
+ $xfer += $input->readMapBegin($_ktype491, $_vtype492, $_size490);
+ for ($_i494 = 0; $_i494 < $_size490; ++$_i494)
{
- $key488 = '';
- $val489 = '';
- $xfer += $input->readString($key488);
- $xfer += $input->readString($val489);
- $this->part_vals[$key488] = $val489;
+ $key495 = '';
+ $val496 = '';
+ $xfer += $input->readString($key495);
+ $xfer += $input->readString($val496);
+ $this->part_vals[$key495] = $val496;
}
$xfer += $input->readMapEnd();
} else {
@@ -17551,10 +17819,10 @@ class ThriftHiveMetastore_markPartitionF
{
$output->writeMapBegin(TType::STRING, TType::STRING, count($this->part_vals));
{
- foreach ($this->part_vals as $kiter490 => $viter491)
+ foreach ($this->part_vals as $kiter497 => $viter498)
{
- $xfer += $output->writeString($kiter490);
- $xfer += $output->writeString($viter491);
+ $xfer += $output->writeString($kiter497);
+ $xfer += $output->writeString($viter498);
}
}
$output->writeMapEnd();
@@ -17846,17 +18114,17 @@ class ThriftHiveMetastore_isPartitionMar
case 3:
if ($ftype == TType::MAP) {
$this->part_vals = array();
- $_size492 = 0;
- $_ktype493 = 0;
- $_vtype494 = 0;
- $xfer += $input->readMapBegin($_ktype493, $_vtype494, $_size492);
- for ($_i496 = 0; $_i496 < $_size492; ++$_i496)
+ $_size499 = 0;
+ $_ktype500 = 0;
+ $_vtype501 = 0;
+ $xfer += $input->readMapBegin($_ktype500, $_vtype501, $_size499);
+ for ($_i503 = 0; $_i503 < $_size499; ++$_i503)
{
- $key497 = '';
- $val498 = '';
- $xfer += $input->readString($key497);
- $xfer += $input->readString($val498);
- $this->part_vals[$key497] = $val498;
+ $key504 = '';
+ $val505 = '';
+ $xfer += $input->readString($key504);
+ $xfer += $input->readString($val505);
+ $this->part_vals[$key504] = $val505;
}
$xfer += $input->readMapEnd();
} else {
@@ -17901,10 +18169,10 @@ class ThriftHiveMetastore_isPartitionMar
{
$output->writeMapBegin(TType::STRING, TType::STRING, count($this->part_vals));
{
- foreach ($this->part_vals as $kiter499 => $viter500)
+ foreach ($this->part_vals as $kiter506 => $viter507)
{
- $xfer += $output->writeString($kiter499);
- $xfer += $output->writeString($viter500);
+ $xfer += $output->writeString($kiter506);
+ $xfer += $output->writeString($viter507);
}
}
$output->writeMapEnd();
@@ -19264,15 +19532,15 @@ class ThriftHiveMetastore_get_indexes_re
case 0:
if ($ftype == TType::LST) {
$this->success = array();
- $_size501 = 0;
- $_etype504 = 0;
- $xfer += $input->readListBegin($_etype504, $_size501);
- for ($_i505 = 0; $_i505 < $_size501; ++$_i505)
+ $_size508 = 0;
+ $_etype511 = 0;
+ $xfer += $input->readListBegin($_etype511, $_size508);
+ for ($_i512 = 0; $_i512 < $_size508; ++$_i512)
{
- $elem506 = null;
- $elem506 = new \metastore\Index();
- $xfer += $elem506->read($input);
- $this->success []= $elem506;
+ $elem513 = null;
+ $elem513 = new \metastore\Index();
+ $xfer += $elem513->read($input);
+ $this->success []= $elem513;
}
$xfer += $input->readListEnd();
} else {
@@ -19316,9 +19584,9 @@ class ThriftHiveMetastore_get_indexes_re
{
$output->writeListBegin(TType::STRUCT, count($this->success));
{
- foreach ($this->success as $iter507)
+ foreach ($this->success as $iter514)
{
- $xfer += $iter507->write($output);
+ $xfer += $iter514->write($output);
}
}
$output->writeListEnd();
@@ -19510,14 +19778,14 @@ class ThriftHiveMetastore_get_index_name
case 0:
if ($ftype == TType::LST) {
$this->success = array();
- $_size508 = 0;
- $_etype511 = 0;
- $xfer += $input->readListBegin($_etype511, $_size508);
- for ($_i512 = 0; $_i512 < $_size508; ++$_i512)
+ $_size515 = 0;
+ $_etype518 = 0;
+ $xfer += $input->readListBegin($_etype518, $_size515);
+ for ($_i519 = 0; $_i519 < $_size515; ++$_i519)
{
- $elem513 = null;
- $xfer += $input->readString($elem513);
- $this->success []= $elem513;
+ $elem520 = null;
+ $xfer += $input->readString($elem520);
+ $this->success []= $elem520;
}
$xfer += $input->readListEnd();
} else {
@@ -19553,9 +19821,9 @@ class ThriftHiveMetastore_get_index_name
{
$output->writeListBegin(TType::STRING, count($this->success));
{
- foreach ($this->success as $iter514)
+ foreach ($this->success as $iter521)
{
- $xfer += $output->writeString($iter514);
+ $xfer += $output->writeString($iter521);
}
}
$output->writeListEnd();
@@ -21629,14 +21897,14 @@ class ThriftHiveMetastore_get_role_names
case 0:
if ($ftype == TType::LST) {
$this->success = array();
- $_size515 = 0;
- $_etype518 = 0;
- $xfer += $input->readListBegin($_etype518, $_size515);
- for ($_i519 = 0; $_i519 < $_size515; ++$_i519)
+ $_size522 = 0;
+ $_etype525 = 0;
+ $xfer += $input->readListBegin($_etype525, $_size522);
+ for ($_i526 = 0; $_i526 < $_size522; ++$_i526)
{
- $elem520 = null;
- $xfer += $input->readString($elem520);
- $this->success []= $elem520;
+ $elem527 = null;
+ $xfer += $input->readString($elem527);
+ $this->success []= $elem527;
}
$xfer += $input->readListEnd();
} else {
@@ -21672,9 +21940,9 @@ class ThriftHiveMetastore_get_role_names
{
$output->writeListBegin(TType::STRING, count($this->success));
{
- foreach ($this->success as $iter521)
+ foreach ($this->success as $iter528)
{
- $xfer += $output->writeString($iter521);
+ $xfer += $output->writeString($iter528);
}
}
$output->writeListEnd();
@@ -22314,15 +22582,15 @@ class ThriftHiveMetastore_list_roles_res
case 0:
if ($ftype == TType::LST) {
$this->success = array();
- $_size522 = 0;
- $_etype525 = 0;
- $xfer += $input->readListBegin($_etype525, $_size522);
- for ($_i526 = 0; $_i526 < $_size522; ++$_i526)
+ $_size529 = 0;
+ $_etype532 = 0;
+ $xfer += $input->readListBegin($_etype532, $_size529);
+ for ($_i533 = 0; $_i533 < $_size529; ++$_i533)
{
- $elem527 = null;
- $elem527 = new \metastore\Role();
- $xfer += $elem527->read($input);
- $this->success []= $elem527;
+ $elem534 = null;
+ $elem534 = new \metastore\Role();
+ $xfer += $elem534->read($input);
+ $this->success []= $elem534;
}
$xfer += $input->readListEnd();
} else {
@@ -22358,9 +22626,9 @@ class ThriftHiveMetastore_list_roles_res
{
$output->writeListBegin(TType::STRUCT, count($this->success));
{
- foreach ($this->success as $iter528)
+ foreach ($this->success as $iter535)
{
- $xfer += $iter528->write($output);
+ $xfer += $iter535->write($output);
}
}
$output->writeListEnd();
@@ -22458,14 +22726,14 @@ class ThriftHiveMetastore_get_privilege_
case 3:
if ($ftype == TType::LST) {
$this->group_names = array();
- $_size529 = 0;
- $_etype532 = 0;
- $xfer += $input->readListBegin($_etype532, $_size529);
- for ($_i533 = 0; $_i533 < $_size529; ++$_i533)
+ $_size536 = 0;
+ $_etype539 = 0;
+ $xfer += $input->readListBegin($_etype539, $_size536);
+ for ($_i540 = 0; $_i540 < $_size536; ++$_i540)
{
- $elem534 = null;
- $xfer += $input->readString($elem534);
- $this->group_names []= $elem534;
+ $elem541 = null;
+ $xfer += $input->readString($elem541);
+ $this->group_names []= $elem541;
}
$xfer += $input->readListEnd();
} else {
@@ -22506,9 +22774,9 @@ class ThriftHiveMetastore_get_privilege_
{
$output->writeListBegin(TType::STRING, count($this->group_names));
{
- foreach ($this->group_names as $iter535)
+ foreach ($this->group_names as $iter542)
{
- $xfer += $output->writeString($iter535);
+ $xfer += $output->writeString($iter542);
}
}
$output->writeListEnd();
@@ -22795,15 +23063,15 @@ class ThriftHiveMetastore_list_privilege
case 0:
if ($ftype == TType::LST) {
$this->success = array();
- $_size536 = 0;
- $_etype539 = 0;
- $xfer += $input->readListBegin($_etype539, $_size536);
- for ($_i540 = 0; $_i540 < $_size536; ++$_i540)
+ $_size543 = 0;
+ $_etype546 = 0;
+ $xfer += $input->readListBegin($_etype546, $_size543);
+ for ($_i547 = 0; $_i547 < $_size543; ++$_i547)
{
- $elem541 = null;
- $elem541 = new \metastore\HiveObjectPrivilege();
- $xfer += $elem541->read($input);
- $this->success []= $elem541;
+ $elem548 = null;
+ $elem548 = new \metastore\HiveObjectPrivilege();
+ $xfer += $elem548->read($input);
+ $this->success []= $elem548;
}
$xfer += $input->readListEnd();
} else {
@@ -22839,9 +23107,9 @@ class ThriftHiveMetastore_list_privilege
{
$output->writeListBegin(TType::STRUCT, count($this->success));
{
- foreach ($this->success as $iter542)
+ foreach ($this->success as $iter549)
{
- $xfer += $iter542->write($output);
+ $xfer += $iter549->write($output);
}
}
$output->writeListEnd();
@@ -23264,14 +23532,14 @@ class ThriftHiveMetastore_set_ugi_args {
case 2:
if ($ftype == TType::LST) {
$this->group_names = array();
- $_size543 = 0;
- $_etype546 = 0;
- $xfer += $input->readListBegin($_etype546, $_size543);
- for ($_i547 = 0; $_i547 < $_size543; ++$_i547)
+ $_size550 = 0;
+ $_etype553 = 0;
+ $xfer += $input->readListBegin($_etype553, $_size550);
+ for ($_i554 = 0; $_i554 < $_size550; ++$_i554)
{
- $elem548 = null;
- $xfer += $input->readString($elem548);
- $this->group_names []= $elem548;
+ $elem555 = null;
+ $xfer += $input->readString($elem555);
+ $this->group_names []= $elem555;
}
$xfer += $input->readListEnd();
} else {
@@ -23304,9 +23572,9 @@ class ThriftHiveMetastore_set_ugi_args {
{
$output->writeListBegin(TType::STRING, count($this->group_names));
{
- foreach ($this->group_names as $iter549)
+ foreach ($this->group_names as $iter556)
{
- $xfer += $output->writeString($iter549);
+ $xfer += $output->writeString($iter556);
}
}
$output->writeListEnd();
@@ -23376,14 +23644,14 @@ class ThriftHiveMetastore_set_ugi_result
case 0:
if ($ftype == TType::LST) {
$this->success = array();
- $_size550 = 0;
- $_etype553 = 0;
- $xfer += $input->readListBegin($_etype553, $_size550);
- for ($_i554 = 0; $_i554 < $_size550; ++$_i554)
+ $_size557 = 0;
+ $_etype560 = 0;
+ $xfer += $input->readListBegin($_etype560, $_size557);
+ for ($_i561 = 0; $_i561 < $_size557; ++$_i561)
{
- $elem555 = null;
- $xfer += $input->readString($elem555);
- $this->success []= $elem555;
+ $elem562 = null;
+ $xfer += $input->readString($elem562);
+ $this->success []= $elem562;
}
$xfer += $input->readListEnd();
} else {
@@ -23419,9 +23687,9 @@ class ThriftHiveMetastore_set_ugi_result
{
$output->writeListBegin(TType::STRING, count($this->success));
{
- foreach ($this->success as $iter556)
+ foreach ($this->success as $iter563)
{
- $xfer += $output->writeString($iter556);
+ $xfer += $output->writeString($iter563);
}
}
$output->writeListEnd();
Modified: hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote (original)
+++ hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote Sat Mar 2 22:37:59 2013
@@ -72,6 +72,7 @@ if len(sys.argv) <= 1 or sys.argv[1] ==
print ' void alter_partitions(string db_name, string tbl_name, new_parts)'
print ' void alter_partition_with_environment_context(string db_name, string tbl_name, Partition new_part, EnvironmentContext environment_context)'
print ' void rename_partition(string db_name, string tbl_name, part_vals, Partition new_part)'
+ print ' bool partition_name_has_valid_characters( part_vals, bool throw_exception)'
print ' string get_config_value(string name, string defaultValue)'
print ' partition_name_to_vals(string part_name)'
print ' partition_name_to_spec(string part_name)'
@@ -448,6 +449,12 @@ elif cmd == 'rename_partition':
sys.exit(1)
pp.pprint(client.rename_partition(args[0],args[1],eval(args[2]),eval(args[3]),))
+elif cmd == 'partition_name_has_valid_characters':
+ if len(args) != 2:
+ print 'partition_name_has_valid_characters requires 2 args'
+ sys.exit(1)
+ pp.pprint(client.partition_name_has_valid_characters(eval(args[0]),eval(args[1]),))
+
elif cmd == 'get_config_value':
if len(args) != 2:
print 'get_config_value requires 2 args'
Modified: hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py (original)
+++ hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py Sat Mar 2 22:37:59 2013
@@ -449,6 +449,14 @@ class Iface(fb303.FacebookService.Iface)
"""
pass
+ def partition_name_has_valid_characters(self, part_vals, throw_exception):
+ """
+ Parameters:
+ - part_vals
+ - throw_exception
+ """
+ pass
+
def get_config_value(self, name, defaultValue):
"""
Parameters:
@@ -2549,6 +2557,40 @@ class Client(fb303.FacebookService.Clien
raise result.o2
return
+ def partition_name_has_valid_characters(self, part_vals, throw_exception):
+ """
+ Parameters:
+ - part_vals
+ - throw_exception
+ """
+ self.send_partition_name_has_valid_characters(part_vals, throw_exception)
+ return self.recv_partition_name_has_valid_characters()
+
+ def send_partition_name_has_valid_characters(self, part_vals, throw_exception):
+ self._oprot.writeMessageBegin('partition_name_has_valid_characters', TMessageType.CALL, self._seqid)
+ args = partition_name_has_valid_characters_args()
+ args.part_vals = part_vals
+ args.throw_exception = throw_exception
+ args.write(self._oprot)
+ self._oprot.writeMessageEnd()
+ self._oprot.trans.flush()
+
+ def recv_partition_name_has_valid_characters(self, ):
+ (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+ if mtype == TMessageType.EXCEPTION:
+ x = TApplicationException()
+ x.read(self._iprot)
+ self._iprot.readMessageEnd()
+ raise x
+ result = partition_name_has_valid_characters_result()
+ result.read(self._iprot)
+ self._iprot.readMessageEnd()
+ if result.success is not None:
+ return result.success
+ if result.o1 is not None:
+ raise result.o1
+ raise TApplicationException(TApplicationException.MISSING_RESULT, "partition_name_has_valid_characters failed: unknown result");
+
def get_config_value(self, name, defaultValue):
"""
Parameters:
@@ -3739,6 +3781,7 @@ class Processor(fb303.FacebookService.Pr
self._processMap["alter_partitions"] = Processor.process_alter_partitions
self._processMap["alter_partition_with_environment_context"] = Processor.process_alter_partition_with_environment_context
self._processMap["rename_partition"] = Processor.process_rename_partition
+ self._processMap["partition_name_has_valid_characters"] = Processor.process_partition_name_has_valid_characters
self._processMap["get_config_value"] = Processor.process_get_config_value
self._processMap["partition_name_to_vals"] = Processor.process_partition_name_to_vals
self._processMap["partition_name_to_spec"] = Processor.process_partition_name_to_spec
@@ -4594,6 +4637,20 @@ class Processor(fb303.FacebookService.Pr
oprot.writeMessageEnd()
oprot.trans.flush()
+ def process_partition_name_has_valid_characters(self, seqid, iprot, oprot):
+ args = partition_name_has_valid_characters_args()
+ args.read(iprot)
+ iprot.readMessageEnd()
+ result = partition_name_has_valid_characters_result()
+ try:
+ result.success = self._handler.partition_name_has_valid_characters(args.part_vals, args.throw_exception)
+ except MetaException as o1:
+ result.o1 = o1
+ oprot.writeMessageBegin("partition_name_has_valid_characters", TMessageType.REPLY, seqid)
+ result.write(oprot)
+ oprot.writeMessageEnd()
+ oprot.trans.flush()
+
def process_get_config_value(self, seqid, iprot, oprot):
args = get_config_value_args()
args.read(iprot)
@@ -13574,6 +13631,158 @@ class rename_partition_result:
def __ne__(self, other):
return not (self == other)
+class partition_name_has_valid_characters_args:
+ """
+ Attributes:
+ - part_vals
+ - throw_exception
+ """
+
+ thrift_spec = (
+ None, # 0
+ (1, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 1
+ (2, TType.BOOL, 'throw_exception', None, None, ), # 2
+ )
+
+ def __init__(self, part_vals=None, throw_exception=None,):
+ self.part_vals = part_vals
+ self.throw_exception = throw_exception
+
+ def read(self, iprot):
+ if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+ fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+ return
+ iprot.readStructBegin()
+ while True:
+ (fname, ftype, fid) = iprot.readFieldBegin()
+ if ftype == TType.STOP:
+ break
+ if fid == 1:
+ if ftype == TType.LIST:
+ self.part_vals = []
+ (_etype470, _size467) = iprot.readListBegin()
+ for _i471 in xrange(_size467):
+ _elem472 = iprot.readString();
+ self.part_vals.append(_elem472)
+ iprot.readListEnd()
+ else:
+ iprot.skip(ftype)
+ elif fid == 2:
+ if ftype == TType.BOOL:
+ self.throw_exception = iprot.readBool();
+ else:
+ iprot.skip(ftype)
+ else:
+ iprot.skip(ftype)
+ iprot.readFieldEnd()
+ iprot.readStructEnd()
+
+ def write(self, oprot):
+ if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+ oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+ return
+ oprot.writeStructBegin('partition_name_has_valid_characters_args')
+ if self.part_vals is not None:
+ oprot.writeFieldBegin('part_vals', TType.LIST, 1)
+ oprot.writeListBegin(TType.STRING, len(self.part_vals))
+ for iter473 in self.part_vals:
+ oprot.writeString(iter473)
+ oprot.writeListEnd()
+ oprot.writeFieldEnd()
+ if self.throw_exception is not None:
+ oprot.writeFieldBegin('throw_exception', TType.BOOL, 2)
+ oprot.writeBool(self.throw_exception)
+ oprot.writeFieldEnd()
+ oprot.writeFieldStop()
+ oprot.writeStructEnd()
+
+ def validate(self):
+ return
+
+
+ def __repr__(self):
+ L = ['%s=%r' % (key, value)
+ for key, value in self.__dict__.iteritems()]
+ return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+ def __ne__(self, other):
+ return not (self == other)
+
+class partition_name_has_valid_characters_result:
+ """
+ Attributes:
+ - success
+ - o1
+ """
+
+ thrift_spec = (
+ (0, TType.BOOL, 'success', None, None, ), # 0
+ (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
+ )
+
+ def __init__(self, success=None, o1=None,):
+ self.success = success
+ self.o1 = o1
+
+ def read(self, iprot):
+ if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+ fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+ return
+ iprot.readStructBegin()
+ while True:
+ (fname, ftype, fid) = iprot.readFieldBegin()
+ if ftype == TType.STOP:
+ break
+ if fid == 0:
+ if ftype == TType.BOOL:
+ self.success = iprot.readBool();
+ else:
+ iprot.skip(ftype)
+ elif fid == 1:
+ if ftype == TType.STRUCT:
+ self.o1 = MetaException()
+ self.o1.read(iprot)
+ else:
+ iprot.skip(ftype)
+ else:
+ iprot.skip(ftype)
+ iprot.readFieldEnd()
+ iprot.readStructEnd()
+
+ def write(self, oprot):
+ if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+ oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+ return
+ oprot.writeStructBegin('partition_name_has_valid_characters_result')
+ if self.success is not None:
+ oprot.writeFieldBegin('success', TType.BOOL, 0)
+ oprot.writeBool(self.success)
+ oprot.writeFieldEnd()
+ if self.o1 is not None:
+ oprot.writeFieldBegin('o1', TType.STRUCT, 1)
+ self.o1.write(oprot)
+ oprot.writeFieldEnd()
+ oprot.writeFieldStop()
+ oprot.writeStructEnd()
+
+ def validate(self):
+ return
+
+
+ def __repr__(self):
+ L = ['%s=%r' % (key, value)
+ for key, value in self.__dict__.iteritems()]
+ return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+ def __ne__(self, other):
+ return not (self == other)
+
class get_config_value_args:
"""
Attributes:
@@ -13806,10 +14015,10 @@ class partition_name_to_vals_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype470, _size467) = iprot.readListBegin()
- for _i471 in xrange(_size467):
- _elem472 = iprot.readString();
- self.success.append(_elem472)
+ (_etype477, _size474) = iprot.readListBegin()
+ for _i478 in xrange(_size474):
+ _elem479 = iprot.readString();
+ self.success.append(_elem479)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -13832,8 +14041,8 @@ class partition_name_to_vals_result:
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRING, len(self.success))
- for iter473 in self.success:
- oprot.writeString(iter473)
+ for iter480 in self.success:
+ oprot.writeString(iter480)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.o1 is not None:
@@ -13946,11 +14155,11 @@ class partition_name_to_spec_result:
if fid == 0:
if ftype == TType.MAP:
self.success = {}
- (_ktype475, _vtype476, _size474 ) = iprot.readMapBegin()
- for _i478 in xrange(_size474):
- _key479 = iprot.readString();
- _val480 = iprot.readString();
- self.success[_key479] = _val480
+ (_ktype482, _vtype483, _size481 ) = iprot.readMapBegin()
+ for _i485 in xrange(_size481):
+ _key486 = iprot.readString();
+ _val487 = iprot.readString();
+ self.success[_key486] = _val487
iprot.readMapEnd()
else:
iprot.skip(ftype)
@@ -13973,9 +14182,9 @@ class partition_name_to_spec_result:
if self.success is not None:
oprot.writeFieldBegin('success', TType.MAP, 0)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success))
- for kiter481,viter482 in self.success.items():
- oprot.writeString(kiter481)
- oprot.writeString(viter482)
+ for kiter488,viter489 in self.success.items():
+ oprot.writeString(kiter488)
+ oprot.writeString(viter489)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.o1 is not None:
@@ -14045,11 +14254,11 @@ class markPartitionForEvent_args:
elif fid == 3:
if ftype == TType.MAP:
self.part_vals = {}
- (_ktype484, _vtype485, _size483 ) = iprot.readMapBegin()
- for _i487 in xrange(_size483):
- _key488 = iprot.readString();
- _val489 = iprot.readString();
- self.part_vals[_key488] = _val489
+ (_ktype491, _vtype492, _size490 ) = iprot.readMapBegin()
+ for _i494 in xrange(_size490):
+ _key495 = iprot.readString();
+ _val496 = iprot.readString();
+ self.part_vals[_key495] = _val496
iprot.readMapEnd()
else:
iprot.skip(ftype)
@@ -14079,9 +14288,9 @@ class markPartitionForEvent_args:
if self.part_vals is not None:
oprot.writeFieldBegin('part_vals', TType.MAP, 3)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.part_vals))
- for kiter490,viter491 in self.part_vals.items():
- oprot.writeString(kiter490)
- oprot.writeString(viter491)
+ for kiter497,viter498 in self.part_vals.items():
+ oprot.writeString(kiter497)
+ oprot.writeString(viter498)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.eventType is not None:
@@ -14277,11 +14486,11 @@ class isPartitionMarkedForEvent_args:
elif fid == 3:
if ftype == TType.MAP:
self.part_vals = {}
- (_ktype493, _vtype494, _size492 ) = iprot.readMapBegin()
- for _i496 in xrange(_size492):
- _key497 = iprot.readString();
- _val498 = iprot.readString();
- self.part_vals[_key497] = _val498
+ (_ktype500, _vtype501, _size499 ) = iprot.readMapBegin()
+ for _i503 in xrange(_size499):
+ _key504 = iprot.readString();
+ _val505 = iprot.readString();
+ self.part_vals[_key504] = _val505
iprot.readMapEnd()
else:
iprot.skip(ftype)
@@ -14311,9 +14520,9 @@ class isPartitionMarkedForEvent_args:
if self.part_vals is not None:
oprot.writeFieldBegin('part_vals', TType.MAP, 3)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.part_vals))
- for kiter499,viter500 in self.part_vals.items():
- oprot.writeString(kiter499)
- oprot.writeString(viter500)
+ for kiter506,viter507 in self.part_vals.items():
+ oprot.writeString(kiter506)
+ oprot.writeString(viter507)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.eventType is not None:
@@ -15285,11 +15494,11 @@ class get_indexes_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype504, _size501) = iprot.readListBegin()
- for _i505 in xrange(_size501):
- _elem506 = Index()
- _elem506.read(iprot)
- self.success.append(_elem506)
+ (_etype511, _size508) = iprot.readListBegin()
+ for _i512 in xrange(_size508):
+ _elem513 = Index()
+ _elem513.read(iprot)
+ self.success.append(_elem513)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -15318,8 +15527,8 @@ class get_indexes_result:
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
- for iter507 in self.success:
- iter507.write(oprot)
+ for iter514 in self.success:
+ iter514.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.o1 is not None:
@@ -15460,10 +15669,10 @@ class get_index_names_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype511, _size508) = iprot.readListBegin()
- for _i512 in xrange(_size508):
- _elem513 = iprot.readString();
- self.success.append(_elem513)
+ (_etype518, _size515) = iprot.readListBegin()
+ for _i519 in xrange(_size515):
+ _elem520 = iprot.readString();
+ self.success.append(_elem520)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -15486,8 +15695,8 @@ class get_index_names_result:
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRING, len(self.success))
- for iter514 in self.success:
- oprot.writeString(iter514)
+ for iter521 in self.success:
+ oprot.writeString(iter521)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.o2 is not None:
@@ -16997,10 +17206,10 @@ class get_role_names_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype518, _size515) = iprot.readListBegin()
- for _i519 in xrange(_size515):
- _elem520 = iprot.readString();
- self.success.append(_elem520)
+ (_etype525, _size522) = iprot.readListBegin()
+ for _i526 in xrange(_size522):
+ _elem527 = iprot.readString();
+ self.success.append(_elem527)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -17023,8 +17232,8 @@ class get_role_names_result:
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRING, len(self.success))
- for iter521 in self.success:
- oprot.writeString(iter521)
+ for iter528 in self.success:
+ oprot.writeString(iter528)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.o1 is not None:
@@ -17497,11 +17706,11 @@ class list_roles_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype525, _size522) = iprot.readListBegin()
- for _i526 in xrange(_size522):
- _elem527 = Role()
- _elem527.read(iprot)
- self.success.append(_elem527)
+ (_etype532, _size529) = iprot.readListBegin()
+ for _i533 in xrange(_size529):
+ _elem534 = Role()
+ _elem534.read(iprot)
+ self.success.append(_elem534)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -17524,8 +17733,8 @@ class list_roles_result:
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
- for iter528 in self.success:
- iter528.write(oprot)
+ for iter535 in self.success:
+ iter535.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.o1 is not None:
@@ -17593,10 +17802,10 @@ class get_privilege_set_args:
elif fid == 3:
if ftype == TType.LIST:
self.group_names = []
- (_etype532, _size529) = iprot.readListBegin()
- for _i533 in xrange(_size529):
- _elem534 = iprot.readString();
- self.group_names.append(_elem534)
+ (_etype539, _size536) = iprot.readListBegin()
+ for _i540 in xrange(_size536):
+ _elem541 = iprot.readString();
+ self.group_names.append(_elem541)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -17621,8 +17830,8 @@ class get_privilege_set_args:
if self.group_names is not None:
oprot.writeFieldBegin('group_names', TType.LIST, 3)
oprot.writeListBegin(TType.STRING, len(self.group_names))
- for iter535 in self.group_names:
- oprot.writeString(iter535)
+ for iter542 in self.group_names:
+ oprot.writeString(iter542)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -17829,11 +18038,11 @@ class list_privileges_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype539, _size536) = iprot.readListBegin()
- for _i540 in xrange(_size536):
- _elem541 = HiveObjectPrivilege()
- _elem541.read(iprot)
- self.success.append(_elem541)
+ (_etype546, _size543) = iprot.readListBegin()
+ for _i547 in xrange(_size543):
+ _elem548 = HiveObjectPrivilege()
+ _elem548.read(iprot)
+ self.success.append(_elem548)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -17856,8 +18065,8 @@ class list_privileges_result:
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
- for iter542 in self.success:
- iter542.write(oprot)
+ for iter549 in self.success:
+ iter549.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.o1 is not None:
@@ -18182,10 +18391,10 @@ class set_ugi_args:
elif fid == 2:
if ftype == TType.LIST:
self.group_names = []
- (_etype546, _size543) = iprot.readListBegin()
- for _i547 in xrange(_size543):
- _elem548 = iprot.readString();
- self.group_names.append(_elem548)
+ (_etype553, _size550) = iprot.readListBegin()
+ for _i554 in xrange(_size550):
+ _elem555 = iprot.readString();
+ self.group_names.append(_elem555)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -18206,8 +18415,8 @@ class set_ugi_args:
if self.group_names is not None:
oprot.writeFieldBegin('group_names', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.group_names))
- for iter549 in self.group_names:
- oprot.writeString(iter549)
+ for iter556 in self.group_names:
+ oprot.writeString(iter556)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -18256,10 +18465,10 @@ class set_ugi_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype553, _size550) = iprot.readListBegin()
- for _i554 in xrange(_size550):
- _elem555 = iprot.readString();
- self.success.append(_elem555)
+ (_etype560, _size557) = iprot.readListBegin()
+ for _i561 in xrange(_size557):
+ _elem562 = iprot.readString();
+ self.success.append(_elem562)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -18282,8 +18491,8 @@ class set_ugi_result:
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRING, len(self.success))
- for iter556 in self.success:
- oprot.writeString(iter556)
+ for iter563 in self.success:
+ oprot.writeString(iter563)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.o1 is not None:
Modified: hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb (original)
+++ hive/branches/ptf-windowing/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb Sat Mar 2 22:37:59 2013
@@ -844,6 +844,22 @@ module ThriftHiveMetastore
return
end
+ def partition_name_has_valid_characters(part_vals, throw_exception)
+ send_partition_name_has_valid_characters(part_vals, throw_exception)
+ return recv_partition_name_has_valid_characters()
+ end
+
+ def send_partition_name_has_valid_characters(part_vals, throw_exception)
+ send_message('partition_name_has_valid_characters', Partition_name_has_valid_characters_args, :part_vals => part_vals, :throw_exception => throw_exception)
+ end
+
+ def recv_partition_name_has_valid_characters()
+ result = receive_message(Partition_name_has_valid_characters_result)
+ return result.success unless result.success.nil?
+ raise result.o1 unless result.o1.nil?
+ raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'partition_name_has_valid_characters failed: unknown result')
+ end
+
def get_config_value(name, defaultValue)
send_get_config_value(name, defaultValue)
return recv_get_config_value()
@@ -2037,6 +2053,17 @@ module ThriftHiveMetastore
write_result(result, oprot, 'rename_partition', seqid)
end
+ def process_partition_name_has_valid_characters(seqid, iprot, oprot)
+ args = read_args(iprot, Partition_name_has_valid_characters_args)
+ result = Partition_name_has_valid_characters_result.new()
+ begin
+ result.success = @handler.partition_name_has_valid_characters(args.part_vals, args.throw_exception)
+ rescue ::MetaException => o1
+ result.o1 = o1
+ end
+ write_result(result, oprot, 'partition_name_has_valid_characters', seqid)
+ end
+
def process_get_config_value(seqid, iprot, oprot)
args = read_args(iprot, Get_config_value_args)
result = Get_config_value_result.new()
@@ -4387,6 +4414,42 @@ module ThriftHiveMetastore
::Thrift::Struct.generate_accessors self
end
+ class Partition_name_has_valid_characters_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ PART_VALS = 1
+ THROW_EXCEPTION = 2
+
+ FIELDS = {
+ PART_VALS => {:type => ::Thrift::Types::LIST, :name => 'part_vals', :element => {:type => ::Thrift::Types::STRING}},
+ THROW_EXCEPTION => {:type => ::Thrift::Types::BOOL, :name => 'throw_exception'}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Partition_name_has_valid_characters_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ SUCCESS = 0
+ O1 = 1
+
+ FIELDS = {
+ SUCCESS => {:type => ::Thrift::Types::BOOL, :name => 'success'},
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::MetaException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
class Get_config_value_args
include ::Thrift::Struct, ::Thrift::Struct_Union
NAME = 1
Modified: hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java (original)
+++ hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java Sat Mar 2 22:37:59 2013
@@ -64,7 +64,7 @@ public class HiveAlterHandler implements
}
if (!MetaStoreUtils.validateName(newt.getTableName())
- || !MetaStoreUtils.validateColNames(newt.getSd().getCols())) {
+ || !MetaStoreUtils.validateTblColumns(newt.getSd().getCols())) {
throw new InvalidOperationException(newt.getTableName()
+ " is not a valid object name");
}
@@ -425,8 +425,9 @@ public class HiveAlterHandler implements
while (oldPartKeysIter.hasNext()) {
oldFs = oldPartKeysIter.next();
newFs = newPartKeysIter.next();
- if (!oldFs.getName().equals(newFs.getName()) ||
- !oldFs.getType().equals(newFs.getType())) {
+ // Alter table can change the type of partition key now.
+ // So check the column name only.
+ if (!oldFs.getName().equals(newFs.getName())) {
return false;
}
}
Modified: hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Sat Mar 2 22:37:59 2013
@@ -295,6 +295,7 @@ public class HiveMetaStore extends Thrif
private List<MetaStoreEventListener> listeners;
private List<MetaStoreEndFunctionListener> endFunctionListeners;
private List<MetaStoreInitListener> initListeners;
+ private Pattern partitionValidationPattern;
{
classLoader = Thread.currentThread().getContextClassLoader();
@@ -344,6 +345,14 @@ public class HiveMetaStore extends Thrif
MetaStoreEndFunctionListener.class, hiveConf,
hiveConf.getVar(HiveConf.ConfVars.METASTORE_END_FUNCTION_LISTENERS));
+ String partitionValidationRegex =
+ hiveConf.getVar(HiveConf.ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN);
+ if (partitionValidationRegex != null && partitionValidationRegex != "") {
+ partitionValidationPattern = Pattern.compile(partitionValidationRegex);
+ } else {
+ partitionValidationPattern = null;
+ }
+
long cleanFreq = hiveConf.getLongVar(ConfVars.METASTORE_EVENT_CLEAN_FREQ) * 1000L;
if (cleanFreq > 0) {
// In default config, there is no timer.
@@ -995,9 +1004,9 @@ public class HiveMetaStore extends Thrif
InvalidObjectException, NoSuchObjectException {
if (!MetaStoreUtils.validateName(tbl.getTableName())
- || !MetaStoreUtils.validateColNames(tbl.getSd().getCols())
+ || !MetaStoreUtils.validateTblColumns(tbl.getSd().getCols())
|| (tbl.getPartitionKeys() != null && !MetaStoreUtils
- .validateColNames(tbl.getPartitionKeys()))
+ .validateTblColumns(tbl.getPartitionKeys()))
|| !MetaStoreUtils.validateSkewedColNames(
(null == tbl.getSd().getSkewedInfo()) ?
null : tbl.getSd().getSkewedInfo().getSkewedColNames())
@@ -1485,6 +1494,8 @@ public class HiveMetaStore extends Thrif
PreAddPartitionEvent event = new PreAddPartitionEvent(part, this);
firePreEvent(event);
+ MetaStoreUtils.validatePartitionNameCharacters(part_vals, partitionValidationPattern);
+
tbl = ms.getTable(part.getDbName(), part.getTableName());
if (tbl == null) {
throw new InvalidObjectException(
@@ -1603,8 +1614,8 @@ public class HiveMetaStore extends Thrif
private int add_partitions_core(final RawStore ms, final List<Partition> parts)
throws MetaException, InvalidObjectException, AlreadyExistsException {
String db = parts.get(0).getDbName();
- String tbl = parts.get(0).getTableName();
- logInfo("add_partitions : db=" + db + " tbl=" + tbl);
+ String tblName = parts.get(0).getTableName();
+ logInfo("add_partitions : db=" + db + " tbl=" + tblName);
boolean success = false;
Map<Partition, Boolean> addedPartitions = new HashMap<Partition, Boolean>();
@@ -1615,8 +1626,7 @@ public class HiveMetaStore extends Thrif
Entry<Partition, Boolean> e = add_partition_core_notxn(ms, part, null);
addedPartitions.put(e.getKey(), e.getValue());
}
- success = true;
- ms.commitTransaction();
+ success = ms.commitTransaction();
} finally {
if (!success) {
ms.rollbackTransaction();
@@ -1627,6 +1637,9 @@ public class HiveMetaStore extends Thrif
}
}
}
+ for (Partition part : parts) {
+ fireMetaStoreAddPartitionEvent(ms, part, null, success);
+ }
}
return parts.size();
}
@@ -1686,6 +1699,9 @@ public class HiveMetaStore extends Thrif
try {
firePreEvent(new PreAddPartitionEvent(part, this));
+ MetaStoreUtils.validatePartitionNameCharacters(part.getValues(),
+ partitionValidationPattern);
+
Partition old_part = null;
try {
old_part = ms.getPartition(part.getDbName(), part
@@ -1773,12 +1789,6 @@ public class HiveMetaStore extends Thrif
wh.deleteDir(partLocation, true);
}
}
- for (MetaStoreEventListener listener : listeners) {
- AddPartitionEvent addPartitionEvent =
- new AddPartitionEvent(tbl, part, success, this);
- addPartitionEvent.setEnvironmentContext(envContext);
- listener.onAddPartition(addPartitionEvent);
- }
}
Map<Partition, Boolean> returnVal = new HashMap<Partition, Boolean>();
returnVal.put(part, madeDir);
@@ -1800,10 +1810,23 @@ public class HiveMetaStore extends Thrif
if (!success) {
ms.rollbackTransaction();
}
+ fireMetaStoreAddPartitionEvent(ms, part, envContext, success);
}
return retPtn;
}
+ private void fireMetaStoreAddPartitionEvent(final RawStore ms,
+ final Partition part, final EnvironmentContext envContext, boolean success)
+ throws MetaException {
+ final Table tbl = ms.getTable(part.getDbName(), part.getTableName());
+ for (MetaStoreEventListener listener : listeners) {
+ AddPartitionEvent addPartitionEvent =
+ new AddPartitionEvent(tbl, part, success, this);
+ addPartitionEvent.setEnvironmentContext(envContext);
+ listener.onAddPartition(addPartitionEvent);
+ }
+ }
+
@Override
public Partition add_partition(final Partition part)
throws InvalidObjectException, AlreadyExistsException, MetaException {
@@ -2141,6 +2164,11 @@ public class HiveMetaStore extends Thrif
try {
firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, part_vals, new_part, this));
+ if (part_vals != null && !part_vals.isEmpty()) {
+ MetaStoreUtils.validatePartitionNameCharacters(new_part.getValues(),
+ partitionValidationPattern);
+ }
+
oldPart = alterHandler.alterPartition(getMS(), wh, db_name, tbl_name, part_vals, new_part);
for (MetaStoreEventListener listener : listeners) {
@@ -4041,6 +4069,34 @@ public class HiveMetaStore extends Thrif
Collections.addAll(groupNames, username);
return groupNames;
}
+
+ @Override
+ public boolean partition_name_has_valid_characters(List<String> part_vals,
+ boolean throw_exception) throws TException, MetaException {
+ startFunction("partition_name_has_valid_characters");
+ boolean ret = false;
+ Exception ex = null;
+ try {
+ if (throw_exception) {
+ MetaStoreUtils.validatePartitionNameCharacters(part_vals, partitionValidationPattern);
+ ret = true;
+ } else {
+ ret = MetaStoreUtils.partitionNameHasValidCharacters(part_vals,
+ partitionValidationPattern);
+ }
+ } catch (Exception e) {
+ if (e instanceof MetaException) {
+ throw (MetaException)e;
+ } else {
+ ex = e;
+ MetaException me = new MetaException();
+ me.initCause(e);
+ throw me;
+ }
+ }
+ endFunction("partition_name_has_valid_characters", true, null);
+ return ret;
+ }
}
public static IHMSHandler newHMSHandler(String name, HiveConf hiveConf) throws MetaException {
Modified: hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Sat Mar 2 22:37:59 2013
@@ -424,6 +424,11 @@ public class HiveMetaStoreClient impleme
partName, envContext));
}
+ public void validatePartitionNameCharacters(List<String> partVals)
+ throws TException, MetaException {
+ client.partition_name_has_valid_characters(partVals, true);
+ }
+
/**
* Create a new Database
* @param db
Modified: hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (original)
+++ hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java Sat Mar 2 22:37:59 2013
@@ -491,6 +491,15 @@ public interface IMetaStoreClient {
UnknownTableException, UnknownDBException, UnknownPartitionException, InvalidPartitionException;
/**
+ * @param partVals
+ * @throws TException
+ * @throws MetaException
+ */
+ public void validatePartitionNameCharacters(List<String> partVals)
+ throws TException, MetaException;
+
+
+ /**
* @param tbl
* @throws AlreadyExistsException
* @throws InvalidObjectException
Modified: hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Sat Mar 2 22:37:59 2013
@@ -27,11 +27,13 @@ import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
+import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -326,11 +328,38 @@ public class MetaStoreUtils {
return false;
}
- static public boolean validateColNames(List<FieldSchema> cols) {
+ static public boolean validateTblColumns(List<FieldSchema> cols) {
for (FieldSchema fieldSchema : cols) {
if (!validateName(fieldSchema.getName())) {
return false;
}
+ if (!validateColumnType(fieldSchema.getType())) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * validate column type
+ *
+ * if it is predefined, yes. otherwise no
+ * @param name
+ * @return
+ */
+ static public boolean validateColumnType(String type) {
+ int last = 0;
+ boolean lastAlphaDigit = Character.isLetterOrDigit(type.charAt(last));
+ for (int i = 1; i <= type.length(); i++) {
+ if (i == type.length()
+ || Character.isLetterOrDigit(type.charAt(i)) != lastAlphaDigit) {
+ String token = type.substring(last, i);
+ last = i;
+ if (!hiveThriftTypeMap.contains(token)) {
+ return false;
+ }
+ break;
+ }
}
return true;
}
@@ -419,6 +448,15 @@ public class MetaStoreUtils {
org.apache.hadoop.hive.serde.serdeConstants.DECIMAL_TYPE_NAME, "decimal");
}
+ static Set<String> hiveThriftTypeMap; //for validation
+ static {
+ hiveThriftTypeMap = new HashSet<String>();
+ hiveThriftTypeMap.addAll(org.apache.hadoop.hive.serde.serdeConstants.PrimitiveTypes);
+ hiveThriftTypeMap.addAll(org.apache.hadoop.hive.serde.serdeConstants.CollectionTypes);
+ hiveThriftTypeMap.add(org.apache.hadoop.hive.serde.serdeConstants.UNION_TYPE_NAME);
+ hiveThriftTypeMap.add(org.apache.hadoop.hive.serde.serdeConstants.STRUCT_TYPE_NAME);
+ }
+
/**
* Convert type to ThriftType. We do that by tokenizing the type and convert
* each token.
@@ -1102,4 +1140,37 @@ public class MetaStoreUtils {
}
}
+ public static void validatePartitionNameCharacters(List<String> partVals,
+ Pattern partitionValidationPattern) throws MetaException {
+
+ String invalidPartitionVal =
+ getPartitionValWithInvalidCharacter(partVals, partitionValidationPattern);
+ if (invalidPartitionVal != null) {
+ throw new MetaException("Partition value '" + invalidPartitionVal +
+ "' contains a character " + "not matched by whitelist pattern '" +
+ partitionValidationPattern.toString() + "'. " + "(configure with " +
+ HiveConf.ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN.varname + ")");
+ }
+ }
+
+ public static boolean partitionNameHasValidCharacters(List<String> partVals,
+ Pattern partitionValidationPattern) {
+ return getPartitionValWithInvalidCharacter(partVals, partitionValidationPattern) == null;
+ }
+
+ private static String getPartitionValWithInvalidCharacter(List<String> partVals,
+ Pattern partitionValidationPattern) {
+ if (partitionValidationPattern == null) {
+ return null;
+ }
+
+ for (String partVal : partVals) {
+ if (!partitionValidationPattern.matcher(partVal).matches()) {
+ return partVal;
+ }
+ }
+
+ return null;
+ }
+
}
Modified: hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (original)
+++ hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java Sat Mar 2 22:37:59 2013
@@ -2029,7 +2029,9 @@ public class ObjectStore implements RawS
oldp.setValues(newp.getValues());
oldp.setPartitionName(newp.getPartitionName());
oldp.setParameters(newPart.getParameters());
- copyMSD(newp.getSd(), oldp.getSd());
+ if (!TableType.VIRTUAL_VIEW.name().equals(oldp.getTable().getTableType())) {
+ copyMSD(newp.getSd(), oldp.getSd());
+ }
if (newp.getCreateTime() != oldp.getCreateTime()) {
oldp.setCreateTime(newp.getCreateTime());
}
Modified: hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/PartitionNameWhitelistPreEventListener.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/PartitionNameWhitelistPreEventListener.java?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/PartitionNameWhitelistPreEventListener.java (original)
+++ hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/PartitionNameWhitelistPreEventListener.java Sat Mar 2 22:37:59 2013
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore;
-
-import java.util.List;
-import java.util.regex.Pattern;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent;
-import org.apache.hadoop.hive.metastore.events.PreEventContext;
-
-public class PartitionNameWhitelistPreEventListener extends MetaStorePreEventListener {
-
- // When enabled, this hook causes an exception to be thrown
- // if partition fields contain characters which are not
- // matched by the whitelist
-
- private static String regex;
- private static Pattern pattern;
-
- public PartitionNameWhitelistPreEventListener(Configuration config) {
- super(config);
-
- regex = config.get(HiveConf.ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN.varname);
- pattern = Pattern.compile(regex);
- }
-
- @Override
- public void onEvent(PreEventContext event) throws MetaException, NoSuchObjectException,
- InvalidOperationException {
-
- switch (event.getEventType()) {
- case ADD_PARTITION:
- checkWhitelist(((PreAddPartitionEvent) event).getPartition().getValues());
- break;
- }
-
- }
-
- private static void checkWhitelist(List<String> partVals) throws MetaException {
- for (String partVal : partVals) {
- if (!pattern.matcher(partVal).matches()) {
- throw new MetaException("Partition value '" + partVal + "' contains a character "
- + "not matched by whitelist pattern '" + regex + "'. " + "(configure with "
- + HiveConf.ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN.varname + ")");
- }
- }
- }
-
-}
Modified: hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java (original)
+++ hive/branches/ptf-windowing/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java Sat Mar 2 22:37:59 2013
@@ -114,10 +114,12 @@ public class RetryingHMSHandler implemen
caughtException = e.getCause();
}
else {
+ LOG.error(ExceptionUtils.getStackTrace(e.getCause()));
throw e.getCause();
}
}
else {
+ LOG.error(ExceptionUtils.getStackTrace(e));
throw e;
}
} catch (InvocationTargetException e) {
@@ -127,11 +129,13 @@ public class RetryingHMSHandler implemen
caughtException = e.getCause();
}
else {
+ LOG.error(ExceptionUtils.getStackTrace(e.getCause()));
throw e.getCause();
}
}
if (retryCount >= retryLimit) {
+ LOG.error(ExceptionUtils.getStackTrace(caughtException));
// Since returning exceptions with a nested "cause" can be a problem in
// Thrift, we are stuffing the stack trace into the message itself.
throw new MetaException(ExceptionUtils.getStackTrace(caughtException));
Modified: hive/branches/ptf-windowing/metastore/src/test/org/apache/hadoop/hive/metastore/DummyListener.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/test/org/apache/hadoop/hive/metastore/DummyListener.java?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/test/org/apache/hadoop/hive/metastore/DummyListener.java (original)
+++ hive/branches/ptf-windowing/metastore/src/test/org/apache/hadoop/hive/metastore/DummyListener.java Sat Mar 2 22:37:59 2013
@@ -42,6 +42,17 @@ public class DummyListener extends MetaS
public static final List<ListenerEvent> notifyList = new ArrayList<ListenerEvent>();
+ /**
+ * @return The last event received, or null if no event was received.
+ */
+ public static ListenerEvent getLastEvent() {
+ if (notifyList.isEmpty()) {
+ return null;
+ } else {
+ return notifyList.get(notifyList.size() - 1);
+ }
+ }
+
public DummyListener(Configuration config) {
super(config);
}
Modified: hive/branches/ptf-windowing/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java?rev=1451954&r1=1451953&r2=1451954&view=diff
==============================================================================
--- hive/branches/ptf-windowing/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (original)
+++ hive/branches/ptf-windowing/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java Sat Mar 2 22:37:59 2013
@@ -586,6 +586,106 @@ public abstract class TestHiveMetaStore
}
+ public void testAlterViewParititon() throws Throwable {
+ String dbName = "compdb";
+ String tblName = "comptbl";
+ String viewName = "compView";
+
+ client.dropTable(dbName, tblName);
+ silentDropDatabase(dbName);
+ Database db = new Database();
+ db.setName(dbName);
+ db.setDescription("Alter Partition Test database");
+ client.createDatabase(db);
+
+ ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(2);
+ cols.add(new FieldSchema("name", serdeConstants.STRING_TYPE_NAME, ""));
+ cols.add(new FieldSchema("income", serdeConstants.INT_TYPE_NAME, ""));
+
+ Table tbl = new Table();
+ tbl.setDbName(dbName);
+ tbl.setTableName(tblName);
+ StorageDescriptor sd = new StorageDescriptor();
+ tbl.setSd(sd);
+ sd.setCols(cols);
+ sd.setCompressed(false);
+ sd.setParameters(new HashMap<String, String>());
+ sd.setSerdeInfo(new SerDeInfo());
+ sd.getSerdeInfo().setName(tbl.getTableName());
+ sd.getSerdeInfo().setParameters(new HashMap<String, String>());
+ sd.getSerdeInfo().getParameters()
+ .put(serdeConstants.SERIALIZATION_FORMAT, "1");
+ sd.setSortCols(new ArrayList<Order>());
+
+ client.createTable(tbl);
+
+ if (isThriftClient) {
+ // the createTable() above does not update the location in the 'tbl'
+ // object when the client is a thrift client and the code below relies
+ // on the location being present in the 'tbl' object - so get the table
+ // from the metastore
+ tbl = client.getTable(dbName, tblName);
+ }
+
+ ArrayList<FieldSchema> viewCols = new ArrayList<FieldSchema>(1);
+ viewCols.add(new FieldSchema("income", serdeConstants.INT_TYPE_NAME, ""));
+
+ ArrayList<FieldSchema> viewPartitionCols = new ArrayList<FieldSchema>(1);
+ viewPartitionCols.add(new FieldSchema("name", serdeConstants.STRING_TYPE_NAME, ""));
+
+ Table view = new Table();
+ view.setDbName(dbName);
+ view.setTableName(viewName);
+ view.setTableType(TableType.VIRTUAL_VIEW.name());
+ view.setPartitionKeys(viewPartitionCols);
+ view.setViewOriginalText("SELECT income, name FROM " + tblName);
+ view.setViewExpandedText("SELECT `" + tblName + "`.`income`, `" + tblName +
+ "`.`name` FROM `" + dbName + "`.`" + tblName + "`");
+ StorageDescriptor viewSd = new StorageDescriptor();
+ view.setSd(viewSd);
+ viewSd.setCols(viewCols);
+ viewSd.setCompressed(false);
+ viewSd.setParameters(new HashMap<String, String>());
+ viewSd.setSerdeInfo(new SerDeInfo());
+ viewSd.getSerdeInfo().setParameters(new HashMap<String, String>());
+
+ client.createTable(view);
+
+ if (isThriftClient) {
+ // the createTable() above does not update the location in the 'view'
+ // object when the client is a thrift client and the code below relies
+ // on the location being present in the 'view' object - so get the view
+ // from the metastore
+ view = client.getTable(dbName, viewName);
+ }
+
+ List<String> vals = new ArrayList<String>(1);
+ vals.add("abc");
+
+ Partition part = new Partition();
+ part.setDbName(dbName);
+ part.setTableName(viewName);
+ part.setValues(vals);
+ part.setParameters(new HashMap<String, String>());
+
+ client.add_partition(part);
+
+ Partition part2 = client.getPartition(dbName, viewName, part.getValues());
+
+ part2.getParameters().put("a", "b");
+
+ client.alter_partition(dbName, viewName, part2);
+
+ Partition part3 = client.getPartition(dbName, viewName, part.getValues());
+ assertEquals("couldn't view alter partition", part3.getParameters().get(
+ "a"), "b");
+
+ client.dropTable(dbName, viewName);
+
+ client.dropTable(dbName, tblName);
+
+ client.dropDatabase(dbName);
+ }
public void testAlterPartition() throws Throwable {
@@ -854,8 +954,8 @@ public abstract class TestHiveMetaStore
public void testDatabaseLocationWithPermissionProblems() throws Exception {
- // Note: The following test will fail if you are running this test as root. Setting
- // permission to '0' on the database folder will not preclude root from being able
+ // Note: The following test will fail if you are running this test as root. Setting
+ // permission to '0' on the database folder will not preclude root from being able
// to create the necessary files.
if (System.getProperty("user.name").equals("root")) {
@@ -1482,6 +1582,24 @@ public abstract class TestHiveMetaStore
assertTrue("Able to create table with invalid name: " + invTblName,
false);
}
+
+ // create an invalid table which has wrong column type
+ ArrayList<FieldSchema> invColsInvType = new ArrayList<FieldSchema>(2);
+ invColsInvType.add(new FieldSchema("name", serdeConstants.STRING_TYPE_NAME, ""));
+ invColsInvType.add(new FieldSchema("income", "xyz", ""));
+ tbl.setTableName(tblName);
+ tbl.getSd().setCols(invColsInvType);
+ boolean failChecker = false;
+ try {
+ client.createTable(tbl);
+ } catch (InvalidObjectException ex) {
+ failChecker = true;
+ }
+ if (!failChecker) {
+ assertTrue("Able to create table with invalid column type: " + invTblName,
+ false);
+ }
+
ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(2);
cols.add(new FieldSchema("name", serdeConstants.STRING_TYPE_NAME, ""));
cols.add(new FieldSchema("income", serdeConstants.INT_TYPE_NAME, ""));
@@ -1561,6 +1679,17 @@ public abstract class TestHiveMetaStore
assertEquals("alter table didn't move data correct location", tbl3
.getSd().getLocation(), tbl2.getSd().getLocation());
}
+
+ // alter table with invalid column type
+ tbl_pk.getSd().setCols(invColsInvType);
+ failed = false;
+ try {
+ client.alter_table(dbName, tbl2.getTableName(), tbl_pk);
+ } catch (InvalidOperationException ex) {
+ failed = true;
+ }
+ assertTrue("Should not have succeeded in altering column", failed);
+
} catch (Exception e) {
System.err.println(StringUtils.stringifyException(e));
System.err.println("testSimpleTable() failed.");
@@ -1757,7 +1886,7 @@ public abstract class TestHiveMetaStore
} catch (TException e) {
e.printStackTrace();
assert (false);
- }
+ }
assert (threwException);
}
@@ -2106,7 +2235,7 @@ public abstract class TestHiveMetaStore
* at least works correctly.
*/
public void testSynchronized() throws Exception {
- int currentNumberOfDbs = client.getAllDatabases().size();
+ int currentNumberOfDbs = client.getAllDatabases().size();
IMetaStoreClient synchronizedClient =
HiveMetaStoreClient.newSynchronizedClient(client);
@@ -2340,6 +2469,8 @@ public abstract class TestHiveMetaStore
}
}
+
+
/**
* This method simulates another Hive metastore renaming a table, by accessing the db and
* updating the name.