You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@cassandra.apache.org by jb...@apache.org on 2011/09/06 18:21:30 UTC
svn commit: r1165750 [1/2] - in /cassandra/trunk:
drivers/java/src/org/apache/cassandra/cql/jdbc/
drivers/java/test/org/apache/cassandra/cql/ drivers/py/cql/
drivers/py/cql/cassandra/ interface/
interface/thrift/gen-java/org/apache/cassandra/thrift/ sr...
Author: jbellis
Date: Tue Sep 6 16:21:29 2011
New Revision: 1165750
URL: http://svn.apache.org/viewvc?rev=1165750&view=rev
Log:
include schema with CQL resultset
patch by jbellis; reviewed by thobbs for CASSANDRA-2734
Added:
cassandra/trunk/interface/thrift/gen-java/org/apache/cassandra/thrift/CqlMetadata.java
Modified:
cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CResultSet.java
cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CassandraResultSet.java
cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CassandraStatement.java
cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/ColumnDecoder.java
cassandra/trunk/drivers/java/test/org/apache/cassandra/cql/EmbeddedServiceBase.java
cassandra/trunk/drivers/java/test/org/apache/cassandra/cql/JdbcDriverTest.java
cassandra/trunk/drivers/py/cql/cassandra/Cassandra.py
cassandra/trunk/drivers/py/cql/cassandra/constants.py
cassandra/trunk/drivers/py/cql/cassandra/ttypes.py
cassandra/trunk/drivers/py/cql/cursor.py
cassandra/trunk/drivers/py/cql/decoders.py
cassandra/trunk/drivers/py/cql/marshal.py
cassandra/trunk/interface/cassandra.thrift
cassandra/trunk/interface/thrift/gen-java/org/apache/cassandra/thrift/Cassandra.java
cassandra/trunk/interface/thrift/gen-java/org/apache/cassandra/thrift/Constants.java
cassandra/trunk/interface/thrift/gen-java/org/apache/cassandra/thrift/CqlResult.java
cassandra/trunk/src/java/org/apache/cassandra/cli/CliClient.java
cassandra/trunk/src/java/org/apache/cassandra/config/Schema.java
cassandra/trunk/src/java/org/apache/cassandra/cql/QueryProcessor.java
cassandra/trunk/src/java/org/apache/cassandra/cql/jdbc/TypesMap.java
cassandra/trunk/src/java/org/apache/cassandra/db/marshal/AbstractType.java
cassandra/trunk/src/java/org/apache/cassandra/db/marshal/TypeParser.java
Modified: cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CResultSet.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CResultSet.java?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CResultSet.java (original)
+++ cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CResultSet.java Tue Sep 6 16:21:29 2011
@@ -31,6 +31,7 @@ import java.sql.Date;
import java.util.*;
import org.apache.cassandra.thrift.Column;
+import org.apache.cassandra.thrift.CqlMetadata;
import org.apache.cassandra.thrift.CqlResult;
import org.apache.cassandra.thrift.CqlRow;
import org.apache.cassandra.utils.ByteBufferUtil;
@@ -41,11 +42,8 @@ class CResultSet extends AbstractResultS
public static final int DEFAULT_CONCURRENCY = ResultSet.CONCUR_READ_ONLY;
public static final int DEFAULT_HOLDABILITY = ResultSet.HOLD_CURSORS_OVER_COMMIT;
- private final ColumnDecoder decoder;
private final String keyspace;
- private final String columnFamily;
-
/**
* The r set iter.
*/
@@ -55,8 +53,6 @@ class CResultSet extends AbstractResultS
// the current row key when iterating through results.
private byte[] curRowKey = null;
- private TypedColumn typedCurRowKey = null;
-
/**
* The values.
*/
@@ -84,14 +80,14 @@ class CResultSet extends AbstractResultS
private boolean wasNull;
+ private CqlMetadata schema;
+
/**
* no argument constructor.
*/
CResultSet()
{
keyspace = null;
- columnFamily = null;
- decoder = null;
statement = null;
meta = new CResultSetMetaData();
}
@@ -99,16 +95,15 @@ class CResultSet extends AbstractResultS
/**
* Instantiates a new cassandra result set.
*/
- CResultSet(Statement statement, CqlResult resultSet, ColumnDecoder decoder, String keyspace, String columnFamily) throws SQLException
+ CResultSet(Statement statement, CqlResult resultSet, String keyspace) throws SQLException
{
this.statement = statement;
+ this.keyspace = keyspace;
this.resultSetType = statement.getResultSetType();
this.fetchDirection = statement.getFetchDirection();
this.fetchSize = statement.getFetchSize();
+ this.schema = resultSet.schema;
- this.decoder = decoder;
- this.keyspace = keyspace;
- this.columnFamily = columnFamily;
rSetIter = resultSet.getRowsIterator();
meta = new CResultSetMetaData();
}
@@ -771,11 +766,6 @@ class CResultSet extends AbstractResultS
return resultSetType;
}
- public TypedColumn getTypedKey() throws SQLException
- {
- return typedCurRowKey;
- }
-
// URL (awaiting some clarifications as to how it is stored in C* ... just a validated Sting in URL format?
public URL getURL(int arg0) throws SQLException
{
@@ -853,13 +843,11 @@ class CResultSet extends AbstractResultS
CqlRow row = rSetIter.next();
rowNumber++;
curRowKey = row.getKey();
- typedCurRowKey = decoder.makeKeyColumn(keyspace, columnFamily, curRowKey);
List<Column> cols = row.getColumns();
for (Column col : cols)
{
-
- TypedColumn c = decoder.makeCol(keyspace, columnFamily, col);
- String columnName = decoder.colNameAsString(keyspace, columnFamily, col.name);
+ TypedColumn c = createColumn(col);
+ String columnName = c.getNameString();
values.add(c);
indexMap.put(columnName, values.size()); // one greater than 0 based index of a list
valueMap.put(columnName, c);
@@ -873,6 +861,15 @@ class CResultSet extends AbstractResultS
}
}
+ private TypedColumn createColumn(Column column)
+ {
+ String nameType = schema.name_types.get(column.name);
+ AbstractJdbcType<?> comparator = TypesMap.getTypeForComparator(nameType == null ? schema.default_name_type : nameType);
+ String valueType = schema.value_types.get(column.name);
+ AbstractJdbcType<?> validator = TypesMap.getTypeForComparator(valueType == null ? schema.default_value_type : valueType);
+ return new TypedColumn(column, comparator, validator);
+ }
+
public boolean previous() throws SQLException
{
throw new SQLFeatureNotSupportedException(NOT_SUPPORTED);
@@ -991,8 +988,7 @@ class CResultSet extends AbstractResultS
public String getTableName(int column) throws SQLException
{
- checkIndex(column);
- return columnFamily;
+ throw new SQLFeatureNotSupportedException();
}
public boolean isAutoIncrement(int column) throws SQLException
Modified: cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CassandraResultSet.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CassandraResultSet.java?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CassandraResultSet.java (original)
+++ cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CassandraResultSet.java Tue Sep 6 16:21:29 2011
@@ -31,8 +31,6 @@ public interface CassandraResultSet exte
*/
public byte[] getKey()throws SQLException;;
- public TypedColumn getTypedKey()throws SQLException;;
-
/** @return a BigInteger value for the given column offset*/
public BigInteger getBigInteger(int i) throws SQLException;
/** @return a BigInteger value for the given column name */
Modified: cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CassandraStatement.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CassandraStatement.java?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CassandraStatement.java (original)
+++ cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/CassandraStatement.java Tue Sep 6 16:21:29 2011
@@ -154,12 +154,11 @@ class CassandraStatement extends Abstrac
resetResults();
CqlResult rSet = connection.execute(sql);
String keyspace = connection.currentKeyspace;
- String columnfamily = determineCurrentColumnFamily(sql);
switch (rSet.getType())
{
case ROWS:
- currentResultSet = new CResultSet(this, rSet, connection.decoder, keyspace, columnfamily);
+ currentResultSet = new CResultSet(this, rSet, keyspace);
break;
case INT:
updateCount = rSet.getNum();
Modified: cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/ColumnDecoder.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/ColumnDecoder.java?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/ColumnDecoder.java (original)
+++ cassandra/trunk/drivers/java/src/org/apache/cassandra/cql/jdbc/ColumnDecoder.java Tue Sep 6 16:21:29 2011
@@ -76,7 +76,7 @@ class ColumnDecoder
protected AbstractJdbcType<?> getComparator(String keyspace, String columnFamily)
{
CFamMeta cf = metadata.get(String.format("%s.%s", keyspace, columnFamily));
- AbstractJdbcType<?> type = (cf != null) ? TypesMap.getTermForComparator(cf.comparator) : null;
+ AbstractJdbcType<?> type = (cf != null) ? TypesMap.getTypeForComparator(cf.comparator) : null;
return (type == null) ? null : type;
}
@@ -92,7 +92,7 @@ class ColumnDecoder
{
// not be the key name
}
- return TypesMap.getTermForComparator(cf.comparator);
+ return TypesMap.getTypeForComparator(cf.comparator);
}
private AbstractJdbcType<?> getValueType(String keyspace, String columnFamily, ByteBuffer name)
@@ -104,21 +104,21 @@ class ColumnDecoder
try
{
if (ByteBufferUtil.string(name).equalsIgnoreCase(ByteBufferUtil.string(cf.keyAlias)))
- return TypesMap.getTermForComparator(cf.keyValidator);
+ return TypesMap.getTypeForComparator(cf.keyValidator);
}
catch (CharacterCodingException e)
{
// not be the key name
}
- AbstractJdbcType<?> type = TypesMap.getTermForComparator(cf.columnMeta.get(name));
- return (type != null) ? type : TypesMap.getTermForComparator(cf.defaultValidator);
+ AbstractJdbcType<?> type = TypesMap.getTypeForComparator(cf.columnMeta.get(name));
+ return (type != null) ? type : TypesMap.getTypeForComparator(cf.defaultValidator);
}
public AbstractJdbcType<?> getKeyValidator(String keyspace, String columnFamily)
{
CFamMeta cf = metadata.get(String.format("%s.%s", keyspace, columnFamily));
- AbstractJdbcType<?> type = (cf != null) ? TypesMap.getTermForComparator(cf.keyValidator) : null;
+ AbstractJdbcType<?> type = (cf != null) ? TypesMap.getTypeForComparator(cf.keyValidator) : null;
return (type == null) ? null : type;
}
Modified: cassandra/trunk/drivers/java/test/org/apache/cassandra/cql/EmbeddedServiceBase.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/drivers/java/test/org/apache/cassandra/cql/EmbeddedServiceBase.java?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/drivers/java/test/org/apache/cassandra/cql/EmbeddedServiceBase.java (original)
+++ cassandra/trunk/drivers/java/test/org/apache/cassandra/cql/EmbeddedServiceBase.java Tue Sep 6 16:21:29 2011
@@ -25,12 +25,18 @@ import java.io.IOException;
import java.net.Socket;
import java.net.UnknownHostException;
+import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import org.apache.cassandra.CleanupHelper;
import org.apache.cassandra.SchemaLoader;
-import org.apache.cassandra.config.*;
+import org.apache.cassandra.config.CFMetaData;
+import org.apache.cassandra.config.ConfigurationException;
+import org.apache.cassandra.config.KSMetaData;
+import org.apache.cassandra.config.Schema;
import org.apache.cassandra.service.AbstractCassandraDaemon;
import org.apache.cassandra.service.EmbeddedCassandraService;
-import org.junit.BeforeClass;
/**
* The abstract BaseClass.
@@ -45,6 +51,8 @@ public abstract class EmbeddedServiceBas
AbstractCassandraDaemon.initLog4j();
}
+ private static final Logger logger = LoggerFactory.getLogger(EmbeddedServiceBase.class);
+
@BeforeClass
public static void cleanUpOldStuff() throws IOException
{
@@ -61,6 +69,7 @@ public abstract class EmbeddedServiceBas
{
if (!checkIfServerRunning())
{
+ logger.debug("Starting embeddeded server");
loadData();
cassandra = new EmbeddedCassandraService();
cassandra.start();
Modified: cassandra/trunk/drivers/java/test/org/apache/cassandra/cql/JdbcDriverTest.java
URL: http://svn.apache.org/viewvc/cassandra/trunk/drivers/java/test/org/apache/cassandra/cql/JdbcDriverTest.java?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/drivers/java/test/org/apache/cassandra/cql/JdbcDriverTest.java (original)
+++ cassandra/trunk/drivers/java/test/org/apache/cassandra/cql/JdbcDriverTest.java Tue Sep 6 16:21:29 2011
@@ -90,7 +90,6 @@ public class JdbcDriverTest extends Embe
String label, int type, String typeName, boolean signed, boolean caseSensitive) throws SQLException
{
assertEquals(colClass, md.getColumnClassName(col)); // full class name of type<T>
- assertEquals(table, md.getTableName(col));
assertEquals(schema, md.getSchemaName(col));
assertEquals(label, md.getColumnLabel(col));
assertEquals(label, md.getColumnName(col));
@@ -458,10 +457,6 @@ public class JdbcDriverTest extends Embe
while (rs.next())
{
actualRows++;
- if (keyIter != null)
- {
- assert cassandraRs.getTypedKey().getValueString().equals(keyIter.next());
- }
for (int c = 0; c < cols.length; c++)
{
Modified: cassandra/trunk/drivers/py/cql/cassandra/Cassandra.py
URL: http://svn.apache.org/viewvc/cassandra/trunk/drivers/py/cql/cassandra/Cassandra.py?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/drivers/py/cql/cassandra/Cassandra.py (original)
+++ cassandra/trunk/drivers/py/cql/cassandra/Cassandra.py Tue Sep 6 16:21:29 2011
@@ -2670,11 +2670,11 @@ class get_slice_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype91, _size88) = iprot.readListBegin()
- for _i92 in xrange(_size88):
- _elem93 = ColumnOrSuperColumn()
- _elem93.read(iprot)
- self.success.append(_elem93)
+ (_etype136, _size133) = iprot.readListBegin()
+ for _i137 in xrange(_size133):
+ _elem138 = ColumnOrSuperColumn()
+ _elem138.read(iprot)
+ self.success.append(_elem138)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -2709,8 +2709,8 @@ class get_slice_result:
if self.success != None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
- for iter94 in self.success:
- iter94.write(oprot)
+ for iter139 in self.success:
+ iter139.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.ire != None:
@@ -2979,10 +2979,10 @@ class multiget_slice_args:
if fid == 1:
if ftype == TType.LIST:
self.keys = []
- (_etype98, _size95) = iprot.readListBegin()
- for _i99 in xrange(_size95):
- _elem100 = iprot.readString();
- self.keys.append(_elem100)
+ (_etype143, _size140) = iprot.readListBegin()
+ for _i144 in xrange(_size140):
+ _elem145 = iprot.readString();
+ self.keys.append(_elem145)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -3016,8 +3016,8 @@ class multiget_slice_args:
if self.keys != None:
oprot.writeFieldBegin('keys', TType.LIST, 1)
oprot.writeListBegin(TType.STRING, len(self.keys))
- for iter101 in self.keys:
- oprot.writeString(iter101)
+ for iter146 in self.keys:
+ oprot.writeString(iter146)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.column_parent != None:
@@ -3091,17 +3091,17 @@ class multiget_slice_result:
if fid == 0:
if ftype == TType.MAP:
self.success = {}
- (_ktype103, _vtype104, _size102 ) = iprot.readMapBegin()
- for _i106 in xrange(_size102):
- _key107 = iprot.readString();
- _val108 = []
- (_etype112, _size109) = iprot.readListBegin()
- for _i113 in xrange(_size109):
- _elem114 = ColumnOrSuperColumn()
- _elem114.read(iprot)
- _val108.append(_elem114)
+ (_ktype148, _vtype149, _size147 ) = iprot.readMapBegin()
+ for _i151 in xrange(_size147):
+ _key152 = iprot.readString();
+ _val153 = []
+ (_etype157, _size154) = iprot.readListBegin()
+ for _i158 in xrange(_size154):
+ _elem159 = ColumnOrSuperColumn()
+ _elem159.read(iprot)
+ _val153.append(_elem159)
iprot.readListEnd()
- self.success[_key107] = _val108
+ self.success[_key152] = _val153
iprot.readMapEnd()
else:
iprot.skip(ftype)
@@ -3136,11 +3136,11 @@ class multiget_slice_result:
if self.success != None:
oprot.writeFieldBegin('success', TType.MAP, 0)
oprot.writeMapBegin(TType.STRING, TType.LIST, len(self.success))
- for kiter115,viter116 in self.success.items():
- oprot.writeString(kiter115)
- oprot.writeListBegin(TType.STRUCT, len(viter116))
- for iter117 in viter116:
- iter117.write(oprot)
+ for kiter160,viter161 in self.success.items():
+ oprot.writeString(kiter160)
+ oprot.writeListBegin(TType.STRUCT, len(viter161))
+ for iter162 in viter161:
+ iter162.write(oprot)
oprot.writeListEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
@@ -3208,10 +3208,10 @@ class multiget_count_args:
if fid == 1:
if ftype == TType.LIST:
self.keys = []
- (_etype121, _size118) = iprot.readListBegin()
- for _i122 in xrange(_size118):
- _elem123 = iprot.readString();
- self.keys.append(_elem123)
+ (_etype166, _size163) = iprot.readListBegin()
+ for _i167 in xrange(_size163):
+ _elem168 = iprot.readString();
+ self.keys.append(_elem168)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -3245,8 +3245,8 @@ class multiget_count_args:
if self.keys != None:
oprot.writeFieldBegin('keys', TType.LIST, 1)
oprot.writeListBegin(TType.STRING, len(self.keys))
- for iter124 in self.keys:
- oprot.writeString(iter124)
+ for iter169 in self.keys:
+ oprot.writeString(iter169)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.column_parent != None:
@@ -3320,11 +3320,11 @@ class multiget_count_result:
if fid == 0:
if ftype == TType.MAP:
self.success = {}
- (_ktype126, _vtype127, _size125 ) = iprot.readMapBegin()
- for _i129 in xrange(_size125):
- _key130 = iprot.readString();
- _val131 = iprot.readI32();
- self.success[_key130] = _val131
+ (_ktype171, _vtype172, _size170 ) = iprot.readMapBegin()
+ for _i174 in xrange(_size170):
+ _key175 = iprot.readString();
+ _val176 = iprot.readI32();
+ self.success[_key175] = _val176
iprot.readMapEnd()
else:
iprot.skip(ftype)
@@ -3359,9 +3359,9 @@ class multiget_count_result:
if self.success != None:
oprot.writeFieldBegin('success', TType.MAP, 0)
oprot.writeMapBegin(TType.STRING, TType.I32, len(self.success))
- for kiter132,viter133 in self.success.items():
- oprot.writeString(kiter132)
- oprot.writeI32(viter133)
+ for kiter177,viter178 in self.success.items():
+ oprot.writeString(kiter177)
+ oprot.writeI32(viter178)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.ire != None:
@@ -3533,11 +3533,11 @@ class get_range_slices_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype137, _size134) = iprot.readListBegin()
- for _i138 in xrange(_size134):
- _elem139 = KeySlice()
- _elem139.read(iprot)
- self.success.append(_elem139)
+ (_etype182, _size179) = iprot.readListBegin()
+ for _i183 in xrange(_size179):
+ _elem184 = KeySlice()
+ _elem184.read(iprot)
+ self.success.append(_elem184)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -3572,8 +3572,8 @@ class get_range_slices_result:
if self.success != None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
- for iter140 in self.success:
- iter140.write(oprot)
+ for iter185 in self.success:
+ iter185.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.ire != None:
@@ -3745,11 +3745,11 @@ class get_indexed_slices_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype144, _size141) = iprot.readListBegin()
- for _i145 in xrange(_size141):
- _elem146 = KeySlice()
- _elem146.read(iprot)
- self.success.append(_elem146)
+ (_etype189, _size186) = iprot.readListBegin()
+ for _i190 in xrange(_size186):
+ _elem191 = KeySlice()
+ _elem191.read(iprot)
+ self.success.append(_elem191)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -3784,8 +3784,8 @@ class get_indexed_slices_result:
if self.success != None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
- for iter147 in self.success:
- iter147.write(oprot)
+ for iter192 in self.success:
+ iter192.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.ire != None:
@@ -4592,23 +4592,23 @@ class batch_mutate_args:
if fid == 1:
if ftype == TType.MAP:
self.mutation_map = {}
- (_ktype149, _vtype150, _size148 ) = iprot.readMapBegin()
- for _i152 in xrange(_size148):
- _key153 = iprot.readString();
- _val154 = {}
- (_ktype156, _vtype157, _size155 ) = iprot.readMapBegin()
- for _i159 in xrange(_size155):
- _key160 = iprot.readString();
- _val161 = []
- (_etype165, _size162) = iprot.readListBegin()
- for _i166 in xrange(_size162):
- _elem167 = Mutation()
- _elem167.read(iprot)
- _val161.append(_elem167)
+ (_ktype194, _vtype195, _size193 ) = iprot.readMapBegin()
+ for _i197 in xrange(_size193):
+ _key198 = iprot.readString();
+ _val199 = {}
+ (_ktype201, _vtype202, _size200 ) = iprot.readMapBegin()
+ for _i204 in xrange(_size200):
+ _key205 = iprot.readString();
+ _val206 = []
+ (_etype210, _size207) = iprot.readListBegin()
+ for _i211 in xrange(_size207):
+ _elem212 = Mutation()
+ _elem212.read(iprot)
+ _val206.append(_elem212)
iprot.readListEnd()
- _val154[_key160] = _val161
+ _val199[_key205] = _val206
iprot.readMapEnd()
- self.mutation_map[_key153] = _val154
+ self.mutation_map[_key198] = _val199
iprot.readMapEnd()
else:
iprot.skip(ftype)
@@ -4630,14 +4630,14 @@ class batch_mutate_args:
if self.mutation_map != None:
oprot.writeFieldBegin('mutation_map', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.mutation_map))
- for kiter168,viter169 in self.mutation_map.items():
- oprot.writeString(kiter168)
- oprot.writeMapBegin(TType.STRING, TType.LIST, len(viter169))
- for kiter170,viter171 in viter169.items():
- oprot.writeString(kiter170)
- oprot.writeListBegin(TType.STRUCT, len(viter171))
- for iter172 in viter171:
- iter172.write(oprot)
+ for kiter213,viter214 in self.mutation_map.items():
+ oprot.writeString(kiter213)
+ oprot.writeMapBegin(TType.STRING, TType.LIST, len(viter214))
+ for kiter215,viter216 in viter214.items():
+ oprot.writeString(kiter215)
+ oprot.writeListBegin(TType.STRUCT, len(viter216))
+ for iter217 in viter216:
+ iter217.write(oprot)
oprot.writeListEnd()
oprot.writeMapEnd()
oprot.writeMapEnd()
@@ -4956,16 +4956,16 @@ class describe_schema_versions_result:
if fid == 0:
if ftype == TType.MAP:
self.success = {}
- (_ktype174, _vtype175, _size173 ) = iprot.readMapBegin()
- for _i177 in xrange(_size173):
- _key178 = iprot.readString();
- _val179 = []
- (_etype183, _size180) = iprot.readListBegin()
- for _i184 in xrange(_size180):
- _elem185 = iprot.readString();
- _val179.append(_elem185)
+ (_ktype219, _vtype220, _size218 ) = iprot.readMapBegin()
+ for _i222 in xrange(_size218):
+ _key223 = iprot.readString();
+ _val224 = []
+ (_etype228, _size225) = iprot.readListBegin()
+ for _i229 in xrange(_size225):
+ _elem230 = iprot.readString();
+ _val224.append(_elem230)
iprot.readListEnd()
- self.success[_key178] = _val179
+ self.success[_key223] = _val224
iprot.readMapEnd()
else:
iprot.skip(ftype)
@@ -4988,11 +4988,11 @@ class describe_schema_versions_result:
if self.success != None:
oprot.writeFieldBegin('success', TType.MAP, 0)
oprot.writeMapBegin(TType.STRING, TType.LIST, len(self.success))
- for kiter186,viter187 in self.success.items():
- oprot.writeString(kiter186)
- oprot.writeListBegin(TType.STRING, len(viter187))
- for iter188 in viter187:
- oprot.writeString(iter188)
+ for kiter231,viter232 in self.success.items():
+ oprot.writeString(kiter231)
+ oprot.writeListBegin(TType.STRING, len(viter232))
+ for iter233 in viter232:
+ oprot.writeString(iter233)
oprot.writeListEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
@@ -5086,11 +5086,11 @@ class describe_keyspaces_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype192, _size189) = iprot.readListBegin()
- for _i193 in xrange(_size189):
- _elem194 = KsDef()
- _elem194.read(iprot)
- self.success.append(_elem194)
+ (_etype237, _size234) = iprot.readListBegin()
+ for _i238 in xrange(_size234):
+ _elem239 = KsDef()
+ _elem239.read(iprot)
+ self.success.append(_elem239)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -5113,8 +5113,8 @@ class describe_keyspaces_result:
if self.success != None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
- for iter195 in self.success:
- iter195.write(oprot)
+ for iter240 in self.success:
+ iter240.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.ire != None:
@@ -5425,11 +5425,11 @@ class describe_ring_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype199, _size196) = iprot.readListBegin()
- for _i200 in xrange(_size196):
- _elem201 = TokenRange()
- _elem201.read(iprot)
- self.success.append(_elem201)
+ (_etype244, _size241) = iprot.readListBegin()
+ for _i245 in xrange(_size241):
+ _elem246 = TokenRange()
+ _elem246.read(iprot)
+ self.success.append(_elem246)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -5452,8 +5452,8 @@ class describe_ring_result:
if self.success != None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
- for iter202 in self.success:
- iter202.write(oprot)
+ for iter247 in self.success:
+ iter247.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.ire != None:
@@ -5952,10 +5952,10 @@ class describe_splits_result:
if fid == 0:
if ftype == TType.LIST:
self.success = []
- (_etype206, _size203) = iprot.readListBegin()
- for _i207 in xrange(_size203):
- _elem208 = iprot.readString();
- self.success.append(_elem208)
+ (_etype251, _size248) = iprot.readListBegin()
+ for _i252 in xrange(_size248):
+ _elem253 = iprot.readString();
+ self.success.append(_elem253)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -5978,8 +5978,8 @@ class describe_splits_result:
if self.success != None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRING, len(self.success))
- for iter209 in self.success:
- oprot.writeString(iter209)
+ for iter254 in self.success:
+ oprot.writeString(iter254)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.ire != None:
Modified: cassandra/trunk/drivers/py/cql/cassandra/constants.py
URL: http://svn.apache.org/viewvc/cassandra/trunk/drivers/py/cql/cassandra/constants.py?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/drivers/py/cql/cassandra/constants.py (original)
+++ cassandra/trunk/drivers/py/cql/cassandra/constants.py Tue Sep 6 16:21:29 2011
@@ -7,4 +7,4 @@
from thrift.Thrift import *
from ttypes import *
-VERSION = "19.10.0"
+VERSION = "19.13.0"
Modified: cassandra/trunk/drivers/py/cql/cassandra/ttypes.py
URL: http://svn.apache.org/viewvc/cassandra/trunk/drivers/py/cql/cassandra/ttypes.py?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/drivers/py/cql/cassandra/ttypes.py (original)
+++ cassandra/trunk/drivers/py/cql/cassandra/ttypes.py Tue Sep 6 16:21:29 2011
@@ -113,13 +113,16 @@ class IndexOperator:
class IndexType:
KEYS = 0
+ CUSTOM = 1
_VALUES_TO_NAMES = {
0: "KEYS",
+ 1: "CUSTOM",
}
_NAMES_TO_VALUES = {
"KEYS": 0,
+ "CUSTOM": 1,
}
class Compression:
@@ -2201,6 +2204,7 @@ class ColumnDef:
- validation_class
- index_type
- index_name
+ - index_options
"""
thrift_spec = (
@@ -2209,13 +2213,15 @@ class ColumnDef:
(2, TType.STRING, 'validation_class', None, None, ), # 2
(3, TType.I32, 'index_type', None, None, ), # 3
(4, TType.STRING, 'index_name', None, None, ), # 4
+ (5, TType.MAP, 'index_options', (TType.STRING,None,TType.STRING,None), None, ), # 5
)
- def __init__(self, name=None, validation_class=None, index_type=None, index_name=None,):
+ def __init__(self, name=None, validation_class=None, index_type=None, index_name=None, index_options=None,):
self.name = name
self.validation_class = validation_class
self.index_type = index_type
self.index_name = index_name
+ self.index_options = index_options
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -2246,6 +2252,17 @@ class ColumnDef:
self.index_name = iprot.readString();
else:
iprot.skip(ftype)
+ elif fid == 5:
+ if ftype == TType.MAP:
+ self.index_options = {}
+ (_ktype52, _vtype53, _size51 ) = iprot.readMapBegin()
+ for _i55 in xrange(_size51):
+ _key56 = iprot.readString();
+ _val57 = iprot.readString();
+ self.index_options[_key56] = _val57
+ iprot.readMapEnd()
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -2272,6 +2289,14 @@ class ColumnDef:
oprot.writeFieldBegin('index_name', TType.STRING, 4)
oprot.writeString(self.index_name)
oprot.writeFieldEnd()
+ if self.index_options != None:
+ oprot.writeFieldBegin('index_options', TType.MAP, 5)
+ oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.index_options))
+ for kiter58,viter59 in self.index_options.items():
+ oprot.writeString(kiter58)
+ oprot.writeString(viter59)
+ oprot.writeMapEnd()
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
@@ -2313,7 +2338,6 @@ class CfDef:
- max_compaction_threshold
- row_cache_save_period_in_seconds
- key_cache_save_period_in_seconds
- - memtable_flush_after_mins
- memtable_throughput_in_mb
- memtable_operations_in_millions
- replicate_on_write
@@ -2321,6 +2345,10 @@ class CfDef:
- key_validation_class
- row_cache_provider
- key_alias
+ - compaction_strategy
+ - compaction_strategy_options
+ - row_cache_keys_to_save
+ - compression_options
"""
thrift_spec = (
@@ -2345,7 +2373,7 @@ class CfDef:
(18, TType.I32, 'max_compaction_threshold', None, None, ), # 18
(19, TType.I32, 'row_cache_save_period_in_seconds', None, None, ), # 19
(20, TType.I32, 'key_cache_save_period_in_seconds', None, None, ), # 20
- (21, TType.I32, 'memtable_flush_after_mins', None, None, ), # 21
+ None, # 21
(22, TType.I32, 'memtable_throughput_in_mb', None, None, ), # 22
(23, TType.DOUBLE, 'memtable_operations_in_millions', None, None, ), # 23
(24, TType.BOOL, 'replicate_on_write', None, None, ), # 24
@@ -2353,9 +2381,13 @@ class CfDef:
(26, TType.STRING, 'key_validation_class', None, None, ), # 26
(27, TType.STRING, 'row_cache_provider', None, "org.apache.cassandra.cache.ConcurrentLinkedHashCacheProvider", ), # 27
(28, TType.STRING, 'key_alias', None, None, ), # 28
+ (29, TType.STRING, 'compaction_strategy', None, None, ), # 29
+ (30, TType.MAP, 'compaction_strategy_options', (TType.STRING,None,TType.STRING,None), None, ), # 30
+ (31, TType.I32, 'row_cache_keys_to_save', None, None, ), # 31
+ (32, TType.MAP, 'compression_options', (TType.STRING,None,TType.STRING,None), None, ), # 32
)
- def __init__(self, keyspace=None, name=None, column_type=thrift_spec[3][4], comparator_type=thrift_spec[5][4], subcomparator_type=None, comment=None, row_cache_size=thrift_spec[9][4], key_cache_size=thrift_spec[11][4], read_repair_chance=thrift_spec[12][4], column_metadata=None, gc_grace_seconds=None, default_validation_class=None, id=None, min_compaction_threshold=None, max_compaction_threshold=None, row_cache_save_period_in_seconds=None, key_cache_save_period_in_seconds=None, memtable_flush_after_mins=None, memtable_throughput_in_mb=None, memtable_operations_in_millions=None, replicate_on_write=None, merge_shards_chance=None, key_validation_class=None, row_cache_provider=thrift_spec[27][4], key_alias=None,):
+ def __init__(self, keyspace=None, name=None, column_type=thrift_spec[3][4], comparator_type=thrift_spec[5][4], subcomparator_type=None, comment=None, row_cache_size=thrift_spec[9][4], key_cache_size=thrift_spec[11][4], read_repair_chance=thrift_spec[12][4], column_metadata=None, gc_grace_seconds=None, default_validation_class=None, id=None, min_compaction_threshold=None, max_compaction_threshold=None, row_cache_save_period_in_seconds=None, key_cache_save_period_in_seconds=None, memtable_throughput_in_mb=None, memtable_operations_in_millions=None, replicate_on_write=None, merge_shards_chance=None, key_validation_class=None, row_cache_provider=thrift_spec[27][4], key_alias=None, compaction_strategy=None, compaction_strategy_options=None, row_cache_keys_to_save=None, compression_options=None,):
self.keyspace = keyspace
self.name = name
self.column_type = column_type
@@ -2373,7 +2405,6 @@ class CfDef:
self.max_compaction_threshold = max_compaction_threshold
self.row_cache_save_period_in_seconds = row_cache_save_period_in_seconds
self.key_cache_save_period_in_seconds = key_cache_save_period_in_seconds
- self.memtable_flush_after_mins = memtable_flush_after_mins
self.memtable_throughput_in_mb = memtable_throughput_in_mb
self.memtable_operations_in_millions = memtable_operations_in_millions
self.replicate_on_write = replicate_on_write
@@ -2381,6 +2412,10 @@ class CfDef:
self.key_validation_class = key_validation_class
self.row_cache_provider = row_cache_provider
self.key_alias = key_alias
+ self.compaction_strategy = compaction_strategy
+ self.compaction_strategy_options = compaction_strategy_options
+ self.row_cache_keys_to_save = row_cache_keys_to_save
+ self.compression_options = compression_options
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -2439,11 +2474,11 @@ class CfDef:
elif fid == 13:
if ftype == TType.LIST:
self.column_metadata = []
- (_etype54, _size51) = iprot.readListBegin()
- for _i55 in xrange(_size51):
- _elem56 = ColumnDef()
- _elem56.read(iprot)
- self.column_metadata.append(_elem56)
+ (_etype63, _size60) = iprot.readListBegin()
+ for _i64 in xrange(_size60):
+ _elem65 = ColumnDef()
+ _elem65.read(iprot)
+ self.column_metadata.append(_elem65)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -2482,11 +2517,6 @@ class CfDef:
self.key_cache_save_period_in_seconds = iprot.readI32();
else:
iprot.skip(ftype)
- elif fid == 21:
- if ftype == TType.I32:
- self.memtable_flush_after_mins = iprot.readI32();
- else:
- iprot.skip(ftype)
elif fid == 22:
if ftype == TType.I32:
self.memtable_throughput_in_mb = iprot.readI32();
@@ -2522,6 +2552,38 @@ class CfDef:
self.key_alias = iprot.readString();
else:
iprot.skip(ftype)
+ elif fid == 29:
+ if ftype == TType.STRING:
+ self.compaction_strategy = iprot.readString();
+ else:
+ iprot.skip(ftype)
+ elif fid == 30:
+ if ftype == TType.MAP:
+ self.compaction_strategy_options = {}
+ (_ktype67, _vtype68, _size66 ) = iprot.readMapBegin()
+ for _i70 in xrange(_size66):
+ _key71 = iprot.readString();
+ _val72 = iprot.readString();
+ self.compaction_strategy_options[_key71] = _val72
+ iprot.readMapEnd()
+ else:
+ iprot.skip(ftype)
+ elif fid == 31:
+ if ftype == TType.I32:
+ self.row_cache_keys_to_save = iprot.readI32();
+ else:
+ iprot.skip(ftype)
+ elif fid == 32:
+ if ftype == TType.MAP:
+ self.compression_options = {}
+ (_ktype74, _vtype75, _size73 ) = iprot.readMapBegin()
+ for _i77 in xrange(_size73):
+ _key78 = iprot.readString();
+ _val79 = iprot.readString();
+ self.compression_options[_key78] = _val79
+ iprot.readMapEnd()
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -2571,8 +2633,8 @@ class CfDef:
if self.column_metadata != None:
oprot.writeFieldBegin('column_metadata', TType.LIST, 13)
oprot.writeListBegin(TType.STRUCT, len(self.column_metadata))
- for iter57 in self.column_metadata:
- iter57.write(oprot)
+ for iter80 in self.column_metadata:
+ iter80.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.gc_grace_seconds != None:
@@ -2603,10 +2665,6 @@ class CfDef:
oprot.writeFieldBegin('key_cache_save_period_in_seconds', TType.I32, 20)
oprot.writeI32(self.key_cache_save_period_in_seconds)
oprot.writeFieldEnd()
- if self.memtable_flush_after_mins != None:
- oprot.writeFieldBegin('memtable_flush_after_mins', TType.I32, 21)
- oprot.writeI32(self.memtable_flush_after_mins)
- oprot.writeFieldEnd()
if self.memtable_throughput_in_mb != None:
oprot.writeFieldBegin('memtable_throughput_in_mb', TType.I32, 22)
oprot.writeI32(self.memtable_throughput_in_mb)
@@ -2635,6 +2693,30 @@ class CfDef:
oprot.writeFieldBegin('key_alias', TType.STRING, 28)
oprot.writeString(self.key_alias)
oprot.writeFieldEnd()
+ if self.compaction_strategy != None:
+ oprot.writeFieldBegin('compaction_strategy', TType.STRING, 29)
+ oprot.writeString(self.compaction_strategy)
+ oprot.writeFieldEnd()
+ if self.compaction_strategy_options != None:
+ oprot.writeFieldBegin('compaction_strategy_options', TType.MAP, 30)
+ oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.compaction_strategy_options))
+ for kiter81,viter82 in self.compaction_strategy_options.items():
+ oprot.writeString(kiter81)
+ oprot.writeString(viter82)
+ oprot.writeMapEnd()
+ oprot.writeFieldEnd()
+ if self.row_cache_keys_to_save != None:
+ oprot.writeFieldBegin('row_cache_keys_to_save', TType.I32, 31)
+ oprot.writeI32(self.row_cache_keys_to_save)
+ oprot.writeFieldEnd()
+ if self.compression_options != None:
+ oprot.writeFieldBegin('compression_options', TType.MAP, 32)
+ oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.compression_options))
+ for kiter83,viter84 in self.compression_options.items():
+ oprot.writeString(kiter83)
+ oprot.writeString(viter84)
+ oprot.writeMapEnd()
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
@@ -2662,8 +2744,9 @@ class KsDef:
- name
- strategy_class
- strategy_options
- - replication_factor: deprecated
+ - replication_factor: @deprecated
- cf_defs
+ - durable_writes
"""
thrift_spec = (
@@ -2673,14 +2756,16 @@ class KsDef:
(3, TType.MAP, 'strategy_options', (TType.STRING,None,TType.STRING,None), None, ), # 3
(4, TType.I32, 'replication_factor', None, None, ), # 4
(5, TType.LIST, 'cf_defs', (TType.STRUCT,(CfDef, CfDef.thrift_spec)), None, ), # 5
+ (6, TType.BOOL, 'durable_writes', None, True, ), # 6
)
- def __init__(self, name=None, strategy_class=None, strategy_options=None, replication_factor=None, cf_defs=None,):
+ def __init__(self, name=None, strategy_class=None, strategy_options=None, replication_factor=None, cf_defs=None, durable_writes=thrift_spec[6][4],):
self.name = name
self.strategy_class = strategy_class
self.strategy_options = strategy_options
self.replication_factor = replication_factor
self.cf_defs = cf_defs
+ self.durable_writes = durable_writes
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -2704,11 +2789,11 @@ class KsDef:
elif fid == 3:
if ftype == TType.MAP:
self.strategy_options = {}
- (_ktype59, _vtype60, _size58 ) = iprot.readMapBegin()
- for _i62 in xrange(_size58):
- _key63 = iprot.readString();
- _val64 = iprot.readString();
- self.strategy_options[_key63] = _val64
+ (_ktype86, _vtype87, _size85 ) = iprot.readMapBegin()
+ for _i89 in xrange(_size85):
+ _key90 = iprot.readString();
+ _val91 = iprot.readString();
+ self.strategy_options[_key90] = _val91
iprot.readMapEnd()
else:
iprot.skip(ftype)
@@ -2720,14 +2805,19 @@ class KsDef:
elif fid == 5:
if ftype == TType.LIST:
self.cf_defs = []
- (_etype68, _size65) = iprot.readListBegin()
- for _i69 in xrange(_size65):
- _elem70 = CfDef()
- _elem70.read(iprot)
- self.cf_defs.append(_elem70)
+ (_etype95, _size92) = iprot.readListBegin()
+ for _i96 in xrange(_size92):
+ _elem97 = CfDef()
+ _elem97.read(iprot)
+ self.cf_defs.append(_elem97)
iprot.readListEnd()
else:
iprot.skip(ftype)
+ elif fid == 6:
+ if ftype == TType.BOOL:
+ self.durable_writes = iprot.readBool();
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -2749,9 +2839,9 @@ class KsDef:
if self.strategy_options != None:
oprot.writeFieldBegin('strategy_options', TType.MAP, 3)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.strategy_options))
- for kiter71,viter72 in self.strategy_options.items():
- oprot.writeString(kiter71)
- oprot.writeString(viter72)
+ for kiter98,viter99 in self.strategy_options.items():
+ oprot.writeString(kiter98)
+ oprot.writeString(viter99)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.replication_factor != None:
@@ -2761,10 +2851,14 @@ class KsDef:
if self.cf_defs != None:
oprot.writeFieldBegin('cf_defs', TType.LIST, 5)
oprot.writeListBegin(TType.STRUCT, len(self.cf_defs))
- for iter73 in self.cf_defs:
- iter73.write(oprot)
+ for iter100 in self.cf_defs:
+ iter100.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
+ if self.durable_writes != None:
+ oprot.writeFieldBegin('durable_writes', TType.BOOL, 6)
+ oprot.writeBool(self.durable_writes)
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
@@ -2824,11 +2918,11 @@ class CqlRow:
elif fid == 2:
if ftype == TType.LIST:
self.columns = []
- (_etype77, _size74) = iprot.readListBegin()
- for _i78 in xrange(_size74):
- _elem79 = Column()
- _elem79.read(iprot)
- self.columns.append(_elem79)
+ (_etype104, _size101) = iprot.readListBegin()
+ for _i105 in xrange(_size101):
+ _elem106 = Column()
+ _elem106.read(iprot)
+ self.columns.append(_elem106)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -2849,8 +2943,8 @@ class CqlRow:
if self.columns != None:
oprot.writeFieldBegin('columns', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.columns))
- for iter80 in self.columns:
- iter80.write(oprot)
+ for iter107 in self.columns:
+ iter107.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -2874,12 +2968,136 @@ class CqlRow:
def __ne__(self, other):
return not (self == other)
+class CqlMetadata:
+ """
+ Attributes:
+ - name_types
+ - value_types
+ - default_name_type
+ - default_value_type
+ """
+
+ thrift_spec = (
+ None, # 0
+ (1, TType.MAP, 'name_types', (TType.STRING,None,TType.STRING,None), None, ), # 1
+ (2, TType.MAP, 'value_types', (TType.STRING,None,TType.STRING,None), None, ), # 2
+ (3, TType.STRING, 'default_name_type', None, None, ), # 3
+ (4, TType.STRING, 'default_value_type', None, None, ), # 4
+ )
+
+ def __init__(self, name_types=None, value_types=None, default_name_type=None, default_value_type=None,):
+ self.name_types = name_types
+ self.value_types = value_types
+ self.default_name_type = default_name_type
+ self.default_value_type = default_value_type
+
+ def read(self, iprot):
+ if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+ fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+ return
+ iprot.readStructBegin()
+ while True:
+ (fname, ftype, fid) = iprot.readFieldBegin()
+ if ftype == TType.STOP:
+ break
+ if fid == 1:
+ if ftype == TType.MAP:
+ self.name_types = {}
+ (_ktype109, _vtype110, _size108 ) = iprot.readMapBegin()
+ for _i112 in xrange(_size108):
+ _key113 = iprot.readString();
+ _val114 = iprot.readString();
+ self.name_types[_key113] = _val114
+ iprot.readMapEnd()
+ else:
+ iprot.skip(ftype)
+ elif fid == 2:
+ if ftype == TType.MAP:
+ self.value_types = {}
+ (_ktype116, _vtype117, _size115 ) = iprot.readMapBegin()
+ for _i119 in xrange(_size115):
+ _key120 = iprot.readString();
+ _val121 = iprot.readString();
+ self.value_types[_key120] = _val121
+ iprot.readMapEnd()
+ else:
+ iprot.skip(ftype)
+ elif fid == 3:
+ if ftype == TType.STRING:
+ self.default_name_type = iprot.readString();
+ else:
+ iprot.skip(ftype)
+ elif fid == 4:
+ if ftype == TType.STRING:
+ self.default_value_type = iprot.readString();
+ else:
+ iprot.skip(ftype)
+ else:
+ iprot.skip(ftype)
+ iprot.readFieldEnd()
+ iprot.readStructEnd()
+
+ def write(self, oprot):
+ if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+ oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+ return
+ oprot.writeStructBegin('CqlMetadata')
+ if self.name_types != None:
+ oprot.writeFieldBegin('name_types', TType.MAP, 1)
+ oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.name_types))
+ for kiter122,viter123 in self.name_types.items():
+ oprot.writeString(kiter122)
+ oprot.writeString(viter123)
+ oprot.writeMapEnd()
+ oprot.writeFieldEnd()
+ if self.value_types != None:
+ oprot.writeFieldBegin('value_types', TType.MAP, 2)
+ oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.value_types))
+ for kiter124,viter125 in self.value_types.items():
+ oprot.writeString(kiter124)
+ oprot.writeString(viter125)
+ oprot.writeMapEnd()
+ oprot.writeFieldEnd()
+ if self.default_name_type != None:
+ oprot.writeFieldBegin('default_name_type', TType.STRING, 3)
+ oprot.writeString(self.default_name_type)
+ oprot.writeFieldEnd()
+ if self.default_value_type != None:
+ oprot.writeFieldBegin('default_value_type', TType.STRING, 4)
+ oprot.writeString(self.default_value_type)
+ oprot.writeFieldEnd()
+ oprot.writeFieldStop()
+ oprot.writeStructEnd()
+ def validate(self):
+ if self.name_types is None:
+ raise TProtocol.TProtocolException(message='Required field name_types is unset!')
+ if self.value_types is None:
+ raise TProtocol.TProtocolException(message='Required field value_types is unset!')
+ if self.default_name_type is None:
+ raise TProtocol.TProtocolException(message='Required field default_name_type is unset!')
+ if self.default_value_type is None:
+ raise TProtocol.TProtocolException(message='Required field default_value_type is unset!')
+ return
+
+
+ def __repr__(self):
+ L = ['%s=%r' % (key, value)
+ for key, value in self.__dict__.iteritems()]
+ return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+ def __ne__(self, other):
+ return not (self == other)
+
class CqlResult:
"""
Attributes:
- type
- rows
- num
+ - schema
"""
thrift_spec = (
@@ -2887,12 +3105,14 @@ class CqlResult:
(1, TType.I32, 'type', None, None, ), # 1
(2, TType.LIST, 'rows', (TType.STRUCT,(CqlRow, CqlRow.thrift_spec)), None, ), # 2
(3, TType.I32, 'num', None, None, ), # 3
+ (4, TType.STRUCT, 'schema', (CqlMetadata, CqlMetadata.thrift_spec), None, ), # 4
)
- def __init__(self, type=None, rows=None, num=None,):
+ def __init__(self, type=None, rows=None, num=None, schema=None,):
self.type = type
self.rows = rows
self.num = num
+ self.schema = schema
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -2911,11 +3131,11 @@ class CqlResult:
elif fid == 2:
if ftype == TType.LIST:
self.rows = []
- (_etype84, _size81) = iprot.readListBegin()
- for _i85 in xrange(_size81):
- _elem86 = CqlRow()
- _elem86.read(iprot)
- self.rows.append(_elem86)
+ (_etype129, _size126) = iprot.readListBegin()
+ for _i130 in xrange(_size126):
+ _elem131 = CqlRow()
+ _elem131.read(iprot)
+ self.rows.append(_elem131)
iprot.readListEnd()
else:
iprot.skip(ftype)
@@ -2924,6 +3144,12 @@ class CqlResult:
self.num = iprot.readI32();
else:
iprot.skip(ftype)
+ elif fid == 4:
+ if ftype == TType.STRUCT:
+ self.schema = CqlMetadata()
+ self.schema.read(iprot)
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -2941,14 +3167,18 @@ class CqlResult:
if self.rows != None:
oprot.writeFieldBegin('rows', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.rows))
- for iter87 in self.rows:
- iter87.write(oprot)
+ for iter132 in self.rows:
+ iter132.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.num != None:
oprot.writeFieldBegin('num', TType.I32, 3)
oprot.writeI32(self.num)
oprot.writeFieldEnd()
+ if self.schema != None:
+ oprot.writeFieldBegin('schema', TType.STRUCT, 4)
+ self.schema.write(oprot)
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
Modified: cassandra/trunk/drivers/py/cql/cursor.py
URL: http://svn.apache.org/viewvc/cassandra/trunk/drivers/py/cql/cursor.py?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/drivers/py/cql/cursor.py (original)
+++ cassandra/trunk/drivers/py/cql/cursor.py Tue Sep 6 16:21:29 2011
@@ -53,10 +53,7 @@ class Cursor:
self.arraysize = 1
self.rowcount = -1 # Populate on execute()
self.compression = 'GZIP'
-
- self._query_ks = self._connection.keyspace
- self._query_cf = None
- self.decoder = SchemaDecoder(self.__get_schema())
+ self.decoder = None
###
# Cursor API
@@ -66,49 +63,7 @@ class Cursor:
self.open_socket = False
def prepare(self, query, params):
- prepared_query = prepare(query, params)
- self._schema_update_needed = False
-
- # Snag the keyspace or column family and stash it for later use in
- # decoding columns. These regexes don't match every query, but the
- # current column family only needs to be current for SELECTs.
- match = Cursor._cfamily_re.match(prepared_query)
- if match:
- self._query_cf = match.group(1)
- return prepared_query
- match = Cursor._keyspace_re.match(prepared_query)
- if match:
- self._query_ks = match.group(1)
- return prepared_query
-
- # If this is a CREATE, then refresh the schema for decoding purposes.
- match = Cursor._ddl_re.match(prepared_query)
- if match:
- self._schema_update_needed = True
- return prepared_query
-
- def __get_schema(self):
- def columns(metadata):
- results = {}
- for col in metadata:
- results[col.name] = col.validation_class
- return results
-
- def column_families(cf_defs):
- d = {}
- for cf in cf_defs:
- d[cf.name] = {'comparator': cf.comparator_type,
- 'default_validation_class': cf.default_validation_class,
- 'key_validation_class': cf.key_validation_class,
- 'columns': columns(cf.column_metadata),
- 'key_alias': cf.key_alias}
- return d
-
- schema = {}
- client = self._connection.client
- for ksdef in client.describe_keyspaces():
- schema[ksdef.name] = column_families(ksdef.cf_defs)
- return schema
+ return prepare(query, params)
def execute(self, cql_query, params={}):
self.__checksock()
@@ -142,15 +97,13 @@ class Cursor:
except TApplicationException, tapp:
raise cql.InternalError("Internal application error")
- if self._schema_update_needed and isinstance(self.decoder, SchemaDecoder):
- self.decoder.schema = self.__get_schema()
-
if response.type == CqlResultType.ROWS:
+ self.decoder = SchemaDecoder(response.schema)
self.result = response.rows
self.rs_idx = 0
self.rowcount = len(self.result)
if self.result:
- self.description = self.decoder.decode_description(self._query_ks, self._query_cf, self.result[0])
+ self.description = self.decoder.decode_description(self.result[0])
elif response.type == CqlResultType.INT:
self.result = [(response.num,)]
self.rs_idx = 0
@@ -191,8 +144,8 @@ class Cursor:
if self.description == _COUNT_DESCRIPTION:
return row
else:
- self.description = self.decoder.decode_description(self._query_ks, self._query_cf, row)
- return self.decoder.decode_row(self._query_ks, self._query_cf, row)
+ self.description = self.decoder.decode_description(row)
+ return self.decoder.decode_row(row)
def fetchmany(self, size=None):
self.__checksock()
@@ -203,7 +156,7 @@ class Cursor:
while len(L) < size and self.rs_idx < len(self.result):
row = self.result[self.rs_idx]
self.rs_idx += 1
- L.append(self.decoder.decode_row(self._query_ks, self._query_cf, row))
+ L.append(self.decoder.decode_row(row))
return L
def fetchall(self):
Modified: cassandra/trunk/drivers/py/cql/decoders.py
URL: http://svn.apache.org/viewvc/cassandra/trunk/drivers/py/cql/decoders.py?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/drivers/py/cql/decoders.py (original)
+++ cassandra/trunk/drivers/py/cql/decoders.py Tue Sep 6 16:21:29 2011
@@ -22,47 +22,30 @@ class SchemaDecoder(object):
"""
Decode binary column names/values according to schema.
"""
- def __init__(self, schema={}):
+ def __init__(self, schema):
self.schema = schema
- def __get_column_family_def(self, keyspace, column_family):
- if keyspace in self.schema and column_family in self.schema[keyspace]:
- return self.schema[keyspace][column_family]
- return None
-
- def __comparator_for(self, keyspace, column_family):
- cfam = self.__get_column_family_def(keyspace, column_family)
- return cfam.get("comparator", None)
-
- def decode_description(self, keyspace, column_family, row):
+ def decode_description(self, row):
+ schema = self.schema
description = []
- comparator = self.__comparator_for(keyspace, column_family)
- unmarshal = unmarshallers.get(comparator, unmarshal_noop)
for column in row.columns:
- if column.name == self.__get_column_family_def(keyspace, column_family)['key_alias']:
- description.append((column.name, 'text', None, None, None, None, True))
- else:
- description.append((unmarshal(column.name), comparator, None, None, None, None, True))
+ name = column.name
+ comparator = schema.name_types.get(name, schema.default_name_type)
+ unmarshal = unmarshallers.get(comparator, unmarshal_noop)
+ description.append((unmarshal(name), comparator, None, None, None, None, True))
return description
- def decode_row(self, keyspace, column_family, row):
- cfdef = self.__get_column_family_def(keyspace, column_family)
- key_alias = cfdef['key_alias']
- validators = cfdef['columns']
- default_validator = cfdef['default_validation_class']
- key_validator = cfdef.get("key_validation_class", None)
-
+ def decode_row(self, row):
+ schema = self.schema
values = []
for column in row.columns:
- name = column.name
if column.value is None:
values.append(None)
continue
- if name == key_alias:
- validator = key_validator
- else:
- validator = validators.get(name, default_validator)
+ name = column.name
+ validator = schema.value_types.get(name, schema.default_value_type)
-            values.append(unmarshallers.get(validator, unmarshal_noop)(column.value))
+            unmarshal = unmarshallers.get(validator, unmarshal_noop)
+            values.append(unmarshal(column.value))
return values
Modified: cassandra/trunk/drivers/py/cql/marshal.py
URL: http://svn.apache.org/viewvc/cassandra/trunk/drivers/py/cql/marshal.py?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/drivers/py/cql/marshal.py (original)
+++ cassandra/trunk/drivers/py/cql/marshal.py Tue Sep 6 16:21:29 2011
@@ -86,6 +86,9 @@ unmarshallers = {BYTES_TYPE: un
LEXICAL_UUID_TYPE: unmarshal_uuid,
TIME_UUID_TYPE: unmarshal_uuid,
COUNTER_COLUMN_TYPE: unmarshal_long}
+for name, typ in unmarshallers.items():
+ short_name = name.split('.')[-1]
+ unmarshallers[short_name] = typ
def decode_bigint(term):
val = int(term.encode('hex'), 16)
Modified: cassandra/trunk/interface/cassandra.thrift
URL: http://svn.apache.org/viewvc/cassandra/trunk/interface/cassandra.thrift?rev=1165750&r1=1165749&r2=1165750&view=diff
==============================================================================
--- cassandra/trunk/interface/cassandra.thrift (original)
+++ cassandra/trunk/interface/cassandra.thrift Tue Sep 6 16:21:29 2011
@@ -46,7 +46,7 @@ namespace rb CassandraThrift
# for every edit that doesn't result in a change to major/minor.
#
# See the Semantic Versioning Specification (SemVer) http://semver.org.
-const string VERSION = "19.12.0"
+const string VERSION = "19.13.0"
#
@@ -433,10 +433,18 @@ struct CqlRow {
2: required list<Column> columns
}
+struct CqlMetadata {
+ 1: required map<binary,string> name_types,
+ 2: required map<binary,string> value_types,
+ 3: required string default_name_type,
+ 4: required string default_value_type
+}
+
struct CqlResult {
1: required CqlResultType type,
2: optional list<CqlRow> rows,
- 3: optional i32 num
+ 3: optional i32 num,
+ 4: optional CqlMetadata schema
}
service Cassandra {