You are viewing a plain-text version of this content; the canonical (HTML) version is available at the original archive location.
Posted to solr-commits@lucene.apache.org by sh...@apache.org on 2008/12/13 18:38:00 UTC
svn commit: r726241 - in /lucene/solr/trunk/contrib/dataimporthandler:
CHANGES.txt
src/main/java/org/apache/solr/handler/dataimport/DataImporter.java
src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
Author: shalin
Date: Sat Dec 13 09:38:00 2008
New Revision: 726241
URL: http://svn.apache.org/viewvc?rev=726241&view=rev
Log:
SOLR-812 -- Configurable JDBC settings in JdbcDataSource including optimized defaults for read only mode
Modified:
lucene/solr/trunk/contrib/dataimporthandler/CHANGES.txt
lucene/solr/trunk/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java
lucene/solr/trunk/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
Modified: lucene/solr/trunk/contrib/dataimporthandler/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/solr/trunk/contrib/dataimporthandler/CHANGES.txt?rev=726241&r1=726240&r2=726241&view=diff
==============================================================================
--- lucene/solr/trunk/contrib/dataimporthandler/CHANGES.txt (original)
+++ lucene/solr/trunk/contrib/dataimporthandler/CHANGES.txt Sat Dec 13 09:38:00 2008
@@ -41,6 +41,9 @@
7. SOLR-891: A Transformer to read strings from Clob type.
(Noble Paul via shalin)
+8. SOLR-812: Configurable JDBC settings in JdbcDataSource including optimized defaults for read only mode.
+ (David Smiley, Glen Newton, shalin)
+
Optimizations
----------------------
1. SOLR-846: Reduce memory consumption during delta import by removing keys when used
Modified: lucene/solr/trunk/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java
URL: http://svn.apache.org/viewvc/lucene/solr/trunk/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java?rev=726241&r1=726240&r2=726241&view=diff
==============================================================================
--- lucene/solr/trunk/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java (original)
+++ lucene/solr/trunk/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/DataImporter.java Sat Dec 13 09:38:00 2008
@@ -19,7 +19,6 @@
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
-import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.slf4j.Logger;
@@ -37,9 +36,7 @@
import java.util.concurrent.locks.ReentrantLock;
/**
- * <p>
- * Stores all configuration information for pulling and indexing data.
- * </p>
+ * <p> Stores all configuration information for pulling and indexing data. </p>
* <p/>
* <b>This API is experimental and subject to change</b>
*
@@ -129,7 +126,7 @@
SchemaField field = schema.getFieldOrNull(fld.getName());
if (field == null) {
field = config.lowerNameVsSchemaField.get(fld.getName().toLowerCase());
- if (field == null) {
+ if (field == null) {
errors.add("The field :" + fld.getName() + " present in DataConfig does not have a counterpart in Solr Schema");
}
}
@@ -147,13 +144,14 @@
}
- /**Used by tests
+ /**
+ * Used by tests
*/
- void loadAndInit(String configStr){
+ void loadAndInit(String configStr) {
loadDataConfig(configStr);
Map<String, DataConfig.Field> fields = new HashMap<String, DataConfig.Field>();
DataConfig.Entity e = getConfig().documents.get(0).entities.get(0);
- initEntity(e, fields, false);
+ initEntity(e, fields, false);
}
void loadDataConfig(String configFile) {
@@ -191,9 +189,9 @@
for (DataConfig.Field f : e.fields) {
if (schema != null) {
SchemaField schemaField = schema.getFieldOrNull(f.getName());
- if (schemaField == null) {
+ if (schemaField == null) {
schemaField = config.lowerNameVsSchemaField.get(f.getName().toLowerCase());
- if(schemaField != null) f.name = schemaField.getName();
+ if (schemaField != null) f.name = schemaField.getName();
}
if (schemaField != null) {
f.multiValued = schemaField.multiValued();
@@ -282,6 +280,14 @@
try {
Properties copyProps = new Properties();
copyProps.putAll(p);
+ Map<String, Object> map = ctx.getRequestParameters();
+ if (map.containsKey("rows")) {
+ int rows = Integer.parseInt((String) map.get("rows"));
+ if (map.containsKey("start")) {
+ rows += Integer.parseInt((String) map.get("start"));
+ }
+ copyProps.setProperty("maxRows", String.valueOf(rows));
+ }
dataSrc.init(ctx, copyProps);
} catch (Exception e) {
throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
Modified: lucene/solr/trunk/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
URL: http://svn.apache.org/viewvc/lucene/solr/trunk/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java?rev=726241&r1=726240&r2=726241&view=diff
==============================================================================
--- lucene/solr/trunk/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java (original)
+++ lucene/solr/trunk/contrib/dataimporthandler/src/main/java/org/apache/solr/handler/dataimport/JdbcDataSource.java Sat Dec 13 09:38:00 2008
@@ -17,23 +17,17 @@
package org.apache.solr.handler.dataimport;
import org.apache.solr.common.SolrException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.sql.*;
import java.util.*;
import java.util.concurrent.Callable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
- * <p>
- * A DataSource implementation which can fetch data using JDBC.
- * </p>
- * <p/>
- * <p>
- * Refer to <a
- * href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache.org/solr/DataImportHandler</a>
- * for more details.
- * </p>
+ * <p> A DataSource implementation which can fetch data using JDBC. </p> <p/> <p> Refer to <a
+ * href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache.org/solr/DataImportHandler</a> for more
+ * details. </p>
* <p/>
* <b>This API is experimental and may change in the future.</b>
*
@@ -56,6 +50,8 @@
private int batchSize = FETCH_SIZE;
+ private int maxRows = 0;
+
public void init(Context context, Properties initProps) {
Object o = initProps.get(CONVERT_TYPE);
if (o != null)
@@ -70,7 +66,7 @@
if (batchSize == -1)
batchSize = Integer.MIN_VALUE;
} catch (NumberFormatException e) {
- LOG.warn( "Invalid batch size: " + bsz);
+ LOG.warn("Invalid batch size: " + bsz);
}
}
@@ -114,6 +110,11 @@
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Driver must be specified");
}
+ String s = initProps.getProperty("maxRows");
+ if (s != null) {
+ maxRows = Integer.parseInt(s);
+ }
+
factory = new Callable<Connection>() {
public Connection call() throws Exception {
LOG.info("Creating a connection for entity "
@@ -123,6 +124,34 @@
Connection c = null;
try {
c = DriverManager.getConnection(url, initProps);
+ if (Boolean.parseBoolean(initProps.getProperty("readOnly"))) {
+ c.setReadOnly(true);
+ // Add other sane defaults
+ c.setAutoCommit(true);
+ c.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED);
+ c.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT);
+ }
+ if (!Boolean.parseBoolean(initProps.getProperty("autoCommit"))) {
+ c.setAutoCommit(false);
+ }
+ String transactionIsolation = initProps.getProperty("transactionIsolation");
+ if ("TRANSACTION_READ_UNCOMMITTED".equals(transactionIsolation)) {
+ c.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED);
+ } else if ("TRANSACTION_READ_COMMITTED".equals(transactionIsolation)) {
+ c.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
+ } else if ("TRANSACTION_REPEATABLE_READ".equals(transactionIsolation)) {
+ c.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ);
+ } else if ("TRANSACTION_SERIALIZABLE".equals(transactionIsolation)) {
+ c.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE);
+ } else if ("TRANSACTION_NONE".equals(transactionIsolation)) {
+ c.setTransactionIsolation(Connection.TRANSACTION_NONE);
+ }
+ String holdability = initProps.getProperty("holdability");
+ if ("CLOSE_CURSORS_AT_COMMIT".equals(holdability)) {
+ c.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT);
+ } else {
+ c.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+ }
} catch (SQLException e) {
// DriverManager does not allow you to use a driver which is not loaded through
// the class loader of the class which is trying to make the connection.
@@ -144,7 +173,7 @@
}
private void logError(String msg, Exception e) {
- LOG.warn( msg, e);
+ LOG.warn(msg, e);
}
private List<String> readFieldNames(ResultSetMetaData metaData)
@@ -170,9 +199,9 @@
try {
Connection c = getConnection();
- stmt = c.createStatement(ResultSet.TYPE_FORWARD_ONLY,
- ResultSet.CONCUR_READ_ONLY);
+ stmt = c.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
stmt.setFetchSize(batchSize);
+ stmt.setMaxRows(maxRows);
LOG.debug("Executing SQL: " + query);
long start = System.currentTimeMillis();
if (stmt.execute(query)) {