You are viewing a plain text version of this content. The canonical link for it is here.
Posted to xindice-dev@xml.apache.org by vg...@apache.org on 2007/04/04 00:09:05 UTC
svn commit: r525304 - in /xml/xindice/trunk: bin/xindice_rebuild
java/src/org/apache/xindice/core/filer/HashFiler.java
java/src/org/apache/xindice/tools/DatabaseRebuild.java status.xml
Author: vgritsenko
Date: Tue Apr 3 15:09:04 2007
New Revision: 525304
URL: http://svn.apache.org/viewvc?view=rev&rev=525304
Log:
<action dev="VG" type="update" fixes-bug="41854" due-to="Natalia Shilenkova">
Add support for HashFiler in database rebuild tool.
</action>
Added:
xml/xindice/trunk/bin/xindice_rebuild (with props)
Modified:
xml/xindice/trunk/java/src/org/apache/xindice/core/filer/HashFiler.java
xml/xindice/trunk/java/src/org/apache/xindice/tools/DatabaseRebuild.java
xml/xindice/trunk/status.xml
Added: xml/xindice/trunk/bin/xindice_rebuild
URL: http://svn.apache.org/viewvc/xml/xindice/trunk/bin/xindice_rebuild?view=auto&rev=525304
==============================================================================
--- xml/xindice/trunk/bin/xindice_rebuild (added)
+++ xml/xindice/trunk/bin/xindice_rebuild Tue Apr 3 15:09:04 2007
@@ -0,0 +1,84 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# $Id: xindice 523149 2007-03-28 02:53:46Z vgritsenko $
+
+# -----------------------------------------------------------------------------
+# Xindice @VERSION@ Database Rebuild Tool Unix Shell Script
+# -----------------------------------------------------------------------------
+
+# ----- OS specific support ---------------------------------------------------
+
+cygwin=false
+darwin=false
+case "`uname`" in
+  CYGWIN*) cygwin=true ;;
+  Darwin*) darwin=true
+           # On Mac OS X the JDK lives in a well-known framework location
+           if [ -z "$JAVA_HOME" ] ; then
+             JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Home
+           fi
+           ;;
+esac
+
+# ----- Verify and Set Required Environment Variables -------------------------
+
+if [ -z "$JAVA_HOME" ] ; then
+  echo "You must set JAVA_HOME to point at your Java Development Kit installation"
+  exit 1
+fi
+
+if [ -z "$XINDICE_HOME" ] ; then
+  # Default to the parent of the directory containing this script.
+  # Quote "$0" so installation paths containing spaces still work.
+  XINDICE_HOME=`dirname "$0"`/..
+  # Sanity check: the installation directory must contain the Xindice jar.
+  if [ ! -f "$XINDICE_HOME"/xindice-1*.jar ] ; then
+    echo "ERROR: You must set XINDICE_HOME to point at your"
+    echo "Xindice installation directory."
+    exit 2
+  fi
+fi
+
+if [ -z "$LOGGER" ] ; then LOGGER=org.apache.commons.logging.impl.SimpleLog ; fi
+if [ -z "$LOGLEVEL" ] ; then LOGLEVEL=INFO ; fi
+
+
+# ----- Set Classpath ---------------------------------------------------------
+
+# Use shell globbing directly instead of `ls` so jar paths are not word-split.
+CP=
+for i in "$XINDICE_HOME"/lib/*.jar ; do CP=$CP:$i ; done
+for i in "$XINDICE_HOME"/xindice*.jar ; do CP=$CP:$i ; done
+
+# Optional third argument "-backup" copies the database directory (second
+# argument) aside before the rebuild touches it.
+if [ "$3" = "-backup" ]; then
+  if [ -n "$2" -a -e "${2}.backup" ]; then
+    echo "Cannot back up a database. ${2}.backup already exists"
+    exit 1
+  fi
+
+  if [ -n "$2" -a -d "$2" ]; then
+    echo "Creating backup..."
+    cp -r "$2" "${2}.backup"
+  fi
+fi
+
+# ----- Run Tools -------------------------------------------------------------
+
+JAVACMD="$JAVA_HOME"/bin/java
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin; then
+  CP=`cygpath --path --windows "$CP"`
+  XINDICE_HOME=`cygpath --path --windows "$XINDICE_HOME"`
+fi
+
+# Make sure the log directory exists (a fresh install may not have it, which
+# would make the stderr redirection below fail), then pass all arguments
+# through unmodified -- "$@" preserves arguments with spaces where $* would not.
+mkdir -p "$XINDICE_HOME"/logs
+"$JAVACMD" -Xms16m -Xmx128m -Dorg.apache.commons.logging.Log="$LOGGER" -Dorg.apache.commons.logging.simplelog.defaultlog="$LOGLEVEL" -classpath "$CP" org.apache.xindice.tools.DatabaseRebuild "$@" 2>>"$XINDICE_HOME"/logs/rebuild.log
Propchange: xml/xindice/trunk/bin/xindice_rebuild
------------------------------------------------------------------------------
svn:executable = *
Modified: xml/xindice/trunk/java/src/org/apache/xindice/core/filer/HashFiler.java
URL: http://svn.apache.org/viewvc/xml/xindice/trunk/java/src/org/apache/xindice/core/filer/HashFiler.java?view=diff&rev=525304&r1=525303&r2=525304
==============================================================================
--- xml/xindice/trunk/java/src/org/apache/xindice/core/filer/HashFiler.java (original)
+++ xml/xindice/trunk/java/src/org/apache/xindice/core/filer/HashFiler.java Tue Apr 3 15:09:04 2007
@@ -71,8 +71,8 @@
* @deprecated This class has been temporarily deprecated by BTreeFiler.
* @version $Revision$, $Date$
*/
-public final class HashFiler extends Paged
- implements Filer {
+public class HashFiler extends Paged
+ implements Filer {
private static final Log log = LogFactory.getLog(HashFiler.class);
@@ -426,7 +426,7 @@
/**
* HashPageHeader
*/
- private final class HashPageHeader extends PageHeader {
+ protected final class HashPageHeader extends PageHeader {
private long created = 0;
private long modified = 0;
private long nextCollision = NO_PAGE;
Modified: xml/xindice/trunk/java/src/org/apache/xindice/tools/DatabaseRebuild.java
URL: http://svn.apache.org/viewvc/xml/xindice/trunk/java/src/org/apache/xindice/tools/DatabaseRebuild.java?view=diff&rev=525304&r1=525303&r2=525304
==============================================================================
--- xml/xindice/trunk/java/src/org/apache/xindice/tools/DatabaseRebuild.java (original)
+++ xml/xindice/trunk/java/src/org/apache/xindice/tools/DatabaseRebuild.java Tue Apr 3 15:09:04 2007
@@ -19,195 +19,360 @@
package org.apache.xindice.tools;
-import org.apache.xindice.xml.dom.DOMParser;
-import org.apache.xindice.core.filer.BTreeFiler;
-import org.apache.xindice.core.filer.BTreeCallback;
-import org.apache.xindice.core.data.Value;
-import org.apache.xindice.core.data.Key;
-import org.apache.xindice.core.Database;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.xindice.core.Collection;
import org.apache.xindice.core.DBException;
+import org.apache.xindice.core.Database;
+import org.apache.xindice.core.data.Key;
+import org.apache.xindice.core.data.Value;
+import org.apache.xindice.core.filer.BTreeCallback;
+import org.apache.xindice.core.filer.BTreeException;
+import org.apache.xindice.core.filer.BTreeFiler;
+import org.apache.xindice.core.filer.Filer;
+import org.apache.xindice.core.filer.HashFiler;
import org.apache.xindice.core.indexer.Indexer;
import org.apache.xindice.util.Configuration;
import org.apache.xindice.util.XindiceException;
+import org.apache.xindice.xml.dom.DOMParser;
+import org.apache.xindice.server.Xindice;
import java.io.File;
import java.io.IOException;
/**
- * Command line utility to re-build all btree filers of the database.
+ * Command line utility to re-build all btree and hash filer based collections
+ * of the database, and re-index.
*
* @version $Revision$, $Date$
*/
public class DatabaseRebuild {
+ private static final Log log = LogFactory.getLog(DatabaseRebuild.class);
+
+ private static final int CMD_ALL = 0;
private static final int CMD_COPY = 1;
private static final int CMD_INDEX = 2;
- private static Database db;
- private static String path;
- private static boolean removeBackupFiles;
- private static int command;
+ private static final char NOT_RAN = '_';
+ private static final char SUCCESS = '*';
+ private static final char ERROR = '!';
public static void main(String[] args) throws Exception {
- if (args.length < 2 || !("copy".equals(args[0]) || "index".equals(args[0])) ||
- args[1] == null || args[1].length() == 0) {
+ if (args.length < 2 || args[1] == null || args[1].length() == 0 ||
+ !("copy".equals(args[0]) || "index".equals(args[0]) || "rebuild".equals(args[0]))) {
usage();
- return;
+ System.exit(1);
+ }
+
+ int command;
+ if ("copy".equals(args[0])) {
+ command = CMD_COPY;
+ } else if ("index".equals(args[0])) {
+ command = CMD_INDEX;
+ } else {
+ command = CMD_ALL;
}
- command = "copy".equals(args[0]) ? CMD_COPY : CMD_INDEX;
String dbLocation = args[1];
- if (args.length > 2 && args[2].equals("--remove")) {
- removeBackupFiles = true;
+
+ File location = new File(dbLocation);
+ dbLocation = location.getAbsolutePath();
+ String name = location.getName();
+
+ if ("".equals(name) || !location.exists() || !location.isDirectory()) {
+ System.out.println("Database path must point to existing database directory");
+ System.exit(1);
}
- String config = "<root-collection dbroot='" + dbLocation + "/' name='" + dbLocation + "'/>";
- db = new Database();
+ // create minimal database configuration instead of trying to locate system.xml
+ String config = "<root-collection dbroot='" + dbLocation + "/' name='" + name + "'/>";
+ Database db = new Database();
+ boolean status = true;
try {
+ System.out.println();
+ System.out.println("CI\tCollection name");
+
db.setConfig(new Configuration(DOMParser.toDocument(config)));
- path = db.getCollectionRoot().getPath();
- processChildCollections("/");
+ if (log.isInfoEnabled()) {
+ log.info("Rebuilding collections...");
+ }
+ status = processCollection(db, command);
} finally {
db.close();
+
+ System.out.println();
+ if (status) {
+                System.out.println("Rebuilding database was successful");
+ } else {
+ System.out.println("Rebuilding database failed. Please check logs for more detail");
+ }
+
+ System.exit(status ? 0 : 2);
}
+
}
private static void usage() {
- System.out.println("Commands:");
- System.out.println("copy <db location> [--remove]");
- System.out.println("index <db location>");
+ System.out.println("Xindice " + Xindice.Version + " Database Rebuild Utility");
+ System.out.println("Usage:");
+ System.out.println("\txindice_rebuild copy <db location>");
+ System.out.println("\txindice_rebuild index <db location>");
+ System.out.println("\txindice_rebuild rebuild <db location>");
+ System.out.println();
+ System.out.println("DB Location should point to the directory containing Xindice database files.");
+ System.out.println();
+ System.out.println("Important: Shutdown and backup database before proceeding!");
+ System.out.println();
}
- private static void processChildCollections(String colRoot) {
- System.out.println("Getting child collections for " + colRoot);
+ private static boolean processCollection(Collection col, int command) {
+ String name = col.getCanonicalName();
+ boolean status;
+
try {
- // Get a Collection reference
- Collection col = db.getCollection(colRoot);
- if (col == null) {
- System.out.println("Error fetching collection '" + colRoot + "'");
- return;
+ if (log.isInfoEnabled()) {
+ log.info("Processing collection " + name);
+ }
+
+ char copy = NOT_RAN;
+ char index = NOT_RAN;
+ switch (command) {
+ case CMD_COPY:
+ status = rebuildCollection(col);
+ copy = status ? SUCCESS : ERROR;
+ break;
+
+ case CMD_INDEX:
+ status = rebuildIndex(col);
+ index = status ? SUCCESS : ERROR;
+ break;
+ default:
+ status = rebuildCollection(col);
+ copy = status ? SUCCESS : ERROR;
+ if (status) {
+ status = rebuildIndex(col);
+ index = status ? SUCCESS : ERROR;
+ }
+ break;
}
- processCollection(col, colRoot);
+ System.out.println(String.valueOf(copy) + String.valueOf(index) + "\t" + name);
String[] colNames = col.listCollections();
for (int i = 0; i < colNames.length; i++) {
- processChildCollections(colRoot + colNames[i] + "/");
+ boolean result = processCollection(col.getCollection(colNames[i]), command);
+ status = status && result;
}
- } catch (Exception e) {
- System.out.println("Got an excefption when processing collection: " + colRoot);
- e.printStackTrace();
+ } catch (DBException e) {
+ log.error("Got an exception when processing collection " + name, e);
+
+ return false;
}
+
+ return status;
}
- private static void processCollection(Collection col, String location) throws XindiceException, IOException {
- switch (command) {
- case CMD_INDEX:
- rebuildIndex(col);
- break;
+ private static boolean rebuildCollection(Collection col) {
+ String canonicalName = col.getCanonicalName();
- case CMD_COPY:
- rebuildCollection(col, location, col.getName());
- break;
+ // close collection's filer
+ try {
+ if (col.getFiler() != null) {
+ col.getFiler().close();
+ }
+ } catch (DBException e) {
+ log.error("Could not close filer for collection " + canonicalName, e);
+ return false;
}
- }
- private static void rebuildCollection(Collection col, String location, String name) throws XindiceException, IOException {
- if (!(col.getFiler() instanceof BTreeFiler)) {
- System.out.println("Filer for collection " + location + " is not BTreeFiler. Skipping...");
- return;
- }
+ // prepare
+ FilerCopy oldFiler;
+ FilerCopy newFiler;
+ if (col.getFiler() instanceof BTreeFiler) {
+ oldFiler = new BTreeCopy();
+ newFiler = new BTreeCopy();
+ } else if (col.getFiler() instanceof HashFiler) {
+ oldFiler = new HashCopy();
+ newFiler = new HashCopy();
+ } else {
+ if (log.isInfoEnabled()) {
+ log.info("Filer for collection " + col.getCanonicalName() + " is neither BTreeFiler nor HashFiler. Skipping...");
+ }
- // close collection and its filer
- col.close();
+ return true;
+ }
- System.out.println("Processing collection " + location);
+ String oldFileName;
+ String newFileName;
+ try {
+ oldFiler.setLocation(col.getCollectionRoot(), col.getName());
+ oldFiler.setConfig(col.getFiler().getConfig());
+ oldFileName = oldFiler.getFilerFile().getAbsolutePath();
+ if (!oldFiler.exists()) {
+                log.error("Filer for " + oldFileName + " does not exist");
+ return false;
+ }
- File root = new File(path + location);
+ newFiler.setLocation(col.getCollectionRoot(), col.getName() + ".rebuild");
+ newFiler.setConfig(col.getFiler().getConfig());
+ newFileName = newFiler.getFilerFile().getAbsolutePath();
+ if (newFiler.exists()) {
+ log.error("Filer for " + newFileName + " already exists");
+ return false;
+ }
+ } catch (XindiceException e) {
+ log.error("Got an exception when preparing to rebuild " + canonicalName, e);
+ return false;
+ }
- // backup
- String fileName = path + location + "/" + name;
- // FIXME What if copy fails. It's probably a better idea to first make a copy,
- // and rename after that?
- new File(fileName + ".tbl").renameTo(new File(fileName + ".old.tbl"));
+ // copy
+ if (!copy(oldFiler, newFiler, canonicalName)) {
+ newFiler.deleteFile();
+ return false;
+ }
- // prepare
- BTreeCopy filer = new BTreeCopy();
- filer.setLocation(root, name + ".old");
- filer.setConfig(col.getFiler().getConfig());
- if (!filer.exists()) {
- System.out.println("Filer for " + fileName + ".old.tbl does not exists");
- return;
+ oldFiler.deleteFile();
+ if (!newFiler.getFilerFile().renameTo(oldFiler.getFilerFile())) {
+ log.error("Could not rename successfully rebuilt file " + newFileName + " to " + oldFileName);
+ return false;
}
- BTreeFiler newFiler = new BTreeFiler();
- newFiler.setLocation(root, name);
- newFiler.setConfig(col.getFiler().getConfig());
- if (newFiler.exists()) {
- System.out.println("Filer for " + fileName + ".tbl already exists");
- return;
+ try {
+ col.getFiler().open();
+ } catch (DBException e) {
+ log.error("Could not open new file " + oldFileName, e);
+ return false;
}
- // copy
- newFiler.create();
+ return true;
+ }
+
+ private static boolean copy(FilerCopy oldFiler, FilerCopy newFiler, String canonicalName) {
try {
- filer.open();
+ newFiler.create();
+ oldFiler.open();
newFiler.open();
-
- filer.copy(newFiler);
- if (removeBackupFiles) {
- filer.deleteFile();
- }
+ oldFiler.copy(newFiler);
+ } catch (Exception e) {
+ log.error("Error copying collection " + canonicalName, e);
+ return false;
} finally {
try {
- filer.close();
- } catch (Exception e) {
- e.printStackTrace();
+ oldFiler.close();
+ } catch (DBException e) {
+ if (log.isWarnEnabled()) log.warn(e);
}
try {
newFiler.close();
- } catch (Exception e) {
- e.printStackTrace();
+ } catch (DBException e) {
+ if (log.isWarnEnabled()) log.warn(e);
}
}
+
+ return true;
}
- private static void rebuildIndex(Collection col) throws DBException {
- if (col.getFiler() != null) {
+ private static boolean rebuildIndex(Collection col) {
+ if (col.getFiler() == null) {
+ return true;
+ }
+
+ try {
String[] list = col.listIndexers();
for (int i = 0; i < list.length; i++) {
Indexer idx = col.getIndexer(list[i]);
Configuration idxConf = idx.getConfig();
- System.out.println("Rebuilding index " + list[i] + " for collection " + col.getName());
+ if (log.isInfoEnabled()) {
+ log.info("Rebuilding index " + list[i] + " for collection " + col.getCanonicalName());
+ }
col.dropIndexer(idx);
col.createIndexer(idxConf);
}
+ } catch (DBException e) {
+ log.error("Could not rebuild index for collection " + col.getCanonicalName(), e);
+ return false;
}
+
+ return true;
+ }
+
+    /**
+     * Internal contract shared by the rebuild filer wrappers: a Filer that
+     * can read a raw value by file pointer, bulk-copy all of its records
+     * into another Filer, and expose or delete its backing file on disk.
+     */
+    private interface FilerCopy extends Filer {
+        /** Reads the raw value stored at the given file pointer. */
+        public Value getValue(long pointer) throws IOException;
+
+        /** Copies every record of this filer into {@code newFiler}. */
+        public void copy(Filer newFiler) throws IOException, DBException;
+
+        /** Deletes the backing file; returns true on success. */
+        public boolean deleteFile();
+
+        /** Returns the file backing this filer on disk. */
+        public File getFilerFile();
+    }
- private static class BTreeCopy extends BTreeFiler {
- private Value getValue(long pointer) throws IOException {
+ private static class BTreeCopy extends BTreeFiler implements FilerCopy {
+ public Value getValue(long pointer) throws IOException {
return super.readValue(pointer);
}
- private void copy(BTreeFiler newFiler) throws XindiceException, IOException {
+ public void copy(Filer newFiler) throws IOException, BTreeException {
query(null, new CopyCallback(this, newFiler));
}
- private boolean deleteFile() {
+ public boolean deleteFile() {
return getFile().delete();
}
+
+ public File getFilerFile() {
+ return getFile();
+ }
+ }
+
+    /**
+     * HashFiler-backed implementation of FilerCopy.  Iterates over every
+     * primary page of the hash file and follows each page's collision chain,
+     * re-inserting every live record into the target filer.
+     */
+    private static class HashCopy extends HashFiler implements FilerCopy {
+        public Value getValue(long pointer) throws IOException {
+            // Expose the inherited readValue() to the copy machinery.
+            return super.readValue(pointer);
+        }
+
+        public void copy(Filer newFiler) throws IOException, DBException {
+            // Number of primary hash pages (buckets) in this file.
+            long hashSize = getFileHeader().getPageCount();
+
+            for (long i = 0; i < hashSize; i++) {
+                Page page = getPage(i);
+
+                // Walk the collision chain starting at this bucket.
+                while (true) {
+                    HashPageHeader ph = (HashPageHeader) page.getPageHeader();
+
+                    if (ph.getStatus() == RECORD) {
+                        // Live record: copy it into the new filer.
+                        Value value = readValue(page);
+                        newFiler.writeRecord(page.getKey(), value);
+
+                        long next = ph.getNextCollision();
+                        if (next != NO_PAGE) {
+                            page = getPage(ph.getNextCollision());
+                        } else {
+                            // End of the collision chain for this bucket.
+                            break;
+                        }
+                    } else {
+                        // Non-RECORD page ends the chain -- presumably a
+                        // free/unused bucket; confirm against HashFiler's
+                        // page status constants.
+                        break;
+                    }
+                }
+            }
+        }
+
+        public boolean deleteFile() {
+            return getFile().delete();
+        }
+
+        public File getFilerFile() {
+            return getFile();
+        }
+    }
private static class CopyCallback implements BTreeCallback {
private BTreeCopy filer;
- private BTreeFiler newFiler;
+ private Filer newFiler;
- public CopyCallback(BTreeCopy filer, BTreeFiler newFiler) {
+ public CopyCallback(BTreeCopy filer, Filer newFiler) {
this.filer = filer;
this.newFiler = newFiler;
}
Modified: xml/xindice/trunk/status.xml
URL: http://svn.apache.org/viewvc/xml/xindice/trunk/status.xml?view=diff&rev=525304&r1=525303&r2=525304
==============================================================================
--- xml/xindice/trunk/status.xml (original)
+++ xml/xindice/trunk/status.xml Tue Apr 3 15:09:04 2007
@@ -119,6 +119,9 @@
<changes>
<release version="1.1b5-dev" date="(not released)">
<action dev="VG" type="update" fixes-bug="41854" due-to="Natalia Shilenkova">
+ Add support for HashFiler in database rebuild tool.
+ </action>
+ <action dev="VG" type="update" fixes-bug="41854" due-to="Natalia Shilenkova">
Add support for filer parameter in command line tools.
</action>
<action dev="VG" type="update">