You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@openjpa.apache.org by pc...@apache.org on 2006/06/28 21:34:40 UTC
svn commit: r417856 [18/22] - in /incubator/openjpa/trunk/openjpa-lib: java/
main/ main/java/ main/java/org/apache/openjpa/lib/ant/
main/java/org/apache/openjpa/lib/conf/ main/java/org/apache/openjpa/lib/jdbc/
main/java/org/apache/openjpa/lib/log/ main...
Added: incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/AbstractSet.java
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/AbstractSet.java?rev=417856&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/AbstractSet.java (added)
+++ incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/AbstractSet.java Wed Jun 28 12:34:33 2006
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Written by Dawid Kurzyniec, based on public domain code written by Doug Lea
+ * and publicly available documentation, and released to the public domain, as
+ * explained at http://creativecommons.org/licenses/publicdomain
+ */
+package org.apache.openjpa.lib.util.concurrent;
+
+
+/**
+ * Overrides toArray() and toArray(Object[]) in AbstractCollection to provide
+ * implementations valid for concurrent sets.
+ *
+ * @author Doug Lea
+ * @author Dawid Kurzyniec
+ */
+abstract class AbstractSet extends java.util.AbstractSet {
+ /**
+ * Sole constructor. (For invocation by subclass constructors, typically
+ * implicit.)
+ */
+ protected AbstractSet() {
+ super();
+ }
+
+ /**
+ * Returns an array containing this set's elements. Per the class javadoc,
+ * the delegation to Utils.collectionToArray exists to produce a result
+ * that is valid for concurrent sets, unlike AbstractCollection.toArray.
+ * NOTE(review): Utils is not visible in this file - confirm that
+ * collectionToArray tolerates concurrent size changes as assumed.
+ */
+ public Object[] toArray() {
+ return Utils.collectionToArray(this);
+ }
+
+ /**
+ * Returns an array containing this set's elements, following the
+ * Collection.toArray(Object[]) contract (reuse the given array when it
+ * is large enough). Delegates to Utils.collectionToArray for the same
+ * concurrency reason as toArray() above.
+ */
+ public Object[] toArray(Object[] a) {
+ return Utils.collectionToArray(this, a);
+ }
+}
Propchange: incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/AbstractSet.java
------------------------------------------------------------------------------
svn:executable = *
Added: incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/Arrays.java
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/Arrays.java?rev=417856&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/Arrays.java (added)
+++ incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/Arrays.java Wed Jun 28 12:34:33 2006
@@ -0,0 +1,1051 @@
+/*
+ * Copyright 2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Written by Dawid Kurzyniec, based on code written by Doug Lea with assistance
+ * from members of JCP JSR-166 Expert Group. Released to the public domain,
+ * as explained at http://creativecommons.org/licenses/publicdomain.
+ */
+package org.apache.openjpa.lib.util.concurrent;
+
+import java.lang.reflect.Array;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
+
+
+/**
+ * Backport of java.util.Arrays functionality introduced in JDK 1.5/1.6
+ * (hashCode, deepHashCode, deepEquals, toString, deepToString, copyOf,
+ * copyOfRange). Pre-1.5 operations(sort, binarySearch, equals, fill,
+ * asList) delegate directly to java.util.Arrays. Package-private
+ * utility class; not instantiable.
+ */
+class Arrays {
+ /** Prevents instantiation - all members are static. */
+ private Arrays() {
+ }
+
+ // Sorting: straight delegation to java.util.Arrays(available since 1.2).
+ public static void sort(long[] a) {
+ java.util.Arrays.sort(a);
+ }
+
+ public static void sort(long[] a, int fromIndex, int toIndex) {
+ java.util.Arrays.sort(a, fromIndex, toIndex);
+ }
+
+ public static void sort(int[] a) {
+ java.util.Arrays.sort(a);
+ }
+
+ public static void sort(int[] a, int fromIndex, int toIndex) {
+ java.util.Arrays.sort(a, fromIndex, toIndex);
+ }
+
+ public static void sort(short[] a) {
+ java.util.Arrays.sort(a);
+ }
+
+ public static void sort(short[] a, int fromIndex, int toIndex) {
+ java.util.Arrays.sort(a, fromIndex, toIndex);
+ }
+
+ public static void sort(char[] a) {
+ java.util.Arrays.sort(a);
+ }
+
+ public static void sort(char[] a, int fromIndex, int toIndex) {
+ java.util.Arrays.sort(a, fromIndex, toIndex);
+ }
+
+ public static void sort(byte[] a) {
+ java.util.Arrays.sort(a);
+ }
+
+ public static void sort(byte[] a, int fromIndex, int toIndex) {
+ java.util.Arrays.sort(a, fromIndex, toIndex);
+ }
+
+ public static void sort(double[] a) {
+ java.util.Arrays.sort(a);
+ }
+
+ public static void sort(double[] a, int fromIndex, int toIndex) {
+ java.util.Arrays.sort(a, fromIndex, toIndex);
+ }
+
+ public static void sort(float[] a) {
+ java.util.Arrays.sort(a);
+ }
+
+ public static void sort(float[] a, int fromIndex, int toIndex) {
+ java.util.Arrays.sort(a, fromIndex, toIndex);
+ }
+
+ public static void sort(Object[] a) {
+ java.util.Arrays.sort(a);
+ }
+
+ public static void sort(Object[] a, int fromIndex, int toIndex) {
+ java.util.Arrays.sort(a, fromIndex, toIndex);
+ }
+
+ public static void sort(Object[] a, Comparator c) {
+ java.util.Arrays.sort(a, c);
+ }
+
+ public static void sort(Object[] a, int fromIndex, int toIndex, Comparator c) {
+ java.util.Arrays.sort(a, fromIndex, toIndex, c);
+ }
+
+ // Searching: straight delegation to java.util.Arrays.
+ public static int binarySearch(long[] a, long key) {
+ return java.util.Arrays.binarySearch(a, key);
+ }
+
+ public static int binarySearch(int[] a, int key) {
+ return java.util.Arrays.binarySearch(a, key);
+ }
+
+ public static int binarySearch(short[] a, short key) {
+ return java.util.Arrays.binarySearch(a, key);
+ }
+
+ public static int binarySearch(char[] a, char key) {
+ return java.util.Arrays.binarySearch(a, key);
+ }
+
+ public static int binarySearch(byte[] a, byte key) {
+ return java.util.Arrays.binarySearch(a, key);
+ }
+
+ public static int binarySearch(double[] a, double key) {
+ return java.util.Arrays.binarySearch(a, key);
+ }
+
+ public static int binarySearch(float[] a, float key) {
+ return java.util.Arrays.binarySearch(a, key);
+ }
+
+ public static int binarySearch(Object[] a, Object key) {
+ return java.util.Arrays.binarySearch(a, key);
+ }
+
+ public static int binarySearch(Object[] a, Object key, Comparator c) {
+ return java.util.Arrays.binarySearch(a, key, c);
+ }
+
+ // Equality Testing: straight delegation to java.util.Arrays.
+ public static boolean equals(long[] a, long[] a2) {
+ return java.util.Arrays.equals(a, a2);
+ }
+
+ public static boolean equals(int[] a, int[] a2) {
+ return java.util.Arrays.equals(a, a2);
+ }
+
+ public static boolean equals(short[] a, short[] a2) {
+ return java.util.Arrays.equals(a, a2);
+ }
+
+ public static boolean equals(char[] a, char[] a2) {
+ return java.util.Arrays.equals(a, a2);
+ }
+
+ public static boolean equals(byte[] a, byte[] a2) {
+ return java.util.Arrays.equals(a, a2);
+ }
+
+ public static boolean equals(boolean[] a, boolean[] a2) {
+ return java.util.Arrays.equals(a, a2);
+ }
+
+ public static boolean equals(double[] a, double[] a2) {
+ return java.util.Arrays.equals(a, a2);
+ }
+
+ public static boolean equals(float[] a, float[] a2) {
+ return java.util.Arrays.equals(a, a2);
+ }
+
+ public static boolean equals(Object[] a, Object[] a2) {
+ return java.util.Arrays.equals(a, a2);
+ }
+
+ // Filling: straight delegation to java.util.Arrays.
+ public static void fill(long[] a, long val) {
+ java.util.Arrays.fill(a, val);
+ }
+
+ public static void fill(long[] a, int fromIndex, int toIndex, long val) {
+ java.util.Arrays.fill(a, fromIndex, toIndex, val);
+ }
+
+ public static void fill(int[] a, int val) {
+ java.util.Arrays.fill(a, val);
+ }
+
+ public static void fill(int[] a, int fromIndex, int toIndex, int val) {
+ java.util.Arrays.fill(a, fromIndex, toIndex, val);
+ }
+
+ public static void fill(short[] a, short val) {
+ java.util.Arrays.fill(a, val);
+ }
+
+ public static void fill(short[] a, int fromIndex, int toIndex, short val) {
+ java.util.Arrays.fill(a, fromIndex, toIndex, val);
+ }
+
+ public static void fill(char[] a, char val) {
+ java.util.Arrays.fill(a, val);
+ }
+
+ public static void fill(char[] a, int fromIndex, int toIndex, char val) {
+ java.util.Arrays.fill(a, fromIndex, toIndex, val);
+ }
+
+ public static void fill(byte[] a, byte val) {
+ java.util.Arrays.fill(a, val);
+ }
+
+ public static void fill(byte[] a, int fromIndex, int toIndex, byte val) {
+ java.util.Arrays.fill(a, fromIndex, toIndex, val);
+ }
+
+ public static void fill(boolean[] a, boolean val) {
+ java.util.Arrays.fill(a, val);
+ }
+
+ public static void fill(boolean[] a, int fromIndex, int toIndex, boolean val) {
+ java.util.Arrays.fill(a, fromIndex, toIndex, val);
+ }
+
+ public static void fill(double[] a, double val) {
+ java.util.Arrays.fill(a, val);
+ }
+
+ public static void fill(double[] a, int fromIndex, int toIndex, double val) {
+ java.util.Arrays.fill(a, fromIndex, toIndex, val);
+ }
+
+ public static void fill(float[] a, float val) {
+ java.util.Arrays.fill(a, val);
+ }
+
+ public static void fill(float[] a, int fromIndex, int toIndex, float val) {
+ java.util.Arrays.fill(a, fromIndex, toIndex, val);
+ }
+
+ public static void fill(Object[] a, Object val) {
+ java.util.Arrays.fill(a, val);
+ }
+
+ public static void fill(Object[] a, int fromIndex, int toIndex, Object val) {
+ java.util.Arrays.fill(a, fromIndex, toIndex, val);
+ }
+
+ // Cloning: hand-rolled backports of the JDK 1.6 copyOf/copyOfRange
+ // family. Each copies min(original.length, newLength) elements and
+ // leaves any remainder of the new array at its default value.
+
+ /**
+ * @since 1.6
+ */
+ public static Object[] copyOf(Object[] original, int newLength) {
+ return copyOf(original, newLength, original.getClass());
+ }
+
+ /**
+ * Copies into a new array of component type newType.getComponentType().
+ * @since 1.6
+ */
+ public static Object[] copyOf(Object[] original, int newLength,
+ Class newType) {
+ Object[] arr = (newType == Object[].class) ? new Object[newLength]
+ : (Object[]) Array.newInstance(newType.getComponentType(),
+ newLength);
+ int len = ((original.length < newLength) ? original.length : newLength);
+ System.arraycopy(original, 0, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static byte[] copyOf(byte[] original, int newLength) {
+ byte[] arr = new byte[newLength];
+ int len = ((original.length < newLength) ? original.length : newLength);
+ System.arraycopy(original, 0, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static short[] copyOf(short[] original, int newLength) {
+ short[] arr = new short[newLength];
+ int len = ((original.length < newLength) ? original.length : newLength);
+ System.arraycopy(original, 0, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static int[] copyOf(int[] original, int newLength) {
+ int[] arr = new int[newLength];
+ int len = ((original.length < newLength) ? original.length : newLength);
+ System.arraycopy(original, 0, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static long[] copyOf(long[] original, int newLength) {
+ long[] arr = new long[newLength];
+ int len = ((original.length < newLength) ? original.length : newLength);
+ System.arraycopy(original, 0, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static char[] copyOf(char[] original, int newLength) {
+ char[] arr = new char[newLength];
+ int len = ((original.length < newLength) ? original.length : newLength);
+ System.arraycopy(original, 0, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static float[] copyOf(float[] original, int newLength) {
+ float[] arr = new float[newLength];
+ int len = ((original.length < newLength) ? original.length : newLength);
+ System.arraycopy(original, 0, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static double[] copyOf(double[] original, int newLength) {
+ double[] arr = new double[newLength];
+ int len = ((original.length < newLength) ? original.length : newLength);
+ System.arraycopy(original, 0, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static boolean[] copyOf(boolean[] original, int newLength) {
+ boolean[] arr = new boolean[newLength];
+ int len = ((original.length < newLength) ? original.length : newLength);
+ System.arraycopy(original, 0, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static Object[] copyOfRange(Object[] original, int from, int to) {
+ return copyOfRange(original, from, to, original.getClass());
+ }
+
+ /**
+ * Copies original[from..to) into a new array of length(to - from);
+ * indices past original.length are left at the default value.
+ * @since 1.6
+ */
+ public static Object[] copyOfRange(Object[] original, int from, int to,
+ Class newType) {
+ int newLength = to - from;
+
+ if (newLength < 0) {
+ throw new IllegalArgumentException(from + " > " + to);
+ }
+
+ Object[] arr = (newType == Object[].class) ? new Object[newLength]
+ : (Object[]) Array.newInstance(newType.getComponentType(),
+ newLength);
+ int ceil = original.length - from;
+ int len = (ceil < newLength) ? ceil : newLength;
+ System.arraycopy(original, from, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static byte[] copyOfRange(byte[] original, int from, int to) {
+ int newLength = to - from;
+
+ if (newLength < 0) {
+ throw new IllegalArgumentException(from + " > " + to);
+ }
+
+ byte[] arr = new byte[newLength];
+ int ceil = original.length - from;
+ int len = (ceil < newLength) ? ceil : newLength;
+ System.arraycopy(original, from, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static short[] copyOfRange(short[] original, int from, int to) {
+ int newLength = to - from;
+
+ if (newLength < 0) {
+ throw new IllegalArgumentException(from + " > " + to);
+ }
+
+ short[] arr = new short[newLength];
+ int ceil = original.length - from;
+ int len = (ceil < newLength) ? ceil : newLength;
+ System.arraycopy(original, from, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static int[] copyOfRange(int[] original, int from, int to) {
+ int newLength = to - from;
+
+ if (newLength < 0) {
+ throw new IllegalArgumentException(from + " > " + to);
+ }
+
+ int[] arr = new int[newLength];
+ int ceil = original.length - from;
+ int len = (ceil < newLength) ? ceil : newLength;
+ System.arraycopy(original, from, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static long[] copyOfRange(long[] original, int from, int to) {
+ int newLength = to - from;
+
+ if (newLength < 0) {
+ throw new IllegalArgumentException(from + " > " + to);
+ }
+
+ long[] arr = new long[newLength];
+ int ceil = original.length - from;
+ int len = (ceil < newLength) ? ceil : newLength;
+ System.arraycopy(original, from, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static char[] copyOfRange(char[] original, int from, int to) {
+ int newLength = to - from;
+
+ if (newLength < 0) {
+ throw new IllegalArgumentException(from + " > " + to);
+ }
+
+ char[] arr = new char[newLength];
+ int ceil = original.length - from;
+ int len = (ceil < newLength) ? ceil : newLength;
+ System.arraycopy(original, from, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static float[] copyOfRange(float[] original, int from, int to) {
+ int newLength = to - from;
+
+ if (newLength < 0) {
+ throw new IllegalArgumentException(from + " > " + to);
+ }
+
+ float[] arr = new float[newLength];
+ int ceil = original.length - from;
+ int len = (ceil < newLength) ? ceil : newLength;
+ System.arraycopy(original, from, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static double[] copyOfRange(double[] original, int from, int to) {
+ int newLength = to - from;
+
+ if (newLength < 0) {
+ throw new IllegalArgumentException(from + " > " + to);
+ }
+
+ double[] arr = new double[newLength];
+ int ceil = original.length - from;
+ int len = (ceil < newLength) ? ceil : newLength;
+ System.arraycopy(original, from, arr, 0, len);
+
+ return arr;
+ }
+
+ /**
+ * @since 1.6
+ */
+ public static boolean[] copyOfRange(boolean[] original, int from, int to) {
+ int newLength = to - from;
+
+ if (newLength < 0) {
+ throw new IllegalArgumentException(from + " > " + to);
+ }
+
+ boolean[] arr = new boolean[newLength];
+ int ceil = original.length - from;
+ int len = (ceil < newLength) ? ceil : newLength;
+ System.arraycopy(original, from, arr, 0, len);
+
+ return arr;
+ }
+
+ public static List asList(Object[] a) {
+ return java.util.Arrays.asList(a);
+ }
+
+ // Hashing: hand-rolled backports of the JDK 1.5 Arrays.hashCode family.
+ // All use the List.hashCode-style accumulation(31 * hash + element)
+ // and return 0 for a null array.
+
+ /**
+ * @since 1.5
+ */
+ public static int hashCode(long[] a) {
+ if (a == null) {
+ return 0;
+ }
+
+ int hash = 1;
+
+ for (int i = 0; i < a.length; i++) {
+ long e = a[i];
+ // Same folding as Long.hashCode: xor of the two 32-bit halves.
+ hash = (31 * hash) + (int) (e ^ (e >>> 32));
+ }
+
+ return hash;
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static int hashCode(int[] a) {
+ if (a == null) {
+ return 0;
+ }
+
+ int hash = 1;
+
+ for (int i = 0; i < a.length; i++) {
+ hash = (31 * hash) + a[i];
+ }
+
+ return hash;
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static int hashCode(short[] a) {
+ if (a == null) {
+ return 0;
+ }
+
+ int hash = 1;
+
+ for (int i = 0; i < a.length; i++) {
+ hash = (31 * hash) + a[i];
+ }
+
+ return hash;
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static int hashCode(char[] a) {
+ if (a == null) {
+ return 0;
+ }
+
+ int hash = 1;
+
+ for (int i = 0; i < a.length; i++) {
+ hash = (31 * hash) + a[i];
+ }
+
+ return hash;
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static int hashCode(byte[] a) {
+ if (a == null) {
+ return 0;
+ }
+
+ int hash = 1;
+
+ for (int i = 0; i < a.length; i++) {
+ hash = (31 * hash) + a[i];
+ }
+
+ return hash;
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static int hashCode(boolean[] a) {
+ if (a == null) {
+ return 0;
+ }
+
+ int hash = 1;
+
+ for (int i = 0; i < a.length; i++) {
+ // 1231/1237 are the values specified by Boolean.hashCode.
+ hash = (31 * hash) + (a[i] ? 1231 : 1237);
+ }
+
+ return hash;
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static int hashCode(float[] a) {
+ if (a == null) {
+ return 0;
+ }
+
+ int hash = 1;
+
+ for (int i = 0; i < a.length; i++) {
+ hash = (31 * hash) + Float.floatToIntBits(a[i]);
+ }
+
+ return hash;
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static int hashCode(double[] a) {
+ if (a == null) {
+ return 0;
+ }
+
+ int hash = 1;
+
+ for (int i = 0; i < a.length; i++) {
+ long e = Double.doubleToLongBits(a[i]);
+ hash = (31 * hash) + (int) (e ^ (e >>> 32));
+ }
+
+ return hash;
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static int hashCode(Object[] a) {
+ if (a == null) {
+ return 0;
+ }
+
+ int hash = 1;
+
+ for (int i = 0; i < a.length; i++) {
+ Object e = a[i];
+ hash = (31 * hash) + ((e == null) ? 0 : e.hashCode());
+ }
+
+ return hash;
+ }
+
+ /**
+ * Like hashCode(Object[]) but recurses into nested arrays, dispatching
+ * each element to the matching hashCode overload by runtime type.
+ * NOTE(review): unlike java.util.Arrays.deepHashCode, this does not
+ * guard against self-referential arrays - a cycle will recurse until
+ * StackOverflowError(the JDK method has the same limitation).
+ * @since 1.5
+ */
+ public static int deepHashCode(Object[] a) {
+ if (a == null) {
+ return 0;
+ }
+
+ int hash = 1;
+
+ for (int i = 0; i < a.length; i++) {
+ Object e = a[i];
+ hash = (31 * hash) +
+ ((e instanceof Object[]) ? deepHashCode((Object[]) e)
+ : ((e instanceof byte[])
+ ? hashCode((byte[]) e)
+ : ((e instanceof short[]) ? hashCode((short[]) e)
+ : ((e instanceof int[])
+ ? hashCode((int[]) e)
+ : ((e instanceof long[]) ? hashCode((long[]) e)
+ : ((e instanceof char[])
+ ? hashCode((char[]) e)
+ : ((e instanceof boolean[]) ? hashCode((boolean[]) e)
+ : ((e instanceof float[])
+ ? hashCode((float[]) e)
+ : ((e instanceof double[]) ? hashCode((double[]) e)
+ : ((e != null) ? e.hashCode() : 0))))))))));
+ }
+
+ return hash;
+ }
+
+ /**
+ * Element-wise deep comparison of two arrays, recursing into nested
+ * arrays; follows the java.util.Arrays.deepEquals contract(two nulls
+ * are equal, null vs. non-null is not, lengths must match).
+ * @since 1.5
+ */
+ public static boolean deepEquals(Object[] a1, Object[] a2) {
+ if (a1 == a2) {
+ return true;
+ }
+
+ if ((a1 == null) || (a2 == null)) {
+ return false;
+ }
+
+ int len = a1.length;
+
+ if (len != a2.length) {
+ return false;
+ }
+
+ for (int i = 0; i < len; i++) {
+ Object e1 = a1[i];
+ Object e2 = a2[i];
+
+ if (e1 == e2) {
+ continue;
+ }
+
+ if (e1 == null) {
+ return false;
+ }
+
+ // BUG FIX: e1 is non-null here but e2 may still be null, which
+ // previously caused a NullPointerException at e2.getClass()
+ // below; java.util.Arrays.deepEquals specifies false instead.
+ if (e2 == null) {
+ return false;
+ }
+
+ // BUG FIX: the dispatch must fall through to the type-specific
+ // comparison chain when both elements are arrays of the SAME
+ // class. The original condition lacked the '!', so same-class
+ // arrays were compared with Object.equals(identity), making
+ // deepEquals return false for structurally-equal nested arrays.
+ boolean eq = ((e1.getClass() != e2.getClass()) ||
+ !e1.getClass().isArray()) ? e1.equals(e2)
+ : ((e1 instanceof Object[] &&
+ e2 instanceof Object[])
+ ? deepEquals((Object[]) e1, (Object[]) e2)
+ : ((e1 instanceof byte[] && e2 instanceof byte[])
+ ? equals((byte[]) e1, (byte[]) e2)
+ : ((e1 instanceof short[] && e2 instanceof short[])
+ ? equals((short[]) e1, (short[]) e2)
+ : ((e1 instanceof int[] && e2 instanceof int[])
+ ? equals((int[]) e1, (int[]) e2)
+ : ((e1 instanceof long[] && e2 instanceof long[])
+ ? equals((long[]) e1, (long[]) e2)
+ : ((e1 instanceof char[] && e2 instanceof char[])
+ ? equals((char[]) e1, (char[]) e2)
+ : ((e1 instanceof boolean[] && e2 instanceof boolean[])
+ ? equals((boolean[]) e1, (boolean[]) e2)
+ : ((e1 instanceof float[] && e2 instanceof float[])
+ ? equals((float[]) e1, (float[]) e2)
+ : ((e1 instanceof double[] && e2 instanceof double[])
+ ? equals((double[]) e1, (double[]) e2) : e1.equals(e2))))))))));
+
+ if (!eq) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ // String conversion: hand-rolled backports of the JDK 1.5
+ // Arrays.toString family. All return "null" for a null array and
+ // "[e1, e2, ...]" otherwise.
+
+ /**
+ * @since 1.5
+ */
+ public static String toString(long[] a) {
+ if (a == null) {
+ return "null";
+ }
+
+ if (a.length == 0) {
+ return "[]";
+ }
+
+ StringBuffer buf = new StringBuffer();
+ buf.append('[').append(a[0]);
+
+ for (int i = 1; i < a.length; i++)
+ buf.append(", ").append(a[i]);
+
+ buf.append(']');
+
+ return buf.toString();
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static String toString(int[] a) {
+ if (a == null) {
+ return "null";
+ }
+
+ if (a.length == 0) {
+ return "[]";
+ }
+
+ StringBuffer buf = new StringBuffer();
+ buf.append('[').append(a[0]);
+
+ for (int i = 1; i < a.length; i++)
+ buf.append(", ").append(a[i]);
+
+ buf.append(']');
+
+ return buf.toString();
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static String toString(short[] a) {
+ if (a == null) {
+ return "null";
+ }
+
+ if (a.length == 0) {
+ return "[]";
+ }
+
+ StringBuffer buf = new StringBuffer();
+ buf.append('[').append(a[0]);
+
+ for (int i = 1; i < a.length; i++)
+ buf.append(", ").append(a[i]);
+
+ buf.append(']');
+
+ return buf.toString();
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static String toString(char[] a) {
+ if (a == null) {
+ return "null";
+ }
+
+ if (a.length == 0) {
+ return "[]";
+ }
+
+ StringBuffer buf = new StringBuffer();
+ buf.append('[').append(a[0]);
+
+ for (int i = 1; i < a.length; i++)
+ buf.append(", ").append(a[i]);
+
+ buf.append(']');
+
+ return buf.toString();
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static String toString(byte[] a) {
+ if (a == null) {
+ return "null";
+ }
+
+ if (a.length == 0) {
+ return "[]";
+ }
+
+ StringBuffer buf = new StringBuffer();
+ buf.append('[').append(a[0]);
+
+ for (int i = 1; i < a.length; i++)
+ buf.append(", ").append(a[i]);
+
+ buf.append(']');
+
+ return buf.toString();
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static String toString(boolean[] a) {
+ if (a == null) {
+ return "null";
+ }
+
+ if (a.length == 0) {
+ return "[]";
+ }
+
+ StringBuffer buf = new StringBuffer();
+ buf.append('[').append(a[0]);
+
+ for (int i = 1; i < a.length; i++)
+ buf.append(", ").append(a[i]);
+
+ buf.append(']');
+
+ return buf.toString();
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static String toString(float[] a) {
+ if (a == null) {
+ return "null";
+ }
+
+ if (a.length == 0) {
+ return "[]";
+ }
+
+ StringBuffer buf = new StringBuffer();
+ buf.append('[').append(a[0]);
+
+ for (int i = 1; i < a.length; i++)
+ buf.append(", ").append(a[i]);
+
+ buf.append(']');
+
+ return buf.toString();
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static String toString(double[] a) {
+ if (a == null) {
+ return "null";
+ }
+
+ if (a.length == 0) {
+ return "[]";
+ }
+
+ StringBuffer buf = new StringBuffer();
+ buf.append('[').append(a[0]);
+
+ for (int i = 1; i < a.length; i++)
+ buf.append(", ").append(a[i]);
+
+ buf.append(']');
+
+ return buf.toString();
+ }
+
+ /**
+ * @since 1.5
+ */
+ public static String toString(Object[] a) {
+ if (a == null) {
+ return "null";
+ }
+
+ if (a.length == 0) {
+ return "[]";
+ }
+
+ StringBuffer buf = new StringBuffer();
+ buf.append('[').append(a[0]);
+
+ for (int i = 1; i < a.length; i++)
+ buf.append(", ").append(a[i]);
+
+ buf.append(']');
+
+ return buf.toString();
+ }
+
+ /**
+ * Like toString(Object[]) but recurses into nested arrays; a cycle is
+ * rendered as "[...]" instead of recursing forever.
+ * @since 1.5
+ */
+ public static String deepToString(Object[] a) {
+ if (a == null) {
+ return "null";
+ }
+
+ StringBuffer buf = new StringBuffer();
+ deepToString(a, buf, new ArrayList());
+
+ return buf.toString();
+ }
+
+ /**
+ * Recursive worker for deepToString. The seen list holds the arrays on
+ * the current recursion path(pushed on entry, popped on exit) so that
+ * a self-referential array prints as "[...]" rather than overflowing
+ * the stack.
+ */
+ private static void deepToString(Object[] a, StringBuffer buf, List seen) {
+ seen.add(a);
+ buf.append('[');
+
+ for (int i = 0; i < a.length; i++) {
+ if (i > 0) {
+ buf.append(", ");
+ }
+
+ Object e = a[i];
+
+ if (e == null) {
+ buf.append("null");
+ } else if (!e.getClass().isArray()) {
+ buf.append(e.toString());
+ } else if (e instanceof Object[]) {
+ if (seen.contains(e)) {
+ buf.append("[...]");
+ } else {
+ deepToString((Object[]) e, buf, seen);
+ }
+ } else {
+ // primitive arr: dispatch to the matching toString overload.
+ buf.append((e instanceof byte[]) ? toString((byte[]) e)
+ : ((e instanceof short[])
+ ? toString((short[]) e)
+ : ((e instanceof int[]) ? toString((int[]) e)
+ : ((e instanceof long[])
+ ? toString((long[]) e)
+ : ((e instanceof char[]) ? toString((char[]) e)
+ : ((e instanceof boolean[])
+ ? toString((boolean[]) e)
+ : ((e instanceof float[]) ? toString((float[]) e)
+ : ((e instanceof double[])
+ ? toString((double[]) e) : ""))))))));
+ }
+ }
+
+ buf.append(']');
+ seen.remove(seen.size() - 1);
+ }
+}
Propchange: incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/Arrays.java
------------------------------------------------------------------------------
svn:executable = *
Added: incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentHashMap.java
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentHashMap.java?rev=417856&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentHashMap.java (added)
+++ incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentHashMap.java Wed Jun 28 12:34:33 2006
@@ -0,0 +1,1050 @@
+/*
+ * Copyright 2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.openjpa.lib.util.concurrent;
+
+import org.apache.openjpa.lib.util.SizedMap;
+
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.Serializable;
+
+import java.util.AbstractCollection;
+import java.util.AbstractMap;
+import java.util.AbstractSet;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Random;
+import java.util.Set;
+
+
+/**
+ * This class implements a HashMap which has limited synchronization.
+ * In particular mutators are generally synchronized while accessors
+ * are generally not. Additionally the Iterators returned by this
+ * class are not "fail-fast", but instead try to continue to iterate
+ * over the data structure after changes have been made.
+ *
+ * The synchronization semantics are built right in to the
+ * implementation rather than using a delegating wrapper like the
+ * other collection classes do because it wasn't clear to me that the
+ * how the two should be seperated or that it would be useful to do
+ * so. This can probably be a topic for further debate in the
+ * future.
+ *
+ * This class is based heavily on the HashMap class in the Java
+ * collections package.
+ */
+public class ConcurrentHashMap extends AbstractMap implements ConcurrentMap,
+ SizedMap, Cloneable, Serializable {
+ /**
+ * The default initial capacity - MUST be a power of two.
+ */
+ private static final int DEFAULT_INITIAL_CAPACITY = 16;
+
+ /**
+ * The maximum capacity, used if a higher value is implicitly specified
+ * by either of the constructors with arguments.
+ * MUST be a power of two <= 1<<30.
+ */
+ private static final int MAXIMUM_CAPACITY = 1 << 30;
+
+ /**
+ * The load factor used when none is specified in the constructor.
+ **/
+ private static final float DEFAULT_LOAD_FACTOR = 0.75f;
+
+ /**
+ * Cache of random numbers used in "random" methods, since generating them
+ * is expensive. We hope each map changes enough between cycling through
+ * this list that the overall effect is random enough.
+ */
+ static final double[] RANDOMS = new double[1000];
+
+ static {
+ Random random = new Random();
+
+ for (int i = 0; i < RANDOMS.length; i++)
+ RANDOMS[i] = random.nextDouble();
+ }
+
+ // internal utilities
+
+ /**
+ * Value representing null keys inside tables.
+ */
+ private static final Object NULL_KEY = new Object();
+
+ // Types of Enumerations/Iterations
+ private static final int KEYS = 0;
+ private static final int VALUES = 1;
+ private static final int ENTRIES = 2;
+ private static final long serialVersionUID = -6452706556724125778L;
+
+ /**
+ * The table, resized as necessary. Length MUST Always be a power of two.
+ */
+ private transient Entry[] table;
+
+ /**
+ * The number of key-value mappings contained in this identity hash map.
+ */
+ private transient int size;
+
+ /**
+ * The next size value at which to resize (capacity * load factor).
+ * @serial
+ */
+ private int threshold;
+
+ /**
+ * The load factor for the hash table.
+ *
+ * @serial
+ */
+ private final float loadFactor;
+
+ /**
+ * Spread "random" removes and iteration.
+ */
+ private int randomEntry = 0;
+
+ /**
+ * Maximum entries.
+ */
+ private int maxSize = Integer.MAX_VALUE;
+
+ // Views
+ private transient Set entrySet = null;
+ private transient Set keySet = null;
+ private transient Collection values = null;
+
+ /**
+ * Constructs an empty <tt>ConcurrentHashMap</tt> with the specified initial
+ * capacity and load factor.
+ *
+ * @param initialCapacity The initial capacity.
+ * @param loadFactor The load factor.
+ * @throws IllegalArgumentException if the initial capacity is negative
+ * or the load factor is nonpositive.
+ */
+ public ConcurrentHashMap(int initialCapacity, float loadFactor) {
+ if (initialCapacity < 0) {
+ throw new IllegalArgumentException("Illegal initial capacity: " +
+ initialCapacity);
+ }
+
+ // Oversized requests are silently clamped rather than rejected.
+ if (initialCapacity > MAXIMUM_CAPACITY) {
+ initialCapacity = MAXIMUM_CAPACITY;
+ }
+
+ if ((loadFactor <= 0) || (loadFactor > 1)) {
+ throw new IllegalArgumentException("Illegal load factor: " +
+ loadFactor);
+ }
+
+ // Find a power of 2 >= initialCapacity
+ int capacity = 1;
+
+ while (capacity < initialCapacity)
+ capacity <<= 1;
+
+ this.loadFactor = loadFactor;
+ // Resize triggers once size reaches capacity * loadFactor.
+ threshold = (int) (capacity * loadFactor);
+ table = new Entry[capacity];
+ }
+
+ /**
+ * Constructs an empty <tt>ConcurrentHashMap</tt> with the specified initial
+ * capacity and the default load factor (0.75).
+ *
+ * @param initialCapacity the initial capacity.
+ * @throws IllegalArgumentException if the initial capacity is negative.
+ */
+ public ConcurrentHashMap(int initialCapacity) {
+ // Delegate to the canonical constructor with the default load factor.
+ this(initialCapacity, DEFAULT_LOAD_FACTOR);
+ }
+
+ /**
+ * Constructs an empty <tt>ConcurrentHashMap</tt> with the default initial
+ * capacity (16) and the default load factor (0.75).
+ */
+ public ConcurrentHashMap() {
+ // Delegate to the canonical constructor with all defaults (16, 0.75).
+ this(DEFAULT_INITIAL_CAPACITY, DEFAULT_LOAD_FACTOR);
+ }
+
+ /**
+ * Constructs a new <tt>ConcurrentHashMap</tt> with the same mappings as the
+ * specified <tt>Map</tt>. The <tt>ConcurrentHashMap</tt> is created with
+ * default load factor (0.75) and an initial capacity sufficient to
+ * hold the mappings in the specified <tt>Map</tt>.
+ *
+ * @param m the map whose mappings are to be placed in this map.
+ * @throws NullPointerException if the specified map is null.
+ */
+ public ConcurrentHashMap(Map m) {
+ // Presize so that putAll(m) will not need to resize the table.
+ this(Math.max((int) (m.size() / DEFAULT_LOAD_FACTOR) + 1,
+ DEFAULT_INITIAL_CAPACITY), DEFAULT_LOAD_FACTOR);
+ putAll(m);
+ }
+
+ /**
+ * Returns internal representation for key. Use NULL_KEY if key is null.
+ */
+ private static Object maskNull(Object key) {
+ // Substitute the NULL_KEY sentinel so null keys can be hashed.
+ if (key == null) {
+ return NULL_KEY;
+ }
+
+ return key;
+ }
+
+ /**
+ * Returns key represented by specified internal representation.
+ */
+ private static Object unmaskNull(Object key) {
+ // Translate the internal NULL_KEY sentinel back to a real null.
+ if (key == NULL_KEY) {
+ return null;
+ }
+
+ return key;
+ }
+
+ /**
+ * Returns a hash code for non-null Object x.
+ */
+ private static int hash(Object x) {
+ // Cheaply spread the raw hashCode, since the bucket index is taken
+ // from the low bits only.
+ int h = x.hashCode();
+
+ return h - (h << 7); // i.e., -127 * h
+ }
+
+ /**
+ * Check for equality of non-null reference x and possibly-null y.
+ */
+ private static boolean eq(Object x, Object y) {
+ // Identity check first saves an equals() call in the common case.
+ if (x == y) {
+ return true;
+ }
+
+ return x.equals(y);
+ }
+
+ /**
+ * Returns the current capacity of backing table in this map.
+ *
+ * @return the current capacity of backing table in this map.
+ */
+ public final int capacity() {
+ return table.length; // always a power of two
+ }
+
+ /**
+ * Returns the load factor for this map.
+ *
+ * @return the load factor for this map.
+ */
+ public final float loadFactor() {
+ return loadFactor; // fixed at construction time
+ }
+
+ /**
+ * Returns the maximum number of entries, or Integer.MAX_VALUE
+ * when the map is unbounded.
+ */
+ public int getMaxSize() {
+ return maxSize;
+ }
+
+ /**
+ * Sets the maximum number of entries; a negative value means unbounded.
+ * Shrinking the limit evicts overflow entries immediately.
+ */
+ public void setMaxSize(int maxSize) {
+ this.maxSize = (maxSize < 0) ? Integer.MAX_VALUE : maxSize;
+
+ if (this.maxSize != Integer.MAX_VALUE) {
+ removeOverflow(this.maxSize);
+ }
+ }
+
+ public boolean isFull() {
+ // Only meaningful once a maximum size has been set.
+ return (maxSize != Integer.MAX_VALUE) && (size() >= maxSize);
+ }
+
+ /**
+ * Overridable callback invoked with each entry evicted by
+ * removeOverflow; this base implementation does nothing.
+ */
+ public void overflowRemoved(Object key, Object value) {
+ }
+
+ /**
+ * Returns the number of key-value mappings in this map.
+ *
+ * @return the number of key-value mappings in this map.
+ */
+ public final int size() {
+ return size; // unsynchronized read; may be momentarily stale
+ }
+
+ /**
+ * Returns <tt>true</tt> if this map contains no key-value mappings.
+ *
+ * @return <tt>true</tt> if this map contains no key-value mappings.
+ */
+ public final boolean isEmpty() {
+ return size == 0; // same unsynchronized read as size()
+ }
+
+ /**
+ * Returns the value to which the specified key is mapped in this identity
+ * hash map, or <tt>null</tt> if the map contains no mapping for this key.
+ * A return value of <tt>null</tt> does not <i>necessarily</i> indicate
+ * that the map contains no mapping for the key; it is also possible that
+ * the map explicitly maps the key to <tt>null</tt>. The
+ * <tt>containsKey</tt> method may be used to distinguish these two cases.
+ *
+ * @param key the key whose associated value is to be returned.
+ * @return the value to which this map maps the specified key, or
+ * <tt>null</tt> if the map contains no mapping for this key.
+ * @see #put(Object, Object)
+ */
+ public Object get(Object key) {
+ // null is returned both for an absent key and for a key explicitly
+ // mapped to null; use containsKey to tell the two apart.
+ Entry entry = getEntry(key);
+
+ if (entry == null) {
+ return null;
+ }
+
+ return entry.value;
+ }
+
+ /**
+ * Returns <tt>true</tt> if this map contains a mapping for the
+ * specified key.
+ *
+ * @param key The key whose presence in this map is to be tested
+ * @return <tt>true</tt> if this map contains a mapping for the specified
+ * key.
+ */
+ public final boolean containsKey(Object key) {
+ // Distinguishes "mapped to null" from "absent", unlike get(key).
+ return getEntry(key) != null;
+ }
+
+ /**
+ * Returns the entry associated with the specified key in the
+ * ConcurrentHashMap. Returns null if the ConcurrentHashMap contains no
+ * mapping for this key.
+ */
+ protected Entry getEntry(Object key) {
+ Object k = maskNull(key);
+ int hash = hash(k);
+ // Snapshot the table reference so a concurrent resize (which swaps
+ // in a new array) cannot change it mid-scan.
+ Entry[] tab = table;
+
+ for (Entry e = tab[hash & (tab.length - 1)]; e != null; e = e.next) {
+ if ((e.hash == hash) && eq(k, e.key)) {
+ return e;
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Associates the specified value with the specified key in this map.
+ * If the map previously contained a mapping for this key, the old
+ * value is replaced.
+ *
+ * @param key key with which the specified value is to be associated.
+ * @param value value to be associated with the specified key.
+ * @return previous value associated with specified key, or <tt>null</tt>
+ * if there was no mapping for key. A <tt>null</tt> return can
+ * also indicate that the ConcurrentHashMap previously associated
+ * <tt>null</tt> with the specified key.
+ */
+ public Object put(Object key, Object value) {
+ Object k = maskNull(key);
+ int hash = hash(k);
+
+ synchronized (this) {
+ int i = hash & (table.length - 1);
+
+ // Replace in place if the key is already present.
+ for (Entry e = table[i]; e != null; e = e.next) {
+ if ((e.hash == hash) && eq(k, e.key)) {
+ Object oldValue = e.value;
+ e.value = value;
+
+ return oldValue;
+ }
+ }
+
+ // Evict down to maxSize - 1 before inserting the new entry.
+ if (maxSize != Integer.MAX_VALUE) {
+ removeOverflow(maxSize - 1);
+ }
+
+ // Insert at the head of the bucket chain.
+ table[i] = createEntry(hash, k, value, table[i]);
+
+ if (size++ >= threshold) {
+ resize(2 * table.length);
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Remove any entries equal to or over the max size.
+ */
+ /**
+ * Evicts random entries until size is at most the given limit. The
+ * parameter deliberately shadows the maxSize field: callers pass the
+ * target size (e.g. maxSize - 1 just before an insert).
+ */
+ private void removeOverflow(int maxSize) {
+ while (size > maxSize) {
+ Map.Entry entry = removeRandom();
+
+ if (entry == null) {
+ break; // nothing left to evict
+ }
+
+ overflowRemoved(entry.getKey(), entry.getValue());
+ }
+ }
+
+ /**
+ * Maps key to value only if the key is not already present, atomically
+ * with respect to other mutators. Returns the existing value when the
+ * key was already mapped, or null if this call inserted the mapping.
+ */
+ public Object putIfAbsent(Object key, Object value) {
+ Object k = maskNull(key);
+ int hash = hash(k);
+
+ synchronized (this) {
+ int i = hash & (table.length - 1);
+
+ // An existing mapping wins; the supplied value is ignored.
+ for (Entry e = table[i]; e != null; e = e.next) {
+ if ((e.hash == hash) && eq(k, e.key)) {
+ return e.value;
+ }
+ }
+
+ if (maxSize != Integer.MAX_VALUE) {
+ removeOverflow(maxSize - 1);
+ }
+
+ table[i] = createEntry(hash, k, value, table[i]);
+
+ if (size++ >= threshold) {
+ resize(2 * table.length);
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Rehashes the contents of this map into a new <tt>ConcurrentHashMap</tt>
+ * instance with a larger capacity. This method is called automatically when
+ * the number of keys in this map exceeds its capacity and load factor.
+ *
+ * @param newCapacity the new capacity, MUST be a power of two.
+ */
+ private void resize(int newCapacity) {
+ // assert (newCapacity & -newCapacity) == newCapacity; // power of 2
+ Entry[] oldTable = table;
+ int oldCapacity = oldTable.length;
+
+ // check if needed
+ if ((size < threshold) || (oldCapacity > newCapacity)) {
+ return;
+ }
+
+ Entry[] newTable = new Entry[newCapacity];
+ int mask = newCapacity - 1;
+
+ // Entries are cloned rather than relinked, so unsynchronized readers
+ // still traversing the old table see intact chains.
+ for (int i = oldCapacity; i-- > 0;) {
+ for (Entry e = oldTable[i]; e != null; e = e.next) {
+ Entry clone = (Entry) e.clone();
+ int j = clone.hash & mask;
+ clone.next = newTable[j];
+ newTable[j] = clone;
+ }
+ }
+
+ // Publish the new table only after it is fully populated.
+ table = newTable;
+ threshold = (int) (newCapacity * loadFactor);
+ }
+
+ /**
+ * Copies all of the mappings from the specified map to this map
+ * These mappings will replace any mappings that
+ * this map had for any of the keys currently in the specified map.
+ *
+ * @param t mappings to be stored in this map.
+ * @throws NullPointerException if the specified map is null.
+ */
+ public final synchronized void putAll(Map t) {
+ // Expand enough to hold t's elements without resizing.
+ int n = t.size();
+
+ if (n == 0) {
+ return;
+ }
+
+ if (n >= threshold) {
+ n = (int) ((n / loadFactor) + 1);
+
+ if (n > MAXIMUM_CAPACITY) {
+ n = MAXIMUM_CAPACITY;
+ }
+
+ int capacity = table.length;
+
+ while (capacity < n)
+ capacity <<= 1;
+
+ resize(capacity);
+ }
+
+ // put() re-acquires the monitor this method already holds(reentrant).
+ for (Iterator i = t.entrySet().iterator(); i.hasNext();) {
+ Map.Entry e = (Map.Entry) i.next();
+ put(e.getKey(), e.getValue());
+ }
+ }
+
+ /**
+ * Removes the mapping for this key from this map if present.
+ *
+ * @param key key whose mapping is to be removed from the map.
+ * @return previous value associated with specified key, or <tt>null</tt>
+ * if there was no mapping for key. A <tt>null</tt> return can
+ * also indicate that the map previously associated <tt>null</tt>
+ * with the specified key.
+ */
+ public Object remove(Object key) {
+ // Delegate to the synchronized removal and report the old value.
+ Entry removed = removeEntryForKey(key);
+
+ if (removed == null) {
+ return null;
+ }
+
+ return removed.value;
+ }
+
+ /**
+ * Removes and returns the entry associated with the specified key in the
+ * ConcurrentHashMap. Returns null if the ConcurrentHashMap contains no
+ * mapping for this key.
+ */
+ private Entry removeEntryForKey(Object key) {
+ Object k = maskNull(key);
+ int hash = hash(k);
+
+ synchronized (this) {
+ int i = hash & (table.length - 1);
+ Entry e = table[i];
+
+ if (e == null) {
+ return null; // empty bucket
+ }
+
+ // Head of the chain is a special case: relink the bucket slot.
+ if ((e.hash == hash) && eq(k, e.key)) {
+ size--;
+ table[i] = e.next;
+
+ return e;
+ }
+
+ // Otherwise walk the chain and splice the match out.
+ Entry prev = e;
+
+ for (e = e.next; e != null; prev = e, e = e.next) {
+ if ((e.hash == hash) && eq(k, e.key)) {
+ size--;
+ prev.next = e.next;
+
+ return e;
+ }
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Special version of remove for EntrySet.
+ */
+ private Entry removeMapping(Object o) {
+ if (!(o instanceof Map.Entry)) {
+ return null;
+ }
+
+ Map.Entry entry = (Map.Entry) o;
+ Object k = maskNull(entry.getKey());
+ int hash = hash(k);
+
+ synchronized (this) {
+ int i = hash & (table.length - 1);
+ Entry e = table[i];
+
+ if (e == null) {
+ return null;
+ }
+
+ // Entry.equals compares both key and value, so the mapping is
+ // only removed when the value matches too.
+ if ((e.hash == hash) && e.equals(entry)) {
+ size--;
+ table[i] = e.next;
+
+ return e;
+ }
+
+ Entry prev = e;
+
+ for (e = e.next; e != null; prev = e, e = e.next) {
+ if ((e.hash == hash) && e.equals(entry)) {
+ size--;
+ prev.next = e.next;
+
+ return e;
+ }
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Removes all mappings from this map.
+ */
+ public synchronized void clear() {
+ // Swap in a fresh array; readers holding the old table can finish.
+ table = new Entry[table.length];
+ size = 0;
+ }
+
+ /**
+ * Return an arbitrary entry index.
+ */
+ /**
+ * Returns an arbitrary bucket index, cycling through the precomputed
+ * RANDOMS table.
+ */
+ private int randomEntryIndex() {
+ // This is called without the map lock from randomEntryIterator, so
+ // the original check-then-increment could race past RANDOMS.length
+ // and throw ArrayIndexOutOfBoundsException. Read first, then reset
+ // the cursor, and bound the array access so a racing increment can
+ // never index out of range. The cursor is still only loosely
+ // accurate under contention, which is fine for "random" selection.
+ int cursor = randomEntry++;
+
+ if (randomEntry >= RANDOMS.length) {
+ randomEntry = 0;
+ }
+
+ return (int) (RANDOMS[cursor % RANDOMS.length] * table.length);
+ }
+
+ /**
+ * Removes and returns an arbitrary entry, or null if the map is empty.
+ */
+ public Map.Entry removeRandom() {
+ if (size == 0) {
+ return null;
+ }
+
+ synchronized (this) {
+ // Pick a random bucket, then search outward from it for a
+ // non-empty one.
+ int random = randomEntryIndex();
+ int index = findEntry(random, (random % 2) == 0, false);
+
+ if (index == -1) {
+ return null; // map emptied since the size check above
+ }
+
+ // Unlink the head entry of the located bucket.
+ Entry rem = table[index];
+ table[index] = rem.next;
+ size--;
+
+ return rem;
+ }
+ }
+
+ /**
+ * Find the index of the entry nearest the given index, starting in the
+ * given direction.
+ */
+ private int findEntry(int start, boolean forward, boolean searchedOther) {
+ if (forward) {
+ for (int i = start; i < table.length; i++)
+ if (table[i] != null) {
+ return i;
+ }
+
+ // Nothing ahead of start; retry once in the other direction.
+ return (searchedOther || (start == 0)) ? (-1)
+ : findEntry(start - 1,
+ false, true);
+ } else {
+ for (int i = start; i >= 0; i--)
+ if (table[i] != null) {
+ return i;
+ }
+
+ // Nothing at or below start; retry once in the other direction.
+ return (searchedOther || (start == (table.length - 1))) ? (-1)
+ : findEntry(start +
+ 1, true, true);
+ }
+ }
+
+ /**
+ * Returns an entry iterator that starts at a random bucket.
+ */
+ public Iterator randomEntryIterator() {
+ // Compute the start index before the iterator captures the table,
+ // in case the table gets replaced with a larger one.
+ return new HashIterator(ENTRIES, randomEntryIndex());
+ }
+
+ /**
+ * Returns <tt>true</tt> if this map maps one or more keys to the
+ * specified value.
+ *
+ * @param value value whose presence in this map is to be tested.
+ * @return <tt>true</tt> if this map maps one or more keys to the
+ * specified value.
+ */
+ public final boolean containsValue(Object value) {
+ if (value == null) {
+ return containsNullValue();
+ }
+
+ // Snapshot the table reference; the scan itself is unsynchronized.
+ Entry[] tab = table;
+
+ for (int i = 0; i < tab.length; i++) {
+ for (Entry e = tab[i]; e != null; e = e.next) {
+ if (value.equals(e.value)) {
+ return true;
+ }
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * Special-case code for containsValue with null argument
+ */
+ private boolean containsNullValue() {
+ // Scan every bucket chain looking for an explicit null value.
+ Entry[] tab = table;
+
+ for (int bucket = 0; bucket < tab.length; bucket++) {
+ Entry e = tab[bucket];
+
+ while (e != null) {
+ if (e.value == null) {
+ return true;
+ }
+
+ e = e.next;
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * Returns a shallow copy of this <tt>ConcurrentHashMap</tt> instance: the
+ * keys and values themselves are not cloned.
+ *
+ * @return a shallow copy of this map.
+ */
+ public final Object clone() {
+ // Shallow copy via the copy constructor: entries are re-created but
+ // keys and values are shared with this map.
+ return new ConcurrentHashMap(this);
+ }
+
+ /**
+ * Factory hook allowing subclasses to substitute their own Entry type.
+ */
+ protected Entry createEntry(int h, Object k, Object v, Entry n) {
+ return new Entry(h, k, v, n);
+ }
+
+ /**
+ * Returns a set view of the keys contained in this map. The set is
+ * backed by the map, so changes to the map are reflected in the set, and
+ * vice-versa. The set supports element removal, which removes the
+ * corresponding mapping from this map, via the <tt>Iterator.remove</tt>,
+ * <tt>Set.remove</tt>, <tt>removeAll</tt>, <tt>retainAll</tt>, and
+ * <tt>clear</tt> operations. It does not support the <tt>add</tt> or
+ * <tt>addAll</tt> operations.
+ *
+ * @return a set view of the keys contained in this map.
+ */
+ public final Set keySet() {
+ // Lazily create and cache the view; a race here is harmless since
+ // KeySet instances hold no state of their own.
+ Set ks = keySet;
+
+ return ((ks != null) ? ks : (keySet = new KeySet()));
+ }
+
+ /**
+ * Returns a collection view of the values contained in this map. The
+ * collection is backed by the map, so changes to the map are reflected in
+ * the collection, and vice-versa. The collection supports element
+ * removal, which removes the corresponding mapping from this map, via the
+ * <tt>Iterator.remove</tt>, <tt>Collection.remove</tt>,
+ * <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt> operations.
+ * It does not support the <tt>add</tt> or <tt>addAll</tt> operations.
+ *
+ * @return a collection view of the values contained in this map.
+ */
+ public final Collection values() {
+ // Lazily create and cache the view; Values instances are stateless.
+ Collection vs = values;
+
+ return ((vs != null) ? vs : (values = new Values()));
+ }
+
+ /**
+ * Returns a collection view of the mappings contained in this map. Each
+ * element in the returned collection is a <tt>Map.Entry</tt>. The
+ * collection is backed by the map, so changes to the map are reflected in
+ * the collection, and vice-versa. The collection supports element
+ * removal, which removes the corresponding mapping from the map, via the
+ * <tt>Iterator.remove</tt>, <tt>Collection.remove</tt>,
+ * <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt> operations.
+ * It does not support the <tt>add</tt> or <tt>addAll</tt> operations.
+ *
+ * @return a collection view of the mappings contained in this map.
+ * @see Map.Entry
+ */
+ public final Set entrySet() {
+ // Lazily create and cache the view; EntrySet instances are stateless.
+ Set es = entrySet;
+
+ return ((es != null) ? es : (entrySet = new EntrySet()));
+ }
+
+ /**
+ * Save the state of the <tt>ConcurrentHashMap</tt> instance to a stream
+ * (i.e., serialize it).
+ *
+ * @serialData The <i>capacity</i> of the ConcurrentHashMap (the length of
+ * the bucket array) is emitted (int), followed by the <i>size</i> of the
+ * ConcurrentHashMap (the number of key-value mappings), followed by the key
+ * (Object) and value (Object) for each key-value mapping represented by the
+ * ConcurrentHashMap The key-value mappings are emitted in the order that
+ * they are returned by <tt>entrySet().iterator()</tt>.
+ *
+ */
+ private void writeObject(ObjectOutputStream s) throws IOException {
+ // Write out the threshold, loadfactor, and any hidden stuff
+ s.defaultWriteObject();
+
+ // Write out number of buckets
+ s.writeInt(table.length);
+
+ // Write out size (number of Mappings)
+ s.writeInt(size);
+ // NOTE(review): maxSize is a non-transient field, so it is also
+ // serialized by defaultWriteObject(); this explicit write looks
+ // redundant but is consumed by readObject() — confirm before
+ // changing the stream format.
+ s.writeInt(maxSize);
+
+ // Write out keys and values (alternating)
+ for (Iterator i = entrySet().iterator(); i.hasNext();) {
+ Map.Entry e = (Map.Entry) i.next();
+ s.writeObject(e.getKey());
+ s.writeObject(e.getValue());
+ }
+ }
+
+ /**
+ * Reconstitute the <tt>ConcurrentHashMap</tt> instance from a stream (i.e.,
+ * deserialize it).
+ */
+ private void readObject(ObjectInputStream s)
+ throws IOException, ClassNotFoundException {
+ // Read in the threshold, loadfactor, and any hidden stuff
+ s.defaultReadObject();
+
+ // Read in number of buckets and allocate the bucket array;
+ int numBuckets = s.readInt();
+ table = new Entry[numBuckets];
+
+ // Read in size (number of Mappings)
+ // These locals intentionally shadow the fields: the size field is
+ // rebuilt by put() below, and maxSize was already restored by
+ // defaultReadObject()(non-transient field); the explicit reads just
+ // keep the stream aligned with writeObject().
+ int size = s.readInt();
+ int maxSize = s.readInt();
+
+ // Read the keys and values, and put the mappings in the
+ // ConcurrentHashMap
+ for (int i = 0; i < size; i++) {
+ Object key = s.readObject();
+ Object value = s.readObject();
+ put(key, value);
+ }
+ }
+
+ protected static class Entry implements Map.Entry {
+ final Object key; // masked key: NULL_KEY stands in for null
+ Object value;
+ final int hash; // cached hash of the masked key
+ Entry next; // next entry in the bucket chain
+
+ /**
+ * Create new entry.
+ */
+ protected Entry(int h, Object k, Object v, Entry n) {
+ value = v;
+ next = n;
+ key = k;
+ hash = h;
+ }
+
+ public Object getKey() {
+ // Translate the internal sentinel back to the caller's null.
+ return unmaskNull(key);
+ }
+
+ public Object getValue() {
+ return value;
+ }
+
+ public Object setValue(Object newValue) {
+ Object oldValue = value;
+ value = newValue;
+
+ return oldValue;
+ }
+
+ public boolean equals(Object o) {
+ // Map.Entry contract: equal when both key and value are equal.
+ if (!(o instanceof Map.Entry)) {
+ return false;
+ }
+
+ Map.Entry e = (Map.Entry) o;
+ Object k1 = getKey();
+ Object k2 = e.getKey();
+
+ if ((k1 == k2) || ((k1 != null) && k1.equals(k2))) {
+ Object v1 = getValue();
+ Object v2 = e.getValue();
+
+ if ((v1 == v2) || ((v1 != null) && v1.equals(v2))) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ public int hashCode() {
+ // Matches the Map.Entry contract: key hash XOR value hash,
+ // with null(represented by NULL_KEY) hashing to 0.
+ return ((key == NULL_KEY) ? 0 : key.hashCode()) ^
+ ((value == null) ? 0 : value.hashCode());
+ }
+
+ public String toString() {
+ return getKey() + "=" + getValue();
+ }
+
+ protected Object clone() {
+ // It is the callers responsibility to set the next field
+ // correctly.
+ return new Entry(hash, key, value, null);
+ }
+ }
+
+ /**
+ * Map iterator.
+ */
+ private class HashIterator implements Iterator {
+ // Snapshot of the table at creation time; a concurrent resize swaps
+ // the map's table but this iterator keeps walking its snapshot.
+ final Entry[] table = ConcurrentHashMap.this.table;
+ final int type; // KEYS, VALUES, or ENTRIES
+ int startIndex; // bucket where iteration began
+ int stopIndex = 0; // exclusive lower bound of the current sweep
+ int index; // next bucket to examine(walks downward)
+ Entry entry = null; // next entry to hand out
+ Entry lastReturned = null;
+
+ HashIterator(int type, int startIndex) {
+ this.type = type;
+ this.startIndex = startIndex;
+ index = startIndex;
+ }
+
+ public boolean hasNext() {
+ if (entry != null) {
+ return true;
+ }
+
+ // First sweep: from startIndex down to bucket 0.
+ while (index >= stopIndex) {
+ if ((entry = table[index--]) != null) {
+ return true;
+ }
+ }
+
+ // Second sweep: wrap to the top and stop just above startIndex,
+ // so every bucket is visited exactly once.
+ if (stopIndex == 0) {
+ index = table.length - 1;
+ stopIndex = startIndex + 1;
+
+ while (index >= stopIndex) {
+ if ((entry = table[index--]) != null) {
+ return true;
+ }
+ }
+ }
+
+ return false;
+ }
+
+ public Object next() {
+ if (!hasNext()) {
+ throw new NoSuchElementException();
+ }
+
+ Entry e = lastReturned = entry;
+ entry = e.next;
+
+ // BUG FIX: the key must be unmasked before being returned,
+ // otherwise keySet().iterator() leaks the internal NULL_KEY
+ // sentinel instead of yielding null(Entry.getKey() unmasks, so
+ // VALUES and ENTRIES were already correct).
+ return (type == KEYS) ? unmaskNull(e.key)
+ : ((type == VALUES) ? e.value : e);
+ }
+
+ public void remove() {
+ if (lastReturned == null) {
+ throw new IllegalStateException();
+ }
+
+ synchronized (ConcurrentHashMap.this) {
+ // Remove from the map's current table, not the snapshot,
+ // in case a resize happened since this iterator was created.
+ Entry[] tab = ConcurrentHashMap.this.table;
+ int index = (lastReturned.hash & 0x7FFFFFFF) % tab.length;
+
+ for (Entry e = tab[index], prev = null; e != null;
+ prev = e, e = e.next) {
+ if (e == lastReturned) {
+ if (prev == null) {
+ tab[index] = e.next;
+ } else {
+ prev.next = e.next;
+ }
+
+ size--;
+ lastReturned = null;
+
+ return;
+ }
+ }
+
+ throw new Error("Iterated off table when doing remove");
+ }
+ }
+ }
+
+ private final class KeySet extends AbstractSet {
+ // View of the map's keys; all mutations write through to the map.
+ public Iterator iterator() {
+ return new HashIterator(KEYS, table.length - 1);
+ }
+
+ public int size() {
+ return size;
+ }
+
+ public boolean contains(Object o) {
+ return containsKey(o);
+ }
+
+ public boolean remove(Object o) {
+ return ConcurrentHashMap.this.removeEntryForKey(o) != null;
+ }
+
+ public void clear() {
+ ConcurrentHashMap.this.clear();
+ }
+ }
+
+ private final class Values extends AbstractCollection {
+ // View of the map's values; removal is only supported through the
+ // iterator(no remove(Object) override).
+ public Iterator iterator() {
+ return new HashIterator(VALUES, table.length - 1);
+ }
+
+ public int size() {
+ return size;
+ }
+
+ public boolean contains(Object o) {
+ return containsValue(o);
+ }
+
+ public void clear() {
+ ConcurrentHashMap.this.clear();
+ }
+ }
+
+ private final class EntrySet extends AbstractSet {
+ public Iterator iterator() {
+ return new HashIterator(ENTRIES, table.length - 1);
+ }
+
+ public boolean contains(Object o) {
+ if (!(o instanceof Map.Entry)) {
+ return false;
+ }
+
+ // Entry.equals compares key AND value, so membership requires
+ // the full mapping to match.
+ Map.Entry e = (Map.Entry) o;
+ Entry candidate = getEntry(e.getKey());
+
+ return (candidate != null) && candidate.equals(e);
+ }
+
+ public boolean remove(Object o) {
+ return removeMapping(o) != null;
+ }
+
+ public int size() {
+ return size;
+ }
+
+ public void clear() {
+ ConcurrentHashMap.this.clear();
+ }
+ }
+}
Propchange: incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentHashMap.java
------------------------------------------------------------------------------
svn:executable = *
Added: incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentHashSet.java
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentHashSet.java?rev=417856&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentHashSet.java (added)
+++ incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentHashSet.java Wed Jun 28 12:34:33 2006
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.openjpa.lib.util.concurrent;
+
+import org.apache.commons.collections.map.*;
+import org.apache.commons.collections.set.*;
+
+import java.io.*;
+
+import java.util.*;
+
+
+/**
+ * <p>A concurrent set.</p>
+ *
+ * @author Abe White
+ * @nojavadoc */
+ public class ConcurrentHashSet implements Set, Serializable {
+ // Shared placeholder stored as the value for every key in the
+ // backing map.
+ private static final Object DUMMY_VAL = new Object();
+
+ // The backing set: a commons-collections MapBackedSet over our
+ // ConcurrentHashMap; every operation below simply delegates to it.
+ private final Set _set;
+
+ /**
+ * Construct an empty set backed by a ConcurrentHashMap.
+ */
+ public ConcurrentHashSet() {
+ _set = MapBackedSet.decorate(new ConcurrentHashMap(), DUMMY_VAL);
+ }
+
+ public boolean add(Object obj) {
+ return _set.add(obj);
+ }
+
+ public boolean addAll(Collection coll) {
+ return _set.addAll(coll);
+ }
+
+ public void clear() {
+ _set.clear();
+ }
+
+ public boolean contains(Object obj) {
+ return _set.contains(obj);
+ }
+
+ public boolean containsAll(Collection coll) {
+ return _set.containsAll(coll);
+ }
+
+ public boolean isEmpty() {
+ return _set.isEmpty();
+ }
+
+ public Iterator iterator() {
+ return _set.iterator();
+ }
+
+ public boolean remove(Object obj) {
+ return _set.remove(obj);
+ }
+
+ public boolean removeAll(Collection coll) {
+ return _set.removeAll(coll);
+ }
+
+ public boolean retainAll(Collection coll) {
+ return _set.retainAll(coll);
+ }
+
+ public int size() {
+ return _set.size();
+ }
+
+ public Object[] toArray() {
+ return _set.toArray();
+ }
+
+ public Object[] toArray(Object[] arr) {
+ return _set.toArray(arr);
+ }
+
+ public int hashCode() {
+ return _set.hashCode();
+ }
+
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ // Unwrap another ConcurrentHashSet so two wrappers with equal
+ // elements compare equal via the Set.equals contract.
+ if (obj instanceof ConcurrentHashSet) {
+ obj = ((ConcurrentHashSet) obj)._set;
+ }
+
+ return _set.equals(obj);
+ }
+ }
Propchange: incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentHashSet.java
------------------------------------------------------------------------------
svn:executable = *
Added: incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentLinkedQueue.java
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentLinkedQueue.java?rev=417856&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentLinkedQueue.java (added)
+++ incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentLinkedQueue.java Wed Jun 28 12:34:33 2006
@@ -0,0 +1,558 @@
+/*
+ * Copyright 2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ */
+package org.apache.openjpa.lib.util.concurrent;
+
+import java.io.*;
+
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+
+
+/**
+ * An unbounded thread-safe {@linkplain Queue queue} based on linked nodes.
+ * This queue orders elements FIFO (first-in-first-out).
+ * The <em>head</em> of the queue is that element that has been on the
+ * queue the longest time.
+ * The <em>tail</em> of the queue is that element that has been on the
+ * queue the shortest time. New elements
+ * are inserted at the tail of the queue, and the queue retrieval
+ * operations obtain elements at the head of the queue.
+ * A <tt>ConcurrentLinkedQueue</tt> is an appropriate choice when
+ * many threads will share access to a common collection.
+ * This queue does not permit <tt>null</tt> elements.
+ *
+ * <p>This implementation employs an efficient "wait-free"
+ * algorithm based on one described in <a
+ * href="http://www.cs.rochester.edu/u/michael/PODC96.html"> Simple,
+ * Fast, and Practical Non-Blocking and Blocking Concurrent Queue
+ * Algorithms</a> by Maged M. Michael and Michael L. Scott.
+ *
+ * <p>Beware that, unlike in most collections, the <tt>size</tt> method
+ * is <em>NOT</em> a constant-time operation. Because of the
+ * asynchronous nature of these queues, determining the current number
+ * of elements requires a traversal of the elements.
+ *
+ * <p>This class and its iterator implement all of the
+ * <em>optional</em> methods of the {@link Collection} and {@link
+ * Iterator} interfaces.
+ *
+ * <p>Memory consistency effects: As with other concurrent
+ * collections, actions in a thread prior to placing an object into a
+ * {@code ConcurrentLinkedQueue}
+ * <a href="package-summary.html#MemoryVisibility"><i>happen-before</i></a>
+ * actions subsequent to the access or removal of that element from
+ * the {@code ConcurrentLinkedQueue} in another thread.
+ *
+ * <p>This class is a member of the
+ * <a href="{@docRoot}/../guide/collections/index.html">
+ * Java Collections Framework</a>.
+ *
+ * @since 1.5
+ * @author Doug Lea
+ */
public class ConcurrentLinkedQueue extends AbstractQueue implements Queue,
    java.io.Serializable {
    private static final long serialVersionUID = 196745693267521676L;

    // This backport targets pre-1.5 JVMs, where no hardware CAS intrinsic
    // is available. The two monitors below emulate an atomic
    // compare-and-set on the head and tail references (see casHead /
    // casTail). They are serializable so that serializing the queue does
    // not fail on these final fields.
    private final Object headLock = new SerializableLock();
    private final Object tailLock = new SerializableLock();

    /**
     * Pointer to header node, initialized to a dummy node. The first
     * actual node is at head.getNext().
     */
    private transient volatile Node head = new Node(null, null);

    /** Pointer to last node on list **/
    private transient volatile Node tail = head;

    /**
     * Creates a <tt>ConcurrentLinkedQueue</tt> that is initially empty.
     */
    public ConcurrentLinkedQueue() {
    }

    /**
     * Creates a <tt>ConcurrentLinkedQueue</tt>
     * initially containing the elements of the given collection,
     * added in traversal order of the collection's iterator.
     * @param c the collection of elements to initially contain
     * @throws NullPointerException if the specified collection or any
     * of its elements are null
     */
    public ConcurrentLinkedQueue(Collection c) {
        for (Iterator it = c.iterator(); it.hasNext();)
            add(it.next());
    }

    /**
     * Emulated CAS: atomically(via the tail monitor) swings
     * <tt>tail</tt> from <tt>cmp</tt> to <tt>val</tt>.
     *
     * @return true if <tt>tail</tt> was <tt>cmp</tt> and was updated
     */
    private boolean casTail(Node cmp, Node val) {
        synchronized (tailLock) {
            if (tail == cmp) {
                tail = val;

                return true;
            } else {
                return false;
            }
        }
    }

    /**
     * Emulated CAS: atomically(via the head monitor) swings
     * <tt>head</tt> from <tt>cmp</tt> to <tt>val</tt>.
     *
     * @return true if <tt>head</tt> was <tt>cmp</tt> and was updated
     */
    private boolean casHead(Node cmp, Node val) {
        synchronized (headLock) {
            if (head == cmp) {
                head = val;

                return true;
            } else {
                return false;
            }
        }
    }

    // Have to override just to update the javadoc

    /**
     * Inserts the specified element at the tail of this queue.
     *
     * @return <tt>true</tt> (as specified by {@link Collection#add})
     * @throws NullPointerException if the specified element is null
     */
    public boolean add(Object e) {
        return offer(e);
    }

    /**
     * Inserts the specified element at the tail of this queue.
     *
     * @return <tt>true</tt> (as specified by {@link Queue#offer})
     * @throws NullPointerException if the specified element is null
     */
    public boolean offer(Object e) {
        if (e == null) {
            throw new NullPointerException();
        }

        Node n = new Node(e, null);

        for (;;) {
            Node t = tail;
            Node s = t.getNext();

            // Proceed only if our snapshot of tail is still current.
            if (t == tail) {
                if (s == null) {
                    // t is the true last node; try to link n after it.
                    if (t.casNext(s, n)) {
                        // Linked. Try to swing tail to the new node;
                        // failure is benign(another thread advanced it).
                        casTail(t, n);

                        return true;
                    }
                } else {
                    // Tail is lagging; help advance it, then retry.
                    casTail(t, s);
                }
            }
        }
    }

    public Object poll() {
        for (;;) {
            Node h = head;
            Node t = tail;
            Node first = h.getNext();

            // Proceed only if our snapshot of head is still current.
            if (h == head) {
                if (h == t) {
                    if (first == null) {
                        // Queue is empty.
                        return null;
                    } else {
                        // Tail is lagging behind head; help advance it.
                        casTail(t, first);
                    }
                } else if (casHead(h, first)) {
                    Object item = first.getItem();

                    if (item != null) {
                        first.setItem(null);

                        return item;
                    }

                    // else skip over deleted item, continue loop,
                }
            }
        }
    }

    public Object peek() { // same as poll except don't remove item

        for (;;) {
            Node h = head;
            Node t = tail;
            Node first = h.getNext();

            if (h == head) {
                if (h == t) {
                    if (first == null) {
                        return null;
                    } else {
                        // Tail is lagging behind head; help advance it.
                        casTail(t, first);
                    }
                } else {
                    Object item = first.getItem();

                    if (item != null) {
                        return item;
                    } else { // remove deleted node and continue
                        casHead(h, first);
                    }
                }
            }
        }
    }

    /**
     * Returns the first actual (non-header) node on list. This is yet
     * another variant of poll/peek; here returning out the first
     * node, not element (so we cannot collapse with peek() without
     * introducing race.)
     */
    Node first() {
        for (;;) {
            Node h = head;
            Node t = tail;
            Node first = h.getNext();

            if (h == head) {
                if (h == t) {
                    if (first == null) {
                        return null;
                    } else {
                        casTail(t, first);
                    }
                } else {
                    if (first.getItem() != null) {
                        return first;
                    } else { // remove deleted node and continue
                        casHead(h, first);
                    }
                }
            }
        }
    }

    /**
     * Returns <tt>true</tt> if this queue contains no elements.
     *
     * @return <tt>true</tt> if this queue contains no elements
     */
    public boolean isEmpty() {
        return first() == null;
    }

    /**
     * Returns the number of elements in this queue. If this queue
     * contains more than <tt>Integer.MAX_VALUE</tt> elements, returns
     * <tt>Integer.MAX_VALUE</tt>.
     *
     * <p>Beware that, unlike in most collections, this method is
     * <em>NOT</em> a constant-time operation. Because of the
     * asynchronous nature of these queues, determining the current
     * number of elements requires an O(n) traversal.
     *
     * @return the number of elements in this queue
     */
    public int size() {
        int count = 0;

        // Nodes with a null item are lazily-deleted entries(see
        // remove(Object)) and are not counted.
        for (Node p = first(); p != null; p = p.getNext()) {
            if (p.getItem() != null) {
                // Collections.size() spec says to max out
                if (++count == Integer.MAX_VALUE) {
                    break;
                }
            }
        }

        return count;
    }

    /**
     * Returns <tt>true</tt> if this queue contains the specified element.
     * More formally, returns <tt>true</tt> if and only if this queue contains
     * at least one element <tt>e</tt> such that <tt>o.equals(e)</tt>.
     *
     * @param o object to be checked for containment in this queue
     * @return <tt>true</tt> if this queue contains the specified element
     */
    public boolean contains(Object o) {
        if (o == null) {
            return false;
        }

        for (Node p = first(); p != null; p = p.getNext()) {
            Object item = p.getItem();

            // Skip lazily-deleted nodes(null item).
            if ((item != null) && o.equals(item)) {
                return true;
            }
        }

        return false;
    }

    /**
     * Removes a single instance of the specified element from this queue,
     * if it is present. More formally, removes an element <tt>e</tt> such
     * that <tt>o.equals(e)</tt>, if this queue contains one or more such
     * elements.
     * Returns <tt>true</tt> if this queue contained the specified element
     * (or equivalently, if this queue changed as a result of the call).
     *
     * @param o element to be removed from this queue, if present
     * @return <tt>true</tt> if this queue changed as a result of the call
     */
    public boolean remove(Object o) {
        if (o == null) {
            return false;
        }

        // Lazy deletion: CAS the item to null and leave the node linked;
        // traversal operations unlink and skip null-item nodes later.
        for (Node p = first(); p != null; p = p.getNext()) {
            Object item = p.getItem();

            if ((item != null) && o.equals(item) && p.casItem(item, null)) {
                return true;
            }
        }

        return false;
    }

    /**
     * Returns an iterator over the elements in this queue in proper sequence.
     * The returned iterator is a "weakly consistent" iterator that
     * will never throw {@link ConcurrentModificationException},
     * and guarantees to traverse elements as they existed upon
     * construction of the iterator, and may (but is not guaranteed to)
     * reflect any modifications subsequent to construction.
     *
     * @return an iterator over the elements in this queue in proper sequence
     */
    public Iterator iterator() {
        return new Itr();
    }

    /**
     * Save the state to a stream (that is, serialize it).
     *
     * @serialData All of the elements (each an <tt>E</tt>) in
     * the proper order, followed by a null
     * @param s the stream
     */
    private void writeObject(java.io.ObjectOutputStream s)
        throws java.io.IOException {
        // Write out any hidden stuff
        s.defaultWriteObject();

        // Write out all elements in the proper order.
        for (Node p = first(); p != null; p = p.getNext()) {
            Object item = p.getItem();

            if (item != null) {
                s.writeObject(item);
            }
        }

        // Use trailing null as sentinel
        s.writeObject(null);
    }

    /**
     * Reconstitute the Queue instance from a stream (that is,
     * deserialize it).
     * @param s the stream
     */
    private void readObject(java.io.ObjectInputStream s)
        throws java.io.IOException, ClassNotFoundException {
        // Read in capacity, and any hidden stuff
        s.defaultReadObject();

        // head/tail are transient; rebuild the empty dummy-node list
        // before re-offering the serialized elements.
        head = new Node(null, null);
        tail = head;

        // Read in all elements and place in queue
        for (;;) {
            Object item = s.readObject();

            if (item == null) {
                break;
            } else {
                offer(item);
            }
        }
    }

    /*
     * This is a straight adaptation of Michael & Scott algorithm.
     * For explanation, read the paper. The only (minor) algorithmic
     * difference is that this version supports lazy deletion of
     * internal nodes (method remove(Object)) -- remove CAS'es item
     * fields to null. The normal queue operations unlink but then
     * pass over nodes with null item fields. Similarly, iteration
     * methods ignore those with nulls.
     *
     * Also note that like most non-blocking algorithms in this
     * package, this implementation relies on the fact that in garbage
     * collected systems, there is no possibility of ABA problems due
     * to recycled nodes, so there is no need to use "counted
     * pointers" or related techniques seen in versions used in
     * non-GC'ed settings.
     */
    private static class Node {
        private volatile Object item;
        private volatile Node next;

        // NOTE(review): this single-argument constructor appears unused
        // within this class.
        Node(Object x) {
            item = x;
        }

        Node(Object x, Node n) {
            item = x;
            next = n;
        }

        Object getItem() {
            return item;
        }

        // The synchronized mutators below emulate the per-field CAS
        // operations of the JDK 5 original on pre-1.5 JVMs.
        synchronized boolean casItem(Object cmp, Object val) {
            if (item == cmp) {
                item = val;

                return true;
            } else {
                return false;
            }
        }

        synchronized void setItem(Object val) {
            item = val;
        }

        Node getNext() {
            return next;
        }

        synchronized boolean casNext(Node cmp, Node val) {
            if (next == cmp) {
                next = val;

                return true;
            } else {
                return false;
            }
        }

        synchronized void setNext(Node val) {
            next = val;
        }
    }

    /**
     * Weakly-consistent iterator that looks one element ahead so that
     * hasNext()/next() stay in agreement even under concurrent removal.
     */
    private class Itr implements Iterator {
        /**
         * Next node to return item for.
         */
        private Node nextNode;

        /**
         * nextItem holds on to item fields because once we claim
         * that an element exists in hasNext(), we must return it in
         * the following next() call even if it was in the process of
         * being removed when hasNext() was called.
         */
        private Object nextItem;

        /**
         * Node of the last returned item, to support remove.
         */
        private Node lastRet;

        Itr() {
            advance();
        }

        /**
         * Moves to next valid node and returns item to return for
         * next(), or null if no such.
         */
        private Object advance() {
            lastRet = nextNode;

            Object x = nextItem;

            Node p = (nextNode == null) ? first() : nextNode.getNext();

            for (;;) {
                if (p == null) {
                    nextNode = null;
                    nextItem = null;

                    return x;
                }

                Object item = p.getItem();

                if (item != null) {
                    nextNode = p;
                    nextItem = item;

                    return x;
                } else { // skip over nulls
                    p = p.getNext();
                }
            }
        }

        public boolean hasNext() {
            return nextNode != null;
        }

        public Object next() {
            if (nextNode == null) {
                throw new NoSuchElementException();
            }

            return advance();
        }

        public void remove() {
            Node l = lastRet;

            if (l == null) {
                throw new IllegalStateException();
            }

            // rely on a future traversal to relink.
            l.setItem(null);
            lastRet = null;
        }
    }

    /**
     * Serializable marker type used for the final head/tail monitors so
     * the queue itself remains serializable.
     * NOTE(review): no explicit serialVersionUID is declared here, so the
     * default computed UID applies.
     */
    private static class SerializableLock implements Serializable {
    }
}
Propchange: incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentLinkedQueue.java
------------------------------------------------------------------------------
svn:executable = *
Added: incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentMap.java
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentMap.java?rev=417856&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentMap.java (added)
+++ incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentMap.java Wed Jun 28 12:34:33 2006
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.openjpa.lib.util.concurrent;
+
+import java.util.*;
+
+
+/**
+ * <p>A highly concurrent map.</p>
+ *
+ * @author Abe White
+ */
public interface ConcurrentMap extends Map {

    /**
     * Removes and returns an arbitrary (not strictly random) entry from
     * the map. This allows implementation of concurrent caches with
     * size ceilings.
     *
     * @return the entry that was removed, or null if the map is empty
     */
    Map.Entry removeRandom();

    /**
     * Returns an iterator over the map's entries, beginning at an
     * arbitrary (not strictly random) entry.
     */
    Iterator randomEntryIterator();
}
Propchange: incubator/openjpa/trunk/openjpa-lib/main/java/org/apache/openjpa/lib/util/concurrent/ConcurrentMap.java
------------------------------------------------------------------------------
svn:executable = *