Posted to commits@harmony.apache.org by nd...@apache.org on 2007/04/18 07:29:12 UTC

svn commit: r529875 - /harmony/enhanced/drlvm/trunk/vm/vmcore/src/thread/linux/atomics.cpp

Author: ndbeyer
Date: Tue Apr 17 22:29:10 2007
New Revision: 529875

URL: http://svn.apache.org/viewvc?view=rev&rev=529875
Log:
Make assembly instructions consistent; add a "memory" clobber to all variants
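
For reference, GCC's extended inline assembly takes the form
asm(template : outputs : inputs : clobbers); listing "memory" in the
clobber list tells the compiler the instruction may read or write
arbitrary memory, so it must flush cached register values and must not
reorder memory accesses across the statement. A minimal sketch of the
change this commit applies to the EM64T branch:

    // Before: no clobber list, so GCC may still reorder or cache
    // memory accesses around the CPU fence.
    asm volatile ("mfence");

    // After: the "memory" clobber makes the statement a compiler-level
    // barrier in addition to the hardware fence.
    asm volatile ("mfence" : : : "memory");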

Modified:
    harmony/enhanced/drlvm/trunk/vm/vmcore/src/thread/linux/atomics.cpp

Modified: harmony/enhanced/drlvm/trunk/vm/vmcore/src/thread/linux/atomics.cpp
URL: http://svn.apache.org/viewvc/harmony/enhanced/drlvm/trunk/vm/vmcore/src/thread/linux/atomics.cpp?view=diff&rev=529875&r1=529874&r2=529875
==============================================================================
--- harmony/enhanced/drlvm/trunk/vm/vmcore/src/thread/linux/atomics.cpp (original)
+++ harmony/enhanced/drlvm/trunk/vm/vmcore/src/thread/linux/atomics.cpp Tue Apr 17 22:29:10 2007
@@ -19,9 +19,9 @@
 
 void MemoryReadWriteBarrier() {
 #if defined(_EM64T_)
-    asm volatile ("mfence");
+    asm volatile ("mfence" : : : "memory");
 #elif defined(_IPF_)
-    asm volatile ("mf" ::: "memory");
+    asm volatile ("mf" : : : "memory");
 #else // General x86 case
     /*
      * This code must use a lock-prefixed assembly instruction, so that 
@@ -41,7 +41,7 @@
 
 void MemoryWriteBarrier() {
 #if defined(_IPF_)
-    asm volatile ("mf" ::: "memory");
+    asm volatile ("mf" : : : "memory");
 #else // General x86 and x86_64 case
     /*
      * We could use the same lock-prefixed assembly instruction above,
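
The diff context ends mid-comment, so the x86 branch it refers to is not
shown in this hunk. The technique the comment names is the classic
lock-prefixed idiom: on x86 processors that predate SSE2's mfence, any
lock-prefixed read-modify-write instruction serializes memory accesses.
A sketch of that idiom (the exact operand used in the Harmony source is
elided here):

    // A lock-prefixed add of zero to a stack location has no visible
    // effect on data, but the lock prefix makes it act as a full
    // memory barrier on x86; the "memory" clobber stops the compiler
    // from reordering accesses around it.
    asm volatile ("lock; addl $0,0(%%esp)" : : : "memory");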