Posted to commits@openwhisk.apache.org by cb...@apache.org on 2017/08/18 08:47:09 UTC

[incubator-openwhisk] branch master updated: Remove spray-caching dependency. (#2628)

This is an automated email from the ASF dual-hosted git repository.

cbickel pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-openwhisk.git


The following commit(s) were added to refs/heads/master by this push:
     new 68c5e35  Remove spray-caching dependency. (#2628)
68c5e35 is described below

commit 68c5e35a25b6dbdbf8a58f24384204b5a57edce6
Author: Markus Thömmes <ma...@me.com>
AuthorDate: Fri Aug 18 10:47:07 2017 +0200

    Remove spray-caching dependency. (#2628)
    
    Spray-caching is the caching module of spray-http, a server we no longer use. We already have our own cache implementation, and keeping spray-caching would bind us to Scala 2.11.
---
 common/scala/build.gradle                          |  1 -
 .../scala/whisk/core/database/InMemoryCache.scala  | 73 ----------------------
 .../MultipleReadersSingleWriterCache.scala         | 46 +++++++++-----
 3 files changed, 29 insertions(+), 91 deletions(-)

diff --git a/common/scala/build.gradle b/common/scala/build.gradle
index e1d0535..2dc406a 100644
--- a/common/scala/build.gradle
+++ b/common/scala/build.gradle
@@ -11,7 +11,6 @@ repositories {
 dependencies {
     compile "org.scala-lang:scala-library:${gradle.scala.version}"
 
-    compile 'io.spray:spray-caching_2.11:1.3.4'
     compile 'io.spray:spray-json_2.11:1.3.3'
 
     compile 'com.typesafe.akka:akka-actor_2.11:2.4.16'
diff --git a/common/scala/src/main/scala/whisk/core/database/InMemoryCache.scala b/common/scala/src/main/scala/whisk/core/database/InMemoryCache.scala
deleted file mode 100644
index fd2b3c9..0000000
--- a/common/scala/src/main/scala/whisk/core/database/InMemoryCache.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package whisk.core.database
-
-import scala.concurrent.Future
-import spray.caching.Cache
-import spray.caching.LruCache
-import whisk.common.Logging
-import whisk.common.TransactionId
-import scala.concurrent.ExecutionContext
-import whisk.common.LoggingMarkers
-
-trait InMemoryCache[W] {
-
-    /** Toggle to enable/disable caching. */
-    protected def cacheEnabled = false
-
-    protected def cacheKeys(w: W): Set[Any] = Set(w)
-
-    protected def cacheInvalidate(keys: Set[Any])(
-        implicit transid: TransactionId, logger: Logging): Unit = {
-        if (cacheEnabled) {
-            logger.info(this, s"invalidating $keys")
-            keys foreach { k => cache remove k }
-        }
-    }
-
-    protected def cacheLookup[Wsuper >: W](
-        datastore: ArtifactStore[Wsuper],
-        key: Any,
-        future: => Future[W],
-        fromCache: Boolean = cacheEnabled)(
-            implicit transid: TransactionId, logger: Logging) = {
-        if (fromCache) {
-            implicit val ec = datastore.executionContext
-            cache.get(key) map { v =>
-                transid.mark(this, LoggingMarkers.DATABASE_CACHE_HIT, s"[GET] serving from cache: $key")(logger)
-                v
-            } getOrElse {
-                transid.mark(this, LoggingMarkers.DATABASE_CACHE_MISS, s"[GET] serving from datastore: $key")(logger)
-                future flatMap {
-                    // cache result of future iff it was successful
-                    cache(key)(_)
-                }
-            }
-        } else future
-    }
-
-    protected def cacheUpdate(keys: Set[Any], w: W)(
-        implicit transid: TransactionId, logger: Logging, ec: ExecutionContext) = {
-        if (cacheEnabled) {
-            logger.info(this, s"caching $keys")
-            keys foreach { cache(_) { w } }
-        }
-    }
-
-    private val cache: Cache[W] = LruCache()
-}
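
For context, the deleted InMemoryCache trait wrapped spray's LruCache to provide lookup-or-populate semantics with cache hit/miss markers. A roughly equivalent Future-valued store can be built directly on Caffeine, which the remaining MultipleReadersSingleWriterCache already uses as its backing store. The following is only an illustrative sketch, not code from this commit; the CacheSketch object, the Doc alias and the lookup/readFromDatastore names are made up, and the builder settings simply mirror the ones visible in the diff below.

    import java.util.concurrent.{ConcurrentMap, TimeUnit}

    import scala.concurrent.{ExecutionContext, Future}

    import com.github.benmanes.caffeine.cache.Caffeine

    object CacheSketch {
        // Hypothetical stand-in for whatever entity type is being cached.
        type Doc = String

        // A Future-valued concurrent map built with Caffeine, mirroring the
        // builder settings of the backing store in MultipleReadersSingleWriterCache.
        val store: ConcurrentMap[Any, Future[Doc]] =
            Caffeine.newBuilder().asInstanceOf[Caffeine[Any, Future[Doc]]]
                .expireAfterWrite(5, TimeUnit.MINUTES)
                .softValues()
                .build[Any, Future[Doc]]()
                .asMap()

        // Lookup-or-populate, analogous to the removed cacheLookup: serve from
        // the map if present, otherwise read from the datastore and remember
        // only a successful result. Unlike the promise-based cache kept in the
        // repository, this simple form does not deduplicate concurrent loads.
        def lookup(key: Any)(readFromDatastore: => Future[Doc])(implicit ec: ExecutionContext): Future[Doc] =
            Option(store.get(key)).getOrElse {
                val value = readFromDatastore
                value.foreach(_ => store.putIfAbsent(key, value))
                value
            }
    }

The promise-plus-putIfAbsent variant in ConcurrentMapBackedCache below is what the repository actually keeps; it additionally ensures that concurrent readers of the same key share one in-flight Future.
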
diff --git a/common/scala/src/main/scala/whisk/core/database/MultipleReadersSingleWriterCache.scala b/common/scala/src/main/scala/whisk/core/database/MultipleReadersSingleWriterCache.scala
index 79f2ac8..d824948 100644
--- a/common/scala/src/main/scala/whisk/core/database/MultipleReadersSingleWriterCache.scala
+++ b/common/scala/src/main/scala/whisk/core/database/MultipleReadersSingleWriterCache.scala
@@ -15,6 +15,11 @@
  * limitations under the License.
  */
 
+/*
+ * Cache base implementation:
+ * Copyright (C) 2017 Lightbend Inc. <http://www.lightbend.com/>
+ */
+
 package whisk.core.database
 
 import java.util.concurrent.atomic.AtomicReference
@@ -26,13 +31,13 @@ import scala.concurrent.Future
 import scala.concurrent.Promise
 import scala.util.Failure
 import scala.util.Success
+import scala.language.implicitConversions
 
-import spray.caching.Cache
-import spray.caching.ValueMagnet.fromAny
 import whisk.common.Logging
 import whisk.common.LoggingMarkers
 import whisk.common.TransactionId
 import com.github.benmanes.caffeine.cache.Caffeine
+import scala.util.control.NonFatal
 
 /**
  * A cache that allows multiple readers, but only a single writer, at
@@ -411,7 +416,7 @@ trait MultipleReadersSingleWriterCache[W, Winfo] {
     }
 
     /** This is the backing store. */
-    private val cache: Cache[Entry] = new ConcurrentMapBackedCache(
+    private val cache: ConcurrentMapBackedCache[Entry] = new ConcurrentMapBackedCache(
         Caffeine.newBuilder().asInstanceOf[Caffeine[Any, Future[Entry]]]
             .expireAfterWrite(5, TimeUnit.MINUTES)
             .softValues()
@@ -422,35 +427,42 @@ trait MultipleReadersSingleWriterCache[W, Winfo] {
  * A thread-safe implementation of [[spray.caching.cache]] backed by a plain
  * [[java.util.concurrent.ConcurrentMap]].
  *
- * The implementation is entirely copied from [[spray.caching.SimpleLruCache]]
- * the only difference being the store type. Implementation is identical.
- *
- * The unimplemented methods are not needed for our internal implementation.
+ * The implementation is entirely copied from Spray's [[spray.caching.Cache]] and
+ * [[spray.caching.SimpleLruCache]] respectively, the only difference being the store type.
+ * Implementation otherwise is identical.
  */
-private class ConcurrentMapBackedCache[V](store: ConcurrentMap[Any, Future[V]]) extends Cache[V] {
+private class ConcurrentMapBackedCache[V](store: ConcurrentMap[Any, Future[V]]) {
+    val cache = this
 
-    def get(key: Any) = Option(store.get(key))
+    def apply(key: Any) = new Keyed(key)
 
-    def apply(key: Any, genValue: () ⇒ Future[V])(implicit ec: ExecutionContext): Future[V] = {
+    class Keyed(key: Any) {
+        def apply(magnet: => ValueMagnet[V])(implicit ec: ExecutionContext): Future[V] =
+            cache.apply(key, () => try magnet.future catch { case NonFatal(e) => Future.failed(e) })
+    }
+
+    def apply(key: Any, genValue: () => Future[V])(implicit ec: ExecutionContext): Future[V] = {
         val promise = Promise[V]()
         store.putIfAbsent(key, promise.future) match {
-            case null ⇒
+            case null =>
                 val future = genValue()
-                future.onComplete { value ⇒
+                future.onComplete { value =>
                     promise.complete(value)
                     // in case of exceptions we remove the cache entry (i.e. try again later)
                     if (value.isFailure) store.remove(key, promise.future)
                 }
                 future
-            case existingFuture ⇒ existingFuture
+            case existingFuture => existingFuture
         }
     }
 
     def remove(key: Any) = Option(store.remove(key))
 
-    def clear(): Unit = store.clear()
-    def keys: Set[Any] = ???
-    def ascendingKeys(limit: Option[Int] = None) = ???
-
     def size = store.size
 }
+
+class ValueMagnet[V](val future: Future[V])
+object ValueMagnet {
+    implicit def fromAny[V](block: V): ValueMagnet[V] = fromFuture(Future.successful(block))
+    implicit def fromFuture[V](future: Future[V]): ValueMagnet[V] = new ValueMagnet(future)
+}
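
The ValueMagnet indirection above preserves spray-caching's call-site syntax: cache(key) { ... } accepts either a plain value or a Future, while the putIfAbsent in apply makes concurrent loaders of the same key share a single in-flight Future. Below is a hedged usage sketch only, not part of the commit; MagnetUsageSketch, loadEntry and the String value type are invented for illustration, and the object sits in the whisk.core.database package because ConcurrentMapBackedCache is private to it.

    package whisk.core.database

    import java.util.concurrent.ConcurrentHashMap

    import scala.concurrent.{ExecutionContext, Future}

    object MagnetUsageSketch {
        implicit val ec: ExecutionContext = ExecutionContext.global

        // Any ConcurrentMap works as the backing store; a plain ConcurrentHashMap
        // is enough for this sketch.
        val entries = new ConcurrentMapBackedCache[String](new ConcurrentHashMap[Any, Future[String]]())

        def loadEntry(key: String): Future[String] = Future(s"value for $key")

        // ValueMagnet.fromFuture lets a Future be passed directly ...
        val fromFuture: Future[String] = entries("doc1") { loadEntry("doc1") }

        // ... and ValueMagnet.fromAny lets a plain value be passed as well.
        val fromValue: Future[String] = entries("doc2") { "precomputed value" }

        // A repeated lookup for a key that is already present (or in flight)
        // returns the stored Future; loadEntry is not invoked again.
        val shared: Future[String] = entries("doc1") { loadEntry("doc1") }
    }
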

-- 
To stop receiving notification emails like this one, please contact
commits@openwhisk.apache.org.