From 02cde662701d1ca0582d3b7ce4c381a6d84672f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Fri, 27 May 2016 21:24:33 +0200 Subject: [PATCH 01/40] Depend on monix-eval --- build.sbt | 1 + 1 file changed, 1 insertion(+) diff --git a/build.sbt b/build.sbt index e6952e3d..cd99f3f8 100644 --- a/build.sbt +++ b/build.sbt @@ -18,6 +18,7 @@ lazy val commonSettings = Seq( resolvers += Resolver.sonatypeRepo("releases"), libraryDependencies ++= Seq( "org.typelevel" %%% "cats" % "0.6.0", + "io.monix" %%% "monix-eval" % "2.0-RC3", "org.scalatest" %%% "scalatest" % "3.0.0-M7" % "test", compilerPlugin( "org.spire-math" %% "kind-projector" % "0.7.1" From 8b43e767d0c0e8c28a74116b99cebfb8c846c1a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sat, 28 May 2016 00:34:00 +0200 Subject: [PATCH 02/40] Update DataSource to be implemented in terms of Task --- shared/src/main/scala/cache.scala | 21 + shared/src/main/scala/datasource.scala | 13 +- shared/src/main/scala/fetch.scala | 52 +- shared/src/main/scala/implicits.scala | 46 +- shared/src/main/scala/interpreters.scala | 278 +++++----- shared/src/main/scala/syntax.scala | 26 +- shared/src/test/scala/FetchFutureTests.scala | 136 +++++ shared/src/test/scala/FetchTests.scala | 549 +++++++++---------- 8 files changed, 595 insertions(+), 526 deletions(-) create mode 100644 shared/src/test/scala/FetchFutureTests.scala diff --git a/shared/src/main/scala/cache.scala b/shared/src/main/scala/cache.scala index 08ed9eee..1cbaf1e3 100644 --- a/shared/src/main/scala/cache.scala +++ b/shared/src/main/scala/cache.scala @@ -30,3 +30,24 @@ trait DataSourceCache { }) } } + +/** + * A cache that stores its elements in memory. + */ +case class InMemoryCache(state: Map[DataSourceIdentity, Any]) extends DataSourceCache { + override def get(k: DataSourceIdentity): Option[Any] = + state.get(k) + + override def update[A](k: DataSourceIdentity, v: A): InMemoryCache = + copy(state = state.updated(k, v)) +} + +object InMemoryCache { + def empty: InMemoryCache = InMemoryCache(Map.empty[DataSourceIdentity, Any]) + + def apply(results: (DataSourceIdentity, Any)*): InMemoryCache = + InMemoryCache( + results.foldLeft(Map.empty[DataSourceIdentity, Any])({ + case (c, (k, v)) => c.updated(k, v) + })) +} diff --git a/shared/src/main/scala/datasource.scala b/shared/src/main/scala/datasource.scala index 60c51229..49e1e248 100644 --- a/shared/src/main/scala/datasource.scala +++ b/shared/src/main/scala/datasource.scala @@ -16,7 +16,7 @@ package fetch -import cats.Eval +import monix.eval.Task import cats.data.NonEmptyList import cats.std.list._ @@ -40,21 +40,20 @@ trait DataSource[I, A] { /** Fetch one identity, returning a None if it wasn't found. */ - def fetchOne(id: I): Eval[Option[A]] + def fetchOne(id: I): Task[Option[A]] /** Fetch many identities, returning a mapping from identities to results. If an * identity wasn't found won't appear in the keys. */ - def fetchMany(ids: NonEmptyList[I]): Eval[Map[I, A]] + def fetchMany(ids: NonEmptyList[I]): Task[Map[I, A]] /** Use `fetchOne` for implementing of `fetchMany`. Use only when the data * source doesn't support batching. 
*/ - def batchingNotSupported(ids: NonEmptyList[I]): Eval[Map[I, A]] = { + def batchingNotSupported(ids: NonEmptyList[I]): Task[Map[I, A]] = { val idsList = ids.unwrap - idsList - .map(fetchOne) - .sequence + Task + .sequence(idsList.map(fetchOne)) .map(results => { (idsList zip results) .collect({ diff --git a/shared/src/main/scala/fetch.scala b/shared/src/main/scala/fetch.scala index 91fab7e3..a9cf1aee 100644 --- a/shared/src/main/scala/fetch.scala +++ b/shared/src/main/scala/fetch.scala @@ -18,6 +18,8 @@ package fetch import scala.collection.immutable.Map +import monix.eval.Task + import cats.{Applicative, Monad, MonadError, ~>} import cats.data.{StateT, Const, NonEmptyList} import cats.free.{Free} @@ -38,7 +40,6 @@ final case class Concurrent(as: List[FetchMany[_, _]]) extends FetchOp[DataSourc final case class FetchError[A, E <: Throwable](err: E) extends FetchOp[A] object `package` { - type DataSourceName = String type DataSourceIdentity = (DataSourceName, Any) @@ -47,10 +48,16 @@ object `package` { type FetchMonadError[M[_]] = MonadError[M, Throwable] - type FetchInterpreter[M[_]] = { - type f[x] = StateT[M, FetchEnv, x] - } + type FetchInterpreter[A] = StateT[Task, FetchEnv, A] + + implicit val taskMonad: Monad[Task] = new Monad[Task] with Applicative[Task] { + def pure[A](x: A): Task[A] = Task.pure(x) + override def ap[A, B](ff: Task[A => B])(fa: Task[A]): Task[B] = + Task.mapBoth(ff, fa)((f, a) => f(a)) + + def flatMap[A, B](fa: Task[A])(f: A => Task[B]): Task[B] = fa.flatMap(f) + } implicit val fetchApplicative: Applicative[Fetch] = new Applicative[Fetch] { def pure[A](a: A): Fetch[A] = Fetch.pure(a) @@ -212,47 +219,24 @@ object `package` { } yield result } - class FetchRunner[M[_]] { - - def apply[A]( - fa: Fetch[A], - cache: DataSourceCache = InMemoryCache.empty - )( - implicit MM: MonadError[M, Throwable] - ): M[(FetchEnv, A)] = - fa.foldMap[FetchInterpreter[M]#f](interpreter).run(FetchEnv(cache)) - } - - class FetchRunnerEnv[M[_]] { - - def apply[A](fa: Fetch[A], cache: DataSourceCache = InMemoryCache.empty)( - implicit MM: MonadError[M, Throwable] - ): M[FetchEnv] = - fa.foldMap[FetchInterpreter[M]#f](interpreter).runS(FetchEnv(cache)) - } - - class FetchRunnerA[M[_]] { - - def apply[A](fa: Fetch[A], cache: DataSourceCache = InMemoryCache.empty)( - implicit MM: MonadError[M, Throwable] - ): M[A] = - fa.foldMap[FetchInterpreter[M]#f](interpreter).runA(FetchEnv(cache)) - } - /** * Run a `Fetch` with the given cache, returning a pair of the final environment and result * in the monad `M`. */ - def runFetch[M[_]]: FetchRunner[M] = new FetchRunner[M] + def runFetch[A]( + fa: Fetch[A], cache: DataSourceCache = InMemoryCache.empty): Task[(FetchEnv, A)] = + fa.foldMap(interpreter).run(FetchEnv(cache)) /** * Run a `Fetch` with the given cache, returning the final environment in the monad `M`. */ - def runEnv[M[_]]: FetchRunnerEnv[M] = new FetchRunnerEnv[M] + def runEnv[A](fa: Fetch[A], cache: DataSourceCache = InMemoryCache.empty): Task[FetchEnv] = + runFetch(fa, cache).map(_._1) /** * Run a `Fetch` with the given cache, the result in the monad `M`. 
*/ - def run[M[_]]: FetchRunnerA[M] = new FetchRunnerA[M] + def run[A](fa: Fetch[A], cache: DataSourceCache = InMemoryCache.empty): Task[A] = + runFetch(fa, cache).map(_._2) } } diff --git a/shared/src/main/scala/implicits.scala b/shared/src/main/scala/implicits.scala index 606b17b0..b500d86d 100644 --- a/shared/src/main/scala/implicits.scala +++ b/shared/src/main/scala/implicits.scala @@ -16,48 +16,4 @@ package fetch -import cats.{Eval, MonadError} - -/** - * A cache that stores its elements in memory. - */ -case class InMemoryCache(state: Map[DataSourceIdentity, Any]) extends DataSourceCache { - override def get(k: DataSourceIdentity): Option[Any] = - state.get(k) - - override def update[A](k: DataSourceIdentity, v: A): InMemoryCache = - copy(state = state.updated(k, v)) -} - -object InMemoryCache { - def empty: InMemoryCache = InMemoryCache(Map.empty[DataSourceIdentity, Any]) - - def apply(results: (DataSourceIdentity, Any)*): InMemoryCache = - InMemoryCache( - results.foldLeft(Map.empty[DataSourceIdentity, Any])({ - case (c, (k, v)) => c.updated(k, v) - })) -} - -object implicits { - implicit val evalMonadError: MonadError[Eval, Throwable] = new MonadError[Eval, Throwable] { - override def pure[A](x: A): Eval[A] = Eval.now(x) - - override def map[A, B](fa: Eval[A])(f: A ⇒ B): Eval[B] = fa.map(f) - - override def flatMap[A, B](fa: Eval[A])(ff: A => Eval[B]): Eval[B] = - fa.flatMap(ff) - - override def raiseError[A](e: Throwable): Eval[A] = - Eval.later({ throw e }) - - override def handleErrorWith[A](fa: Eval[A])(f: Throwable ⇒ Eval[A]): Eval[A] = - Eval.later({ - try { - fa.value - } catch { - case e: Throwable => f(e).value - } - }) - } -} +object implicits {} diff --git a/shared/src/main/scala/interpreters.scala b/shared/src/main/scala/interpreters.scala index a723bc56..e07d4d54 100644 --- a/shared/src/main/scala/interpreters.scala +++ b/shared/src/main/scala/interpreters.scala @@ -16,14 +16,14 @@ package fetch -import cats.{MonadError, ~>} -import cats.data.{StateT, NonEmptyList} - import scala.collection.immutable._ +import monix.eval.Task + +import cats.{MonadError, ~>} +import cats.data.{StateT, NonEmptyList} import cats.std.option._ import cats.std.list._ - import cats.syntax.traverse._ /** @@ -33,19 +33,17 @@ case class FetchFailure(env: Env) extends Throwable trait FetchInterpreters { - def interpreter[I, M[_]]( - implicit MM: MonadError[M, Throwable] - ): FetchOp ~> FetchInterpreter[M]#f = { - def dedupeIds[I, A, M[_]](ids: NonEmptyList[I], ds: DataSource[I, A], cache: DataSourceCache) = { + def interpreter[I]: FetchOp ~> FetchInterpreter = { + def dedupeIds[I, A](ids: NonEmptyList[I], ds: DataSource[I, A], cache: DataSourceCache) = { ids.unwrap.distinct.filterNot(i => cache.get(ds.identity(i)).isDefined) } - new (FetchOp ~> FetchInterpreter[M]#f) { - def apply[A](fa: FetchOp[A]): FetchInterpreter[M]#f[A] = { - StateT[M, FetchEnv, A] { env: FetchEnv => + new (FetchOp ~> FetchInterpreter) { + def apply[A](fa: FetchOp[A]): FetchInterpreter[A] = { + StateT[Task, FetchEnv, A] { env: FetchEnv => fa match { - case FetchError(e) => MM.raiseError(e) - case Cached(a) => MM.pure((env, a)) + case FetchError(e) => Task.raiseError(e) + case Cached(a) => Task.pure((env, a)) case Concurrent(manies) => { val startRound = System.nanoTime() val cache = env.cache @@ -57,9 +55,9 @@ trait FetchInterpreters { case (ds, ids) => ( ds, - dedupeIds[I, A, M](ids.asInstanceOf[NonEmptyList[I]], - ds.asInstanceOf[DataSource[I, A]], - cache) + dedupeIds[I, A](ids.asInstanceOf[NonEmptyList[I]], + 
ds.asInstanceOf[DataSource[I, A]], + cache) ) }) .collect({ @@ -67,107 +65,106 @@ trait FetchInterpreters { }) if (sourcesAndIds.isEmpty) - MM.pure((env, env.cache.asInstanceOf[A])) + Task.pure((env, env.cache.asInstanceOf[A])) else - MM.flatMap( - sourcesAndIds - .map({ + Task + .sequence(sourcesAndIds.map({ case (ds, as) => - MM.pureEval(ds - .asInstanceOf[DataSource[I, A]] - .fetchMany(as.asInstanceOf[NonEmptyList[I]])) - }) - .sequence)((results: List[Map[_, _]]) => { - val endRound = System.nanoTime() - val newCache = (sources zip results).foldLeft(cache)((accache, resultset) => { - val (ds, resultmap) = resultset - val tresults = resultmap.asInstanceOf[Map[I, A]] - val tds = ds.asInstanceOf[DataSource[I, A]] - accache.cacheResults[I, A](tresults, tds) - }) - val newEnv = env.next( - newCache, - Round( - cache, - "Concurrent", - ConcurrentRound( - sourcesAndIds - .map({ - case (ds, as) => (ds.name, as.unwrap) - }) - .toMap - ), - startRound, - endRound - ), - Nil - ) + ds.asInstanceOf[DataSource[I, A]] + .fetchMany(as.asInstanceOf[NonEmptyList[I]]) + })) + .flatMap((results: List[Map[_, _]]) => { + val endRound = System.nanoTime() + val newCache = + (sources zip results).foldLeft(cache)((accache, resultset) => { + val (ds, resultmap) = resultset + val tresults = resultmap.asInstanceOf[Map[I, A]] + val tds = ds.asInstanceOf[DataSource[I, A]] + accache.cacheResults[I, A](tresults, tds) + }) + val newEnv = env.next( + newCache, + Round( + cache, + "Concurrent", + ConcurrentRound( + sourcesAndIds + .map({ + case (ds, as) => (ds.name, as.unwrap) + }) + .toMap + ), + startRound, + endRound + ), + Nil + ) - val allFetched = (sourcesAndIds zip results).forall({ - case ((_, theIds), results) => theIds.unwrap.size == results.size - case _ => false - }) + val allFetched = (sourcesAndIds zip results).forall({ + case ((_, theIds), results) => theIds.unwrap.size == results.size + case _ => false + }) - if (allFetched) { - // since user-provided caches may discard elements, we use an in-memory - // cache to gather these intermediate results that will be used for - // concurrent optimizations. - val cachedResults = - (sources zip results).foldLeft(InMemoryCache.empty)((cach, resultSet) => { + if (allFetched) { + // since user-provided caches may discard elements, we use an in-memory + // cache to gather these intermediate results that will be used for + // concurrent optimizations. 
+ val cachedResults = (sources zip results).foldLeft(InMemoryCache.empty)( + (cach, resultSet) => { val (ds, resultmap) = resultSet val tresults = resultmap.asInstanceOf[Map[I, A]] val tds = ds.asInstanceOf[DataSource[I, A]] cach.cacheResults[I, A](tresults, tds).asInstanceOf[InMemoryCache] }) - MM.pure((newEnv, cachedResults.asInstanceOf[A])) - } else { - MM.raiseError(FetchFailure(newEnv)) - } - }) + Task.pure((newEnv, cachedResults.asInstanceOf[A])) + } else { + Task.raiseError(FetchFailure(newEnv)) + } + }) } case FetchOne(id, ds) => { val startRound = System.nanoTime() val cache = env.cache cache .get(ds.identity(id)) - .fold[M[(FetchEnv, A)]]( - MM.flatMap(MM.pureEval(ds.fetchOne(id)).asInstanceOf[M[Option[A]]])( - (res: Option[A]) => { - val endRound = System.nanoTime() - res.fold[M[(FetchEnv, A)]]( - MM.raiseError( - FetchFailure( - env.next( - cache, - Round(cache, - ds.name, - OneRound(id), - startRound, - endRound), - List(id) - ) - ) - ) - )(result => { + .fold[Task[(FetchEnv, A)]]( + ds.fetchOne(id) + .flatMap((res: Option[A]) => { val endRound = System.nanoTime() - val newCache = cache.update(ds.identity(id), result) - MM.pure( - (env.next( - newCache, - Round(cache, - ds.name, - OneRound(id), - startRound, - endRound), - List(id) - ), - result) - ) + res.fold[Task[(FetchEnv, A)]]( + Task.raiseError( + FetchFailure( + env.next( + cache, + Round(cache, + ds.name, + OneRound(id), + startRound, + endRound), + List(id) + ) + ) + ) + )(result => { + val endRound = System.nanoTime() + val newCache = cache.update(ds.identity(id), result) + Task.pure( + (env.next( + newCache, + Round(cache, + ds.name, + OneRound(id), + startRound, + endRound), + List(id) + ), + result) + ) + }) }) - }) )(cached => { val endRound = System.nanoTime() - MM.pure( + Task.pure( (env.next( cache, Round(cache, @@ -186,9 +183,9 @@ trait FetchInterpreters { val startRound = System.nanoTime() val cache = env.cache val oldIds = ids.unwrap.distinct - val newIds = dedupeIds[Any, Any, Any](ids, ds, cache) + val newIds = dedupeIds[Any, Any](ids, ds, cache) if (newIds.isEmpty) - MM.pure( + Task.pure( (env.next( cache, Round(cache, @@ -202,47 +199,48 @@ trait FetchInterpreters { ids.unwrap.flatMap(id => cache.get(ds.identity(id)))) ) else { - MM.flatMap(MM - .pureEval(ds.fetchMany(NonEmptyList(newIds(0), newIds.tail))) - .asInstanceOf[M[Map[I, A]]])((res: Map[I, A]) => { - val endRound = System.nanoTime() - ids.unwrap - .map(i => res.get(i.asInstanceOf[I])) - .sequence - .fold[M[(FetchEnv, A)]]( - MM.raiseError( - FetchFailure( - env.next( - cache, - Round(cache, - ds.name, - ManyRound(ids.unwrap), - startRound, - endRound), - newIds - ) - ) + ds.asInstanceOf[DataSource[I, A]] + .fetchMany( + NonEmptyList(newIds(0).asInstanceOf[I], newIds.tail.asInstanceOf[List[I]])) + .flatMap((res: Map[I, A]) => { + val endRound = System.nanoTime() + ids.unwrap + .map(i => res.get(i.asInstanceOf[I])) + .sequence + .fold[Task[(FetchEnv, A)]]( + Task.raiseError( + FetchFailure( + env.next( + cache, + Round(cache, + ds.name, + ManyRound(ids.unwrap), + startRound, + endRound), + newIds + ) + ) + ) + )(results => { + val endRound = System.nanoTime() + val newCache = + cache.cacheResults[I, A](res, ds.asInstanceOf[DataSource[I, A]]) + val someCached = oldIds.size == newIds.size + Task.pure( + (env.next( + newCache, + Round(cache, + ds.name, + ManyRound(ids.unwrap), + startRound, + endRound, + someCached), + newIds + ), + results) ) - )(results => { - val endRound = System.nanoTime() - val newCache = - cache.cacheResults[I, A](res, 
ds.asInstanceOf[DataSource[I, A]]) - val someCached = oldIds.size == newIds.size - MM.pure( - (env.next( - newCache, - Round(cache, - ds.name, - ManyRound(ids.unwrap), - startRound, - endRound, - someCached), - newIds - ), - results) - ) - }) - }) + }) + }) } } } diff --git a/shared/src/main/scala/syntax.scala b/shared/src/main/scala/syntax.scala index 9632fab2..4b7dc1a3 100644 --- a/shared/src/main/scala/syntax.scala +++ b/shared/src/main/scala/syntax.scala @@ -16,6 +16,8 @@ package fetch +import monix.eval.Task + object syntax { /** Implicit syntax to lift any value to the context of Fetch via pure */ @@ -38,22 +40,22 @@ object syntax { def join[B](fb: Fetch[B]): Fetch[(A, B)] = Fetch.join(fa, fb) - def runF[M[_]: FetchMonadError]: M[(FetchEnv, A)] = - Fetch.runFetch[M](fa, InMemoryCache.empty) + def runF: Task[(FetchEnv, A)] = + Fetch.runFetch(fa, InMemoryCache.empty) - def runE[M[_]: FetchMonadError]: M[FetchEnv] = - Fetch.runEnv[M](fa, InMemoryCache.empty) + def runE: Task[FetchEnv] = + Fetch.runEnv(fa, InMemoryCache.empty) - def runA[M[_]: FetchMonadError]: M[A] = - Fetch.run[M](fa, InMemoryCache.empty) + def runA: Task[A] = + Fetch.run(fa, InMemoryCache.empty) - def runF[M[_]: FetchMonadError](cache: DataSourceCache): M[(FetchEnv, A)] = - Fetch.runFetch[M](fa, cache) + def runF(cache: DataSourceCache): Task[(FetchEnv, A)] = + Fetch.runFetch(fa, cache) - def runE[M[_]: FetchMonadError](cache: DataSourceCache): M[FetchEnv] = - Fetch.runEnv[M](fa, cache) + def runE(cache: DataSourceCache): Task[FetchEnv] = + Fetch.runEnv(fa, cache) - def runA[M[_]: FetchMonadError](cache: DataSourceCache): M[A] = - Fetch.run[M](fa, cache) + def runA(cache: DataSourceCache): Task[A] = + Fetch.run(fa, cache) } } diff --git a/shared/src/test/scala/FetchFutureTests.scala b/shared/src/test/scala/FetchFutureTests.scala new file mode 100644 index 00000000..9cdc136a --- /dev/null +++ b/shared/src/test/scala/FetchFutureTests.scala @@ -0,0 +1,136 @@ +/* + * Copyright 2016 47 Degrees, LLC. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import scala.concurrent._ +import scala.concurrent.duration._ + +import org.scalatest._ + +import monix.eval._ +import monix.execution.Scheduler +import cats.data.NonEmptyList +import cats.std.list._ +import fetch._ + +class FetchFutureTests extends AsyncFreeSpec with Matchers { + implicit def executionContext = Scheduler.Implicits.global + override def newInstance = new FetchFutureTests + + case class ArticleId(id: Int) + case class Article(id: Int, content: String) { + def author: Int = id + 1 + } + + implicit object ArticleFuture extends DataSource[ArticleId, Article] { + override def name = "ArticleFuture" + override def fetchOne(id: ArticleId): Task[Option[Article]] = + Task.pure(Option(Article(id.id, "An article with id " + id.id))) + override def fetchMany(ids: NonEmptyList[ArticleId]): Task[Map[ArticleId, Article]] = { + Task.now({ + ids.unwrap.map(tid => (tid, Article(tid.id, "An article with id " + tid.id))).toMap + }) + } + } + + def article(id: Int): Fetch[Article] = Fetch(ArticleId(id)) + + case class AuthorId(id: Int) + case class Author(id: Int, name: String) + + implicit object AuthorFuture extends DataSource[AuthorId, Author] { + override def name = "AuthorFuture" + override def fetchOne(id: AuthorId): Task[Option[Author]] = + Task.now(Option(Author(id.id, "@egg" + id.id))) + override def fetchMany(ids: NonEmptyList[AuthorId]): Task[Map[AuthorId, Author]] = { + Task.now({ + ids.unwrap.map(tid => (tid, Author(tid.id, "@egg" + tid.id))).toMap + }) + } + } + + def author(a: Article): Fetch[Author] = Fetch(AuthorId(a.author)) + + def toFuture[A](task: Task[A]): Future[A] = { + val promise: Promise[A] = Promise() + task.runAsync( + new Callback[A] { + def onSuccess(value: A): Unit = { promise.success(value); () } + def onError(ex: Throwable): Unit = { promise.failure(ex); () } + }) + promise.future + } + + "We can interpret a fetch into a future" in { + val fetch: Fetch[Article] = article(1) + + val task: Task[Article] = Fetch.run(fetch) + val fut: Future[Article] = toFuture(task) + + fut.map(_ shouldEqual Article(1, "An article with id 1")) + } + + "We can combine several data sources and interpret a fetch into a future" in { + val fetch: Fetch[(Article, Author)] = for { + art <- article(1) + author <- author(art) + } yield (art, author) + + val fut: Future[(Article, Author)] = toFuture(Fetch.run(fetch)) + + fut.map(_ shouldEqual (Article(1, "An article with id 1"), Author(2, "@egg2"))) + } + + "We can use combinators in a for comprehension and interpret a fetch into a future" in { + val fetch: Fetch[List[Article]] = for { + articles <- Fetch.traverse(List(1, 1, 2))(article) + } yield articles + + val fut: Future[List[Article]] = toFuture(Fetch.run(fetch)) + + fut.map( + _ shouldEqual List( + Article(1, "An article with id 1"), + Article(1, "An article with id 1"), + Article(2, "An article with id 2") + ) + ) + } + + "We can use combinators and multiple sources in a for comprehension and interpret a fetch into a future" in { + val fetch = for { + articles <- Fetch.traverse(List(1, 1, 2))(article) + authors <- Fetch.traverse(articles)(author) + } yield (articles, authors) + + val fut: Future[(List[Article], List[Author])] = + toFuture(Fetch.run(fetch, InMemoryCache.empty)) + + fut.map( + _ shouldEqual ( + List( + Article(1, "An article with id 1"), + Article(1, "An article with id 1"), + Article(2, "An article with id 2") + ), + List( + Author(2, "@egg2"), + Author(2, "@egg2"), + Author(3, "@egg3") + ) + ) + ) + } +} diff --git a/shared/src/test/scala/FetchTests.scala 
b/shared/src/test/scala/FetchTests.scala index 3c17ca4e..c9566ee7 100644 --- a/shared/src/test/scala/FetchTests.scala +++ b/shared/src/test/scala/FetchTests.scala @@ -14,66 +14,62 @@ * limitations under the License. */ +import scala.concurrent._ +import scala.concurrent.duration._ + import org.scalatest._ -import cats.{Eval, Id, MonadError} +import monix.eval._ +import monix.execution.Scheduler import cats.data.NonEmptyList import cats.std.list._ - import fetch._ object TestHelper { - - import fetch.implicits._ import fetch.syntax._ - val M: MonadError[Eval, Throwable] = implicits.evalMonadError + case class NotFound() extends Throwable - final case class NotFound() extends Throwable - - final case class One(id: Int) + case class One(id: Int) implicit object OneSource extends DataSource[One, Int] { override def name = "OneSource" - override def fetchOne(id: One): Eval[Option[Int]] = { - M.pure(Option(id.id)) + override def fetchOne(id: One): Task[Option[Int]] = { + Task.pure(Option(id.id)) } - override def fetchMany(ids: NonEmptyList[One]): Eval[Map[One, Int]] = - M.pure(ids.unwrap.map(one => (one, one.id)).toMap) + override def fetchMany(ids: NonEmptyList[One]): Task[Map[One, Int]] = + Task.pure(ids.unwrap.map(one => (one, one.id)).toMap) } def one(id: Int): Fetch[Int] = Fetch(One(id)) - final case class AnotherOne(id: Int) + case class AnotherOne(id: Int) implicit object AnotheroneSource extends DataSource[AnotherOne, Int] { override def name = "AnotherOneSource" - override def fetchOne(id: AnotherOne): Eval[Option[Int]] = - M.pure(Option(id.id)) - override def fetchMany(ids: NonEmptyList[AnotherOne]): Eval[Map[AnotherOne, Int]] = - M.pure(ids.unwrap.map(anotherone => (anotherone, anotherone.id)).toMap) + override def fetchOne(id: AnotherOne): Task[Option[Int]] = + Task.pure(Option(id.id)) + override def fetchMany(ids: NonEmptyList[AnotherOne]): Task[Map[AnotherOne, Int]] = + Task.pure(ids.unwrap.map(anotherone => (anotherone, anotherone.id)).toMap) } def anotherOne(id: Int): Fetch[Int] = Fetch(AnotherOne(id)) - final case class Many(n: Int) + case class Many(n: Int) implicit object ManySource extends DataSource[Many, List[Int]] { override def name = "ManySource" - override def fetchOne(id: Many): Eval[Option[List[Int]]] = - M.pure(Option(0 until id.n toList)) - override def fetchMany(ids: NonEmptyList[Many]): Eval[Map[Many, List[Int]]] = - M.pure(ids.unwrap.map(m => (m, 0 until m.n toList)).toMap) + override def fetchOne(id: Many): Task[Option[List[Int]]] = + Task.pure(Option(0 until id.n toList)) + override def fetchMany(ids: NonEmptyList[Many]): Task[Map[Many, List[Int]]] = + Task.pure(ids.unwrap.map(m => (m, 0 until m.n toList)).toMap) } - final case class Never() + case class Never() implicit object NeverSource extends DataSource[Never, Int] { override def name = "NeverSource" - override def fetchOne(id: Never): Eval[Option[Int]] = - M.pure(None) - override def fetchMany(ids: NonEmptyList[Never]): Eval[Map[Never, Int]] = - M.pure(Map.empty[Never, Int]) + override def fetchOne(id: Never): Task[Option[Int]] = + Task.pure(None) + override def fetchMany(ids: NonEmptyList[Never]): Task[Map[Never, Int]] = + Task.pure(Map.empty[Never, Int]) } def many(id: Int): Fetch[List[Int]] = Fetch(Many(id)) - def runEnv[A](f: Fetch[A]): FetchEnv = - Fetch.runEnv[Eval](f).value - def totalFetched(rs: Seq[Round]): Int = rs.filterNot(_.cached) .foldLeft(0)((acc, round) => @@ -100,23 +96,37 @@ object TestHelper { case other => false } ) -} -class FetchSyntaxTests extends FreeSpec with Matchers { + def 
toFuture[A](task: Task[A])( + implicit s: Scheduler + ): Future[A] = { + val promise: Promise[A] = Promise() + task.runAsync( + new Callback[A] { + def onSuccess(value: A): Unit = { promise.success(value); () } + def onError(ex: Throwable): Unit = { promise.failure(ex); () } + }) + promise.future + } +} - import fetch.implicits._ +class FetchSyntaxTests extends AsyncFreeSpec with Matchers { import fetch.syntax._ import TestHelper._ + implicit def executionContext = Scheduler.Implicits.global + override def newInstance = new FetchSyntaxTests + "Cartesian syntax is implicitly concurrent" in { import cats.syntax.cartesian._ val fetch: Fetch[(Int, List[Int])] = (one(1) |@| many(3)).tupled - val env = Fetch.runEnv[Eval](fetch).value - val rounds = env.rounds + val task = Fetch.runEnv(fetch) - concurrent(rounds).size shouldEqual 1 + toFuture(task).map(env => { + concurrent(env.rounds).size shouldEqual 1 + }) } "Apply syntax is implicitly concurrent" in { @@ -124,12 +134,14 @@ class FetchSyntaxTests extends FreeSpec with Matchers { val fetch: Fetch[Int] = Fetch.pure((x: Int, y: Int) => x + y).ap2(one(1), one(2)) - val env = Fetch.runEnv[Eval](fetch).value - val rounds = env.rounds + val task = Fetch.runEnv(fetch) - concurrent(rounds).size shouldEqual 1 - totalBatches(rounds) shouldEqual 1 - totalFetched(rounds) shouldEqual 2 + toFuture(task).map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) + + stats shouldEqual (1, 1, 2) + }) } "`fetch` syntax allows lifting of any value to the context of a fetch" in { @@ -138,64 +150,75 @@ class FetchSyntaxTests extends FreeSpec with Matchers { "`fetch` syntax allows lifting of any `Throwable` as a failure on a fetch" in { case object Ex extends RuntimeException - val ME = implicitly[MonadError[Eval, Throwable]] - val e1 = ME.attempt(Fetch.run[Eval](Fetch.error(Ex))).value - val e2 = ME.attempt(Fetch.run[Eval](Ex.fetch)).value - e1 shouldEqual e2 + + val e1 = Fetch + .run(Fetch.error(Ex)) + .onErrorRecoverWith({ + case Ex => Task.now("thrown") + }) + + val e2 = Fetch + .run(Ex.fetch) + .onErrorRecoverWith({ + case Ex => Task.now("thrown") + }) + + toFuture(Task.mapBoth(e1, e2)(_ == _)).map(_ shouldEqual true) } "`join` syntax is equivalent to `Fetch#join`" in { - val join1 = Fetch.join(one(1), many(3)).runA[Eval].value - val join2 = one(1).join(many(3)).runA[Eval].value + val join1 = Fetch.join(one(1), many(3)) + val join2 = one(1).join(many(3)) - join1 shouldEqual join2 + toFuture(Task.mapBoth(Fetch.run(join1), Fetch.run(join2))(_ == _)).map(_ shouldEqual true) } "`runF` syntax is equivalent to `Fetch#runFetch`" in { - val rf1 = Fetch.runFetch(1.fetch).value - val rf2 = 1.fetch.runF[Eval].value + val rf1 = Fetch.runFetch(1.fetch) + val rf2 = 1.fetch.runF - rf1 shouldEqual rf2 + toFuture(Task.mapBoth(rf1, rf2)(_ == _)).map(_ shouldEqual true) } "`runE` syntax is equivalent to `Fetch#runEnv`" in { - val rf1 = Fetch.runEnv(1.fetch).value - val rf2 = 1.fetch.runE[Eval].value + val rf1 = Fetch.runEnv(1.fetch) + val rf2 = 1.fetch.runE - rf1 shouldEqual rf2 + toFuture(Task.mapBoth(rf1, rf2)(_ == _)).map(_ shouldEqual true) } "`runA` syntax is equivalent to `Fetch#run`" in { - val rf1 = Fetch.run(1.fetch).value - val rf2 = 1.fetch.runA[Eval].value + val rf1 = Fetch.run(1.fetch) + val rf2 = 1.fetch.runA - rf1 shouldEqual rf2 + toFuture(Task.mapBoth(rf1, rf2)(_ == _)).map(_ shouldEqual true) } } -class FetchTests extends FreeSpec with Matchers { - - import fetch.implicits._ +class FetchTests 
extends AsyncFreeSpec with Matchers { import TestHelper._ + implicit def executionContext = Scheduler.Implicits.global + override def newInstance = new FetchTests + "We can lift plain values to Fetch" in { val fetch: Fetch[Int] = Fetch.pure(42) - Fetch.run[Eval](fetch).value shouldEqual 42 + Fetch.run(fetch).coeval.value shouldEqual Right(42) } "Data sources with errors throw fetch failures" in { val fetch: Fetch[Int] = Fetch(Never()) intercept[FetchFailure] { - Fetch.runEnv[Eval](fetch).value + Fetch.runEnv(fetch).coeval.value } match { case FetchFailure(env) => { env.rounds.headOption match { case Some(Round(_, _, OneRound(id), _, _, _)) => id shouldEqual Never() - case _ => fail("Expected Some(Round(_,_, Oneround(id),_,_,_)) but None found") + case _ => fail("Expected Some(Round(_,_, Oneround(id),_,_,_))") } } } @@ -204,10 +227,7 @@ class FetchTests extends FreeSpec with Matchers { "Data sources with errors throw fetch failures that can be handled" in { val fetch: Fetch[Int] = Fetch(Never()) - M.handleErrorWith( - Fetch.run[Eval](fetch) - )(err => Eval.now(42)) - .value shouldEqual 42 + Fetch.run(fetch).onErrorHandleWith(err => Task.now(42)).coeval.value shouldEqual Right(42) } "Data sources with errors and cached values throw fetch failures with the cache" in { @@ -217,7 +237,7 @@ class FetchTests extends FreeSpec with Matchers { ) intercept[FetchFailure] { - Fetch.run[Eval](fetch, cache).value + Fetch.run(fetch, cache).coeval.value } match { case FetchFailure(env) => env.cache shouldEqual cache } @@ -228,31 +248,29 @@ class FetchTests extends FreeSpec with Matchers { val cache = InMemoryCache( NeverSource.identity(Never()) -> 1 ) - Fetch.run[Eval](fetch, cache).value shouldEqual 1 + Fetch.run(fetch, cache).coeval.value shouldEqual Right(1) } "We can lift errors to Fetch" in { val fetch: Fetch[Int] = Fetch.error(NotFound()) intercept[NotFound] { - Fetch.run[Eval](fetch).value + Fetch.run(fetch).coeval.value } } "We can lift handle and recover from errors in Fetch" in { val fetch: Fetch[Int] = Fetch.error(NotFound()) - M.handleErrorWith( - Fetch.run[Eval](fetch) - )(err => M.pure(42)) - .value shouldEqual 42 + + Fetch.run(fetch).onErrorHandleWith(err => Task.pure(42)).coeval.value shouldEqual Right(42) } "We can lift values which have a Data Source to Fetch" in { - Fetch.run[Eval](one(1)).value shouldEqual 1 + Fetch.run(one(1)).coeval.value shouldEqual Right(1) } "We can map over Fetch values" in { val fetch = one(1).map(_ + 1) - Fetch.run[Eval](fetch).value shouldEqual 2 + Fetch.run(fetch).coeval.value shouldEqual Right(2) } "We can use fetch inside a for comprehension" in { @@ -261,7 +279,7 @@ class FetchTests extends FreeSpec with Matchers { t <- one(2) } yield (o, t) - Fetch.run[Eval](fetch).value shouldEqual (1, 2) + Fetch.run(fetch).coeval.value shouldEqual Right((1, 2)) } "Monadic bind implies sequential execution" in { @@ -270,7 +288,7 @@ class FetchTests extends FreeSpec with Matchers { t <- one(2) } yield (o, t) - Fetch.runEnv[Eval](fetch).value.rounds.size shouldEqual 2 + Fetch.runEnv(fetch).coeval.value.right.map(_.rounds.size) shouldEqual Right(2) } "We can mix data sources" in { @@ -279,23 +297,25 @@ class FetchTests extends FreeSpec with Matchers { m <- many(3) } yield (o, m) - Fetch.run[Eval](fetch).value shouldEqual (1, List(0, 1, 2)) + Fetch.run(fetch).coeval.value shouldEqual Right((1, List(0, 1, 2))) } "We can use Fetch as a cartesian" in { import cats.syntax.cartesian._ val fetch: Fetch[(Int, List[Int])] = (one(1) |@| many(3)).tupled + val task = Fetch.run(fetch) - 
Fetch.run[Eval](fetch).value shouldEqual (1, List(0, 1, 2)) + toFuture(task).map(_ shouldEqual (1, List(0, 1, 2))) } "We can use Fetch as an applicative" in { import cats.syntax.cartesian._ val fetch: Fetch[Int] = (one(1) |@| one(2) |@| one(3)).map(_ + _ + _) + val task = Fetch.run(fetch) - Fetch.run[Eval](fetch).value shouldEqual 6 + toFuture(task).map(_ shouldEqual 6) } "We can traverse over a list with a Fetch for each element" in { @@ -307,7 +327,8 @@ class FetchTests extends FreeSpec with Matchers { ones <- manies.traverse(one) } yield ones - Fetch.run[Eval](fetch).value shouldEqual List(0, 1, 2) + val task = Fetch.run(fetch) + toFuture(task).map(_ shouldEqual List(0, 1, 2)) } "Traversals are implicitly concurrent" in { @@ -319,49 +340,57 @@ class FetchTests extends FreeSpec with Matchers { ones <- manies.traverse(one) } yield ones - val rounds = Fetch.runEnv[Eval](fetch).value.rounds + val task = Fetch.runEnv(fetch) - concurrent(rounds).size shouldEqual 1 + toFuture(task).map(env => { + concurrent(env.rounds).size shouldEqual 1 + }) } "The product of two fetches implies parallel fetching" in { val fetch: Fetch[(Int, List[Int])] = Fetch.join(one(1), many(3)) - val rounds = Fetch.runEnv[Eval](fetch).value.rounds + val task = Fetch.runEnv(fetch) - concurrent(rounds).size shouldEqual 1 + toFuture(task).map(env => { + concurrent(env.rounds).size shouldEqual 1 + }) } "If a fetch fails in the left hand of a product the product will fail" in { val fetch: Fetch[(Int, List[Int])] = Fetch.join(Fetch.error(NotFound()), many(3)) + val task = Fetch.run(fetch) - intercept[NotFound] { - Fetch.run[Eval](fetch).value - } + toFuture(task.onErrorRecoverWith({ + case ex: NotFound => Task.now("not found") + })).map(_ shouldEqual "not found") } "If a fetch fails in the right hand of a product the product will fail" in { val fetch: Fetch[(List[Int], Int)] = Fetch.join(many(3), Fetch.error(NotFound())) + val task = Fetch.run(fetch) - intercept[NotFound] { - Fetch.run[Eval](fetch).value - } + toFuture(task.onErrorRecoverWith({ + case ex: NotFound => Task.now("not found") + })).map(_ shouldEqual "not found") } "If there is a missing identity in the left hand of a product the product will fail" in { val fetch: Fetch[(Int, List[Int])] = Fetch.join(Fetch(Never()), many(3)) + val task = Fetch.run(fetch) - intercept[FetchFailure] { - Fetch.run[Eval](fetch).value - } + toFuture(task.onErrorRecoverWith({ + case ex: FetchFailure => Task.now("fail!") + })).map(_ shouldEqual "fail!") } "If there is a missing identity in the right hand of a product the product will fail" in { val fetch: Fetch[(List[Int], Int)] = Fetch.join(many(3), Fetch(Never())) + val task = Fetch.run(fetch) - intercept[FetchFailure] { - Fetch.run[Eval](fetch).value - } + toFuture(task.onErrorRecoverWith({ + case ex: FetchFailure => Task.now("fail!") + })).map(_ shouldEqual "fail!") } "The product of concurrent fetches implies everything fetched concurrently" in { @@ -373,12 +402,14 @@ class FetchTests extends FreeSpec with Matchers { one(4) ) - val env = Fetch.runEnv[Eval](fetch).value - val rounds = env.rounds + val task = Fetch.runEnv(fetch) - concurrent(rounds).size shouldEqual 1 - totalBatches(rounds) shouldEqual 1 - totalFetched(rounds) shouldEqual 4 + toFuture(task).map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) + + stats shouldEqual (1, 1, 4) + }) } "The product of concurrent fetches of the same type implies everything fetched in a single batch" in { @@ -398,12 +429,14 
@@ class FetchTests extends FreeSpec with Matchers { one(3) ) - val env = Fetch.runEnv[Eval](fetch).value - val rounds = env.rounds + val task = Fetch.runEnv(fetch) + + toFuture(task).map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) - concurrent(rounds).size shouldEqual 2 - totalBatches(rounds) shouldEqual 2 - totalFetched(rounds) shouldEqual 4 + stats shouldEqual (2, 2, 4) + }) } "Every level of joined concurrent fetches is combined and batched" in { @@ -420,12 +453,14 @@ class FetchTests extends FreeSpec with Matchers { } yield c ) - val env = Fetch.runEnv[Eval](fetch).value - val rounds = env.rounds + val task = Fetch.runEnv(fetch) + + toFuture(task).map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) - concurrent(rounds).size shouldEqual 3 - totalBatches(rounds) shouldEqual 3 - totalFetched(rounds) shouldEqual 6 + stats shouldEqual (3, 3, 6) + }) } "Every level of sequenced concurrent of concurrent fetches is batched" in { @@ -445,21 +480,27 @@ class FetchTests extends FreeSpec with Matchers { Fetch.sequence(List(one(15), one(16), one(17))) ) - val env = Fetch.runEnv[Eval](fetch).value - val rounds = env.rounds + val task = Fetch.runEnv(fetch) - concurrent(rounds).size shouldEqual 3 - totalBatches(rounds) shouldEqual 3 - totalFetched(rounds) shouldEqual 9 + 4 + 6 + toFuture(task).map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) + + stats shouldEqual (3, 3, 9 + 4 + 6) + }) } "The product of two fetches from the same data source implies batching" in { val fetch: Fetch[(Int, Int)] = Fetch.join(one(1), one(3)) - val rounds = Fetch.runEnv[Eval](fetch).value.rounds + val task = Fetch.runEnv(fetch) + + toFuture(task).map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds)) - concurrent(rounds).size shouldEqual 1 - totalBatches(concurrent(rounds)) shouldEqual 1 + stats shouldEqual (1, 1) + }) } "We can depend on previous computations of Fetch values" in { @@ -468,83 +509,104 @@ class FetchTests extends FreeSpec with Matchers { t <- one(o + 1) } yield o + t - Fetch.run[Eval](fetch).value shouldEqual 3 + Fetch.run(fetch).coeval.value shouldEqual Right(3) } "We can collect a list of Fetch into one" in { val sources: List[Fetch[Int]] = List(one(1), one(2), one(3)) val fetch: Fetch[List[Int]] = Fetch.sequence(sources) - Fetch.run[Eval](fetch).value shouldEqual List(1, 2, 3) + + val task = Fetch.run(fetch) + + toFuture(task).map(_ shouldEqual List(1, 2, 3)) } "We can collect a list of Fetches with heterogeneous sources" in { val sources: List[Fetch[Int]] = List(one(1), one(2), one(3), anotherOne(4), anotherOne(5)) val fetch: Fetch[List[Int]] = Fetch.sequence(sources) - Fetch.run[Eval](fetch).value shouldEqual List(1, 2, 3, 4, 5) + + val task = Fetch.run(fetch) + + toFuture(task).map(_ shouldEqual List(1, 2, 3, 4, 5)) } "Sequenced fetches are run concurrently" in { val sources: List[Fetch[Int]] = List(one(1), one(2), one(3), anotherOne(4), anotherOne(5)) val fetch: Fetch[List[Int]] = Fetch.sequence(sources) - val rounds = Fetch.runEnv[Eval](fetch).value.rounds + val task = Fetch.runEnv(fetch) + + toFuture(task).map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds)) - concurrent(rounds).size shouldEqual 1 - totalBatches(rounds) shouldEqual 2 + stats shouldEqual (1, 2) + }) } "Sequenced fetches are 
deduped" in { val sources: List[Fetch[Int]] = List(one(1), one(2), one(1)) val fetch: Fetch[List[Int]] = Fetch.sequence(sources) - val rounds = Fetch.runEnv[Eval](fetch).value.rounds + val task = Fetch.runEnv(fetch) + + toFuture(task).map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalFetched(rounds)) - totalFetched(concurrent(rounds)) shouldEqual 2 - concurrent(rounds).size shouldEqual 1 + stats shouldEqual (1, 2) + }) } "Sequenced fetches are not asked for when cached" in { val sources: List[Fetch[Int]] = List(one(1), one(2), one(3), one(4)) val fetch: Fetch[List[Int]] = Fetch.sequence(sources) - val rounds = Fetch - .runEnv[Eval]( - fetch, - InMemoryCache( - OneSource.identity(One(1)) -> 1, - OneSource.identity(One(2)) -> 2 - ) - ) - .value - .rounds + val task = Fetch.runEnv( + fetch, + InMemoryCache( + OneSource.identity(One(1)) -> 1, + OneSource.identity(One(2)) -> 2 + ) + ) - totalFetched(concurrent(rounds)) shouldEqual 2 - concurrent(rounds).size shouldEqual 1 + toFuture(task).map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalFetched(rounds)) + + stats shouldEqual (1, 2) + }) } "We can collect the results of a traversal" in { - val expected = List(1, 2, 3) + val fetch = Fetch.traverse(List(1, 2, 3))(one) - val fetch = Fetch.traverse(expected)(one) + val task = Fetch.run(fetch) - Fetch.run[Eval](fetch).value shouldEqual expected + toFuture(task).map(_ shouldEqual List(1, 2, 3)) } "Traversals are run concurrently" in { val fetch = Fetch.traverse(List(1, 2, 3))(one) - val rounds = Fetch.runEnv[Eval](fetch).value.rounds + val task = Fetch.runEnv(fetch) - concurrent(rounds).size shouldEqual 1 + toFuture(task).map(env => { + concurrent(env.rounds).size shouldEqual 1 + }) } "Duplicated sources are only fetched once" in { val fetch = Fetch.traverse(List(1, 2, 1))(one) - val rounds = Fetch.runEnv[Eval](fetch).value.rounds + val task = Fetch.runEnv(fetch) + + toFuture(task).map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalFetched(rounds)) - concurrent(rounds).size shouldEqual 1 - totalFetched(concurrent(rounds)) shouldEqual 2 + stats shouldEqual (1, 2) + }) } "Sources that can be fetched concurrently inside a for comprehension will be" in { @@ -553,10 +615,14 @@ class FetchTests extends FreeSpec with Matchers { result <- Fetch.traverse(v)(one) } yield result - val rounds = Fetch.runEnv[Eval](fetch).value.rounds + val task = Fetch.runEnv(fetch) - concurrent(rounds).size shouldEqual 1 - totalFetched(concurrent(rounds)) shouldEqual 2 + toFuture(task).map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalFetched(rounds)) + + stats shouldEqual (1, 2) + }) } "Elements are cached and thus not fetched more than once" in { @@ -571,9 +637,13 @@ class FetchTests extends FreeSpec with Matchers { _ <- one(1) } yield aOne + anotherOne - val rounds = Fetch.runEnv[Eval](fetch).value.rounds + val task = Fetch.runEnv(fetch) + + toFuture(task).map(env => { + val rounds = env.rounds - totalFetched(rounds) shouldEqual 3 + totalFetched(rounds) shouldEqual 3 + }) } "Elements that are cached won't be fetched" in { @@ -588,22 +658,23 @@ class FetchTests extends FreeSpec with Matchers { _ <- one(1) } yield aOne + anotherOne - val rounds = Fetch - .runEnv[Eval]( - fetch, - InMemoryCache( - OneSource.identity(One(1)) -> 1, - OneSource.identity(One(2)) -> 2, - OneSource.identity(One(3)) -> 3 - ) - ) - .value - .rounds + val task = Fetch.runEnv( + fetch, + InMemoryCache( + 
OneSource.identity(One(1)) -> 1, + OneSource.identity(One(2)) -> 2, + OneSource.identity(One(3)) -> 3 + ) + ) - totalFetched(rounds) shouldEqual 0 + toFuture(task).map(env => { + val rounds = env.rounds + + totalFetched(rounds) shouldEqual 0 + }) } - final case class MyCache(state: Map[Any, Any] = Map.empty[Any, Any]) extends DataSourceCache { + case class MyCache(state: Map[Any, Any] = Map.empty[Any, Any]) extends DataSourceCache { override def get(k: DataSourceIdentity): Option[Any] = state.get(k) override def update[A](k: DataSourceIdentity, v: A): MyCache = copy(state = state.updated(k, v)) @@ -631,20 +702,21 @@ class FetchTests extends FreeSpec with Matchers { _ <- one(1) } yield aOne + anotherOne - val rounds = Fetch - .runEnv[Eval]( - fetch, - InMemoryCache( - OneSource.identity(One(1)) -> 1, - OneSource.identity(One(2)) -> 2, - OneSource.identity(One(3)) -> 3, - ManySource.identity(Many(2)) -> List(0, 1) - ) - ) - .value - .rounds + val task = Fetch.runEnv( + fetch, + InMemoryCache( + OneSource.identity(One(1)) -> 1, + OneSource.identity(One(2)) -> 2, + OneSource.identity(One(3)) -> 3, + ManySource.identity(Many(2)) -> List(0, 1) + ) + ) + + toFuture(task).map(env => { + val rounds = env.rounds - totalFetched(rounds) shouldEqual 0 + totalFetched(rounds) shouldEqual 0 + }) } case class ForgetfulCache() extends DataSourceCache { @@ -663,9 +735,11 @@ class FetchTests extends FreeSpec with Matchers { _ <- one(1) } yield aOne + anotherOne - val rounds = Fetch.runEnv[Eval](fetch, ForgetfulCache()).value.rounds + val task = Fetch.runEnv(fetch, ForgetfulCache()) - totalFetched(rounds) shouldEqual 7 + toFuture(task).map(env => { + totalFetched(env.rounds) shouldEqual 7 + }) } "We can use a custom cache that discards elements together with concurrent fetches" in { @@ -680,111 +754,10 @@ class FetchTests extends FreeSpec with Matchers { _ <- one(1) } yield aOne + anotherOne - val rounds = Fetch.runEnv[Eval](fetch, ForgetfulCache()).value.rounds - - totalFetched(rounds) shouldEqual 10 - } -} - -class FetchFutureTests extends AsyncFreeSpec with Matchers { - import scala.concurrent._ - import scala.concurrent.ExecutionContext.global - - import cats.std.future._ - - implicit def executionContext = global - override def newInstance = new FetchFutureTests - - final case class ArticleId(id: Int) - final case class Article(id: Int, content: String) { - def author: Int = id + 1 - } - - implicit object ArticleFuture extends DataSource[ArticleId, Article] { - override def name = "ArticleFuture" - override def fetchOne(id: ArticleId): Eval[Option[Article]] = - Eval.later(Option(Article(id.id, "An article with id " + id.id))) - override def fetchMany(ids: NonEmptyList[ArticleId]): Eval[Map[ArticleId, Article]] = { - Eval.later({ - ids.unwrap.map(tid => (tid, Article(tid.id, "An article with id " + tid.id))).toMap - }) - } - } - - def article(id: Int): Fetch[Article] = Fetch(ArticleId(id)) - - final case class AuthorId(id: Int) - final case class Author(id: Int, name: String) + val task = Fetch.runEnv(fetch, ForgetfulCache()) - implicit object AuthorFuture extends DataSource[AuthorId, Author] { - override def name = "AuthorFuture" - override def fetchOne(id: AuthorId): Eval[Option[Author]] = - Eval.later(Option(Author(id.id, "@egg" + id.id))) - override def fetchMany(ids: NonEmptyList[AuthorId]): Eval[Map[AuthorId, Author]] = { - Eval.later({ - ids.unwrap.map(tid => (tid, Author(tid.id, "@egg" + tid.id))).toMap - }) - } - } - - def author(a: Article): Fetch[Author] = Fetch(AuthorId(a.author)) - - "We can 
interpret a fetch into a future" in { - val fetch: Fetch[Article] = article(1) - - val fut: Future[Article] = Fetch.run(fetch) - - fut.map(_ shouldEqual Article(1, "An article with id 1")) - } - - "We can combine several data sources and interpret a fetch into a future" in { - val fetch: Fetch[(Article, Author)] = for { - art <- article(1) - author <- author(art) - } yield (art, author) - - val fut: Future[(Article, Author)] = Fetch.run(fetch) - - fut.map(_ shouldEqual (Article(1, "An article with id 1"), Author(2, "@egg2"))) - } - - "We can use combinators in a for comprehension and interpret a fetch into a future" in { - val fetch: Fetch[List[Article]] = for { - articles <- Fetch.traverse(List(1, 1, 2))(article) - } yield articles - - val fut: Future[List[Article]] = Fetch.run(fetch) - - fut.map( - _ shouldEqual List( - Article(1, "An article with id 1"), - Article(1, "An article with id 1"), - Article(2, "An article with id 2") - ) - ) - } - - "We can use combinators and multiple sources in a for comprehension and interpret a fetch into a future" in { - val fetch = for { - articles <- Fetch.traverse(List(1, 1, 2))(article) - authors <- Fetch.traverse(articles)(author) - } yield (articles, authors) - - val fut: Future[(List[Article], List[Author])] = Fetch.run(fetch, InMemoryCache.empty) - - fut.map( - _ shouldEqual ( - List( - Article(1, "An article with id 1"), - Article(1, "An article with id 1"), - Article(2, "An article with id 2") - ), - List( - Author(2, "@egg2"), - Author(2, "@egg2"), - Author(3, "@egg3") - ) - ) - ) + toFuture(task).map(env => { + totalFetched(env.rounds) shouldEqual 10 + }) } } From 69081f8aae27fc1cc8796bc6e6aac34b2ef899a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sun, 29 May 2016 15:30:30 +0200 Subject: [PATCH 03/40] Update docs --- README.md | 101 ++++--- build.sbt | 10 +- docs/src/tut/docs.md | 362 ++++++++++++------------- docs/src/tut/index.md | 90 ++++-- shared/src/main/scala/implicits.scala | 18 +- shared/src/test/scala/FetchTests.scala | 4 +- tut/README.md | 98 ++++--- 7 files changed, 395 insertions(+), 288 deletions(-) diff --git a/README.md b/README.md index d24d50a2..bc356e21 100644 --- a/README.md +++ b/README.md @@ -12,11 +12,13 @@ A library for Simple & Efficient data access in Scala and Scala.js Add the following dependency to your project's build file. +For Scala 2.11.x: + ```scala "com.fortysevendeg" %% "fetch" %% "0.2.0" ``` -Or, if using Scala.js: +Or, if using Scala.js (0.6.x): ```scala "com.fortysevendeg" %%% "fetch" %% "0.2.0" @@ -45,31 +47,33 @@ Data Sources take two type parameters: ```scala +import monix.eval.Task +import cats.data.NonEmptyList + trait DataSource[Identity, Result]{ - def fetchOne(id: Identity): Eval[Option[Result]] - def fetchMany(ids: NonEmptyList[Identity]): Eval[Map[Identity, Result]] + def fetchOne(id: Identity): Task[Option[Result]] + def fetchMany(ids: NonEmptyList[Identity]): Task[Map[Identity, Result]] } ``` We'll implement a dummy data source that can convert integers to strings. For convenience, we define a `fetchString` function that lifts identities (`Int` in our dummy data source) to a `Fetch`. 
```scala -import cats.Eval +import monix.eval.Task import cats.data.NonEmptyList import cats.std.list._ - import fetch._ implicit object ToStringSource extends DataSource[Int, String]{ - override def fetchOne(id: Int): Eval[Option[String]] = { - Eval.later({ - println(s"ToStringSource $id") + override def fetchOne(id: Int): Task[Option[String]] = { + Task.now({ + println(s"[${Thread.currentThread.getId}] One ToString $id") Option(id.toString) }) } - override def fetchMany(ids: NonEmptyList[Int]): Eval[Map[Int, String]] = { - Eval.later({ - println(s"ToStringSource $ids") + override def fetchMany(ids: NonEmptyList[Int]): Task[Map[Int, String]] = { + Task.now({ + println(s"[${Thread.currentThread.getId}] Many ToString $ids") ids.unwrap.map(i => (i, i.toString)).toMap }) } @@ -83,20 +87,31 @@ def fetchString(n: Int): Fetch[String] = Fetch(n) // or, more explicitly: Fetch( Now that we can convert `Int` values to `Fetch[String]`, let's try creating a fetch. ```scala -import fetch.implicits._ import fetch.syntax._ val fetchOne: Fetch[String] = fetchString(1) ``` -Now that we have created a fetch, we can run it to a target monad. Note that the target monad (`Eval` in our example) needs to implement `MonadError[M, Throwable]`, we provide an instance for `Eval` in `fetch.implicits._`, that's why we imported it. +Now that we have created a fetch, we can run it to a `Task`. Note that when we create a task we are not computing any value yet. Having a `Task` instance allows us to try to run it synchronously or asynchronously, choosing a scheduler. ```scala -val result: String = fetchOne.runA[Eval].value -// ToStringSource 1 -// result: String = 1 +val result: Task[String] = fetchOne.runA +// result: monix.eval.Task[String] = BindSuspend(,) +``` + +We can try to run `result` synchronously with `Task#coeval`. + +```scala +import monix.execution.Scheduler.Implicits.global +// import monix.execution.Scheduler.Implicits.global + +result.coeval.value +// [1026] One ToString 1 +// res3: Either[monix.execution.CancelableFuture[String],String] = Right(1) ``` +Since we calculated the results eagerly using `Task#now`, we can run this fetch synchronously. + As you can see in the previous example, the `ToStringSource` is queried once to get the value of 1. ## Batching @@ -105,33 +120,43 @@ Multiple fetches to the same data source are automatically batched. For illustra ```scala import cats.syntax.cartesian._ +// import cats.syntax.cartesian._ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled +// fetchThree: fetch.Fetch[(String, String, String)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List(2, 3)),ToStringSource$@77b748dd)))),),) + +val result: Task[(String, String, String)] = fetchThree.runA +// result: monix.eval.Task[(String, String, String)] = BindSuspend(,) ``` -When executing the above fetch, note how the three identities get batched and the data source is only queried once. + + + +When executing the above fetch, note how the three identities get batched and the data source is only queried once. Let's pretend we have a function from `Task[A]` to `A` called `await`. 
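Such a helper is easy to sketch: it simply blocks the calling thread until the `Task` completes, which is fine for examples and tests but not something you'd want in production code. A minimal sketch, assuming an implicit monix `Scheduler` is in scope (the global one imported above works):

```scala
import scala.concurrent.Await
import scala.concurrent.duration._
import monix.execution.Scheduler.Implicits.global

// Runs the task and blocks the current thread until it completes.
// For illustration purposes only.
def await[A](task: Task[A]): A =
  Await.result(task.runAsync, Duration.Inf)
```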
```scala -val result: (String, String, String) = fetchThree.runA[Eval].value -// ToStringSource OneAnd(1,List(2, 3)) -// result: (String, String, String) = (1,2,3) +await(result) +// [1026] Many ToString OneAnd(1,List(2, 3)) +// res4: (String, String, String) = (1,2,3) ``` -## Concurrency +## Parallelism + +If we combine two independent fetches from different data sources, the fetches can be run in parallel. First, let's add a data source that fetches a string's size. -If we combine two independent fetches from different data sources, the fetches will be run concurrently. First, let's add a data source that fetches a string's size. +This time, instead of creating the results with `Task#now` we are going to do it with `Task#apply` for emulating an asynchronous data source. ```scala implicit object LengthSource extends DataSource[String, Int]{ - override def fetchOne(id: String): Eval[Option[Int]] = { - Eval.later({ - println(s"LengthSource $id") + override def fetchOne(id: String): Task[Option[Int]] = { + Task({ + println(s"[${Thread.currentThread.getId}] One Length $id") Option(id.size) }) } - override def fetchMany(ids: NonEmptyList[String]): Eval[Map[String, Int]] = { - Eval.later({ - println(s"LengthSource $ids") + override def fetchMany(ids: NonEmptyList[String]): Task[Map[String, Int]] = { + Task({ + println(s"[${Thread.currentThread.getId}] Many Length $ids") ids.unwrap.map(i => (i, i.size)).toMap }) } @@ -144,15 +169,19 @@ And now we can easily receive data from the two sources in a single fetch. ```scala val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled +// fetchMulti: fetch.Fetch[(String, Int)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List()),ToStringSource$@77b748dd), FetchMany(OneAnd(one,List()),LengthSource$@741119c5)))),),) + +val result = fetchMulti.runA +// result: monix.eval.Task[(String, Int)] = BindSuspend(,) ``` -Note how the two independent data fetches are run concurrently, minimizing the latency cost of querying the two data sources. If our target monad was a concurrency monad like `Future`, they'd run in parallel, each in its own logical thread. +Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. ```scala -val result: (String, Int) = fetchMulti.runA[Eval].value -// ToStringSource OneAnd(1,List()) -// LengthSource OneAnd(one,List()) -// result: (String, Int) = (1,3) +await(result) +// [1026] Many ToString OneAnd(1,List()) +// [1027] Many Length OneAnd(one,List()) +// res6: (String, Int) = (1,3) ``` ## Caching @@ -164,13 +193,13 @@ val fetchTwice: Fetch[(String, String)] = for { one <- fetchString(1) two <- fetchString(1) } yield (one, two) +// fetchTwice: fetch.Fetch[(String, String)] = Gosub(Suspend(FetchOne(1,ToStringSource$@77b748dd)),) ``` While running it, notice that the data source is only queried once. The next time the identity is requested it's served from the cache. 
```scala -val result: (String, String) = fetchTwice.runA[Eval].value -// ToStringSource 1 +val result: (String, String) = await(fetchTwice.runA) +// [1026] One ToString 1 // result: (String, String) = (1,1) ``` - diff --git a/build.sbt b/build.sbt index cd99f3f8..fc117764 100644 --- a/build.sbt +++ b/build.sbt @@ -62,7 +62,10 @@ lazy val docsSettings = ghpages.settings ++ buildSettings ++ tutSettings ++ Seq( tutSourceDirectory := sourceDirectory.value / "tut", tutTargetDirectory := sourceDirectory.value / "jekyll", tutScalacOptions ~= (_.filterNot(Set("-Ywarn-unused-import", "-Ywarn-dead-code"))), - aggregate in doc := true + aggregate in doc := true, + libraryDependencies ++= Seq( + "io.monix" %%% "monix-eval" % "2.0-RC3" + ) ) lazy val docs = (project in file("docs")) @@ -117,7 +120,10 @@ lazy val readmeSettings = buildSettings ++ tutSettings ++ Seq( tutSourceDirectory := baseDirectory.value, tutTargetDirectory := baseDirectory.value.getParentFile, tutScalacOptions ~= (_.filterNot(Set("-Ywarn-unused-import", "-Ywarn-dead-code"))), - tutNameFilter := """README.md""".r + tutNameFilter := """README.md""".r, + libraryDependencies ++= Seq( + "io.monix" %%% "monix-eval" % "2.0-RC3" + ) ) lazy val readme = (project in file("tut")) diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md index 301ff7e5..340d1024 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -64,9 +64,12 @@ If something is missing in Fetch that stops you from using it we'd appreciate if In order to tell Fetch how to retrieve data, we must implement the `DataSource` typeclass. ```scala +import monix.eval.Task +import cats.data.NonEmptyList + trait DataSource[Identity, Result]{ - def fetchOne(id: Identity): Eval[Option[Result]] - def fetchMany(ids: NonEmptyList[Identity]): Eval[Map[Identity, Result]] + def fetchOne(id: Identity): Task[Option[Result]] + def fetchMany(ids: NonEmptyList[Identity]): Task[Map[Identity, Result]] } ``` @@ -76,15 +79,15 @@ It takes two type parameters: - `Result`: the type of the data we retrieve (a `User` if we were fetching users) There are two methods: `fetchOne` and `fetchMany`. `fetchOne` receives one identity and must return -an [Eval](https://github.com/typelevel/cats/blob/master/core/src/main/scala/cats/Eval.scala) containing +a [Task](https://github.com/typelevel/cats/blob/master/core/src/main/scala/cats/Eval.scala) containing an optional result. Returning an `Option` Fetch can detect whether an identity couldn't be fetched or no longer exists. -`fetchMany` method takes a non-empty list of identities and must return an `Eval` that containing +`fetchMany` method takes a non-empty list of identities and must return a `Task` containing a map from identities to results. Accepting a list of identities gives Fetch the ability to batch requests to the same data source, and returning a mapping from identities to results, Fetch can detect whenever an identity couldn't be fetched or no longer exists. -Returning `Eval` makes it possible to defer evaluation with a monad when running a fetch. +Returning `Task` makes it possible to try to run a fetch synchronously or asynchronously, choose a scheduler for the I/O bound nature of reading remote data, error handling, memoization and composability. ## Writing your first data source @@ -99,7 +102,7 @@ case class User(id: UserId, username: String) And now we're ready to write our user data source; we'll emulate a database with an in-memory map. 
```tut:silent -import cats.Eval +import monix.eval.Task import cats.data.NonEmptyList import cats.std.list._ @@ -112,15 +115,15 @@ val userDatabase: Map[UserId, User] = Map( ) implicit object UserSource extends DataSource[UserId, User]{ - override def fetchOne(id: UserId): Eval[Option[User]] = { - Eval.later({ - println(s"Fetching user $id") + override def fetchOne(id: UserId): Task[Option[User]] = { + Task.now({ + println(s"Fetching one user $id") userDatabase.get(id) }) } - override def fetchMany(ids: NonEmptyList[UserId]): Eval[Map[UserId, User]] = { - Eval.later({ - println(s"Fetching users $ids") + override def fetchMany(ids: NonEmptyList[UserId]): Task[Map[UserId, User]] = { + Task.now({ + println(s"Fetching many users $ids") userDatabase.filterKeys(ids.unwrap.contains) }) } @@ -130,22 +133,21 @@ implicit object UserSource extends DataSource[UserId, User]{ Now that we have a data source we can write a function for fetching users given an id, we just have to pass a `UserId` as an argument to `Fetch`. -```tut:silent +```tut def getUser(id: UserId): Fetch[User] = Fetch(id) // or, more explicitly: Fetch(id)(UserSource) ``` - ### Data sources that don't support batching If your data source doesn't support batching, you can use the `DataSource#batchingNotSupported` method as the implementation of `fetchMany`. Note that it will use the `fetchOne` implementation for requesting identities one at a time. ```tut:silent -implicit object IntSource extends DataSource[Int, Int]{ - override def fetchOne(id: Int): Eval[Option[Int]] = { - Eval.now(Option(id)) +implicit object UnbatchedSource extends DataSource[Int, Int]{ + override def fetchOne(id: Int): Task[Option[Int]] = { + Task(Option(id)) } - override def fetchMany(ids: NonEmptyList[Int]): Eval[Map[Int, Int]] = { + override def fetchMany(ids: NonEmptyList[Int]): Task[Map[Int, Int]] = { batchingNotSupported(ids) } } @@ -157,34 +159,33 @@ We are now ready to create and run fetches. Note the distinction between Fetch c When we are creating and combining `Fetch` values, we are just constructing a recipe of our data dependencies. -```tut:silent -import fetch.implicits._ -import fetch.syntax._ - +```tut:book val fetchUser: Fetch[User] = getUser(1) ``` -A `Fetch` is just a value, and in order to get something out of it, we must execute it. We can execute a `Fetch` value as many times as we want, even to different target monads, since it is just -an immutable value. +A `Fetch` is just a value, and in order to be able to execute it we need to run it to a `Task` first. Running `fetchUser` will give as a `Task[User]`, which we can later execute for the performing the effects of the fetch. -We need to provide a target monad when we want to execute a fetch. We'll be using `Id` for now. -Make sure to import `fetch.implicits._` since Fetch needs an instance of `MonadError[Id, Throwable]` for running -a fetch in the `Id` monad. +```tut:book +import fetch.syntax._ -Note that Fetch provides `MonadError` instances for a variety of different monads like `Eval` or -`Future` so it's likely that you won't have to write your own. +val user: Task[User] = fetchUser.runA +``` -Let's run our first fetch! +We'll try to evaluate the Fetch synchronously with `Task#coeval`. `Coeval` is a type similar to `Task` but which can be evaluated synchronously with its `.value` method. Note that for executing tasks a [monix Scheduler](http://monix.io/docs/2x/execution/scheduler.html) must be implicitly found. 
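As a sketch of the asynchronous route (reusing the `user` task from above), the same `Task` can also be executed with `runAsync`, which needs an implicit `Scheduler` and returns a `CancelableFuture`; we block on it here purely for illustration:

```scala
import monix.execution.Scheduler.Implicits.global
import scala.concurrent.Await
import scala.concurrent.duration._

// Run the fetch's Task asynchronously and wait for the CancelableFuture.
val futureUser = user.runAsync
Await.result(futureUser, 1.second)
```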
```tut:book -val result: User = fetchUser.runA[Eval].value +import monix.execution.Scheduler.Implicits.global + +val co = user.coeval + +co.value ``` In the previous examples, we: -- brought the implicit instance of `MonadError[Eval, Throwable]` into scope importing `fetch.implicits._` - created a fetch for a `User` using the `getUser` function -- interpreted the fetch to a `Eval[User]` using the syntax `runA` that delegate to `Fetch.run` +- interpreted the fetch to a `Task[User]` using the syntax `runA` that delegate to `Fetch.run` +- converted `Task[User]` to `Coeval[Either[CancelableFuture[User], User]]` and evaluated it to a `Right[User]` As you can see, the fetch was executed in one round to fetch the user and was finished after that. @@ -202,7 +203,13 @@ val fetchTwoUsers: Fetch[(User, User)] = for { When composing fetches with `flatMap` we are telling Fetch that the second one depends on the previous one, so it isn't able to make any optimizations. When running the above fetch, we will query the user data source in two rounds: one for the user with id 1 and another for the user with id 2. ```tut:book -val result: (User, User) = fetchTwoUsers.runA[Eval].value +val result: Task[(User, User)] = fetchTwoUsers.runA +``` + +Althought `fetchTwoUsers` needs two rounds to complete we still can execute it synchronously: + +```tut:book +result.coeval.value ``` ### Batching @@ -220,7 +227,20 @@ val fetchProduct: Fetch[(User, User)] = getUser(1).product(getUser(2)) Note how both ids (1 and 2) are requested in a single query to the data source when executing the fetch. ```tut:book -val result: (User, User) = fetchProduct.runA[Eval].value +val result: Task[(User, User)] = fetchProduct.runA +``` + +```tut:invisible +import scala.concurrent._ +import scala.concurrent.duration._ + +def await[A](t: Task[A]): A = Await.result(t.runAsync, Duration.Inf) +``` + +Let's pretend we have a function from `Task[A]` to `A` called `await`. + +```tut:book +await(result) ``` ### Deduplication @@ -234,7 +254,9 @@ val fetchDuped: Fetch[(User, User)] = getUser(1).product(getUser(1)) Note that when running the fetch, the identity 1 is only requested once even when it is needed by both fetches. ```tut:book -val result: (User, User) = fetchDuped.runA[Eval].value +val result: Task[(User, User)] = fetchDuped.runA + +await(result) ``` ### Caching @@ -254,7 +276,9 @@ val fetchCached: Fetch[(User, User)] = for { The above fetch asks for the same identity multiple times. Let's see what happens when executing it. ```tut:book -val result: (User, User) = fetchCached.runA[Eval].value +val result: Task[(User, User)] = fetchCached.runA + +await(result) ``` As you can see, the `User` with id 1 was fetched only once in a single round-trip. The next @@ -264,19 +288,17 @@ source. ## Combining data from multiple sources Now that we know about some of the optimizations that Fetch can perform to read data efficiently, -let's look at how we can combine more than one data source. Imagine that we are rendering a blog -and have the following types for posts and post information: +let's look at how we can combine more than one data source. + + +Imagine that we are rendering a blog and have the following types for posts: ```tut:silent type PostId = Int case class Post(id: PostId, author: UserId, content: String) -case class PostInfo(topic: String) ``` -As you can see, every `Post` has an author, but it refers to the author by its id. 
We'll implement two data sources: - -- one for retrieving a post given a post id -- another for retrieving post metadata given a post id +As you can see, every `Post` has an author, but it refers to the author by its id. We'll implement a data source for retrieving a post given a post id. ```tut:silent val postDatabase: Map[PostId, Post] = Map( @@ -286,44 +308,21 @@ val postDatabase: Map[PostId, Post] = Map( ) implicit object PostSource extends DataSource[PostId, Post]{ - override def fetchOne(id: PostId): Eval[Option[Post]] = { - Eval.later({ - println(s"Fetching post $id") + override def fetchOne(id: PostId): Task[Option[Post]] = { + Task({ + println(s"Fetching one post $id") postDatabase.get(id) }) } - override def fetchMany(ids: NonEmptyList[PostId]): Eval[Map[PostId, Post]] = { - Eval.later({ - println(s"Fetching posts $ids") + override def fetchMany(ids: NonEmptyList[PostId]): Task[Map[PostId, Post]] = { + Task({ + println(s"Fetching many posts $ids") postDatabase.filterKeys(ids.unwrap.contains) }) } } def getPost(id: PostId): Fetch[Post] = Fetch(id) - -val postInfoDatabase: Map[PostId, PostInfo] = Map( - 1 -> PostInfo("Run Wild, Run Free"), - 2 -> PostInfo("American Psycho"), - 3 -> PostInfo("Torrente 3") -) - -implicit object PostInfoSource extends DataSource[PostId, PostInfo]{ - override def fetchOne(id: PostId): Eval[Option[PostInfo]] = { - Eval.later({ - println(s"Fetching post info $id") - postInfoDatabase.get(id) - }) - } - override def fetchMany(ids: NonEmptyList[PostId]): Eval[Map[PostId, PostInfo]] = { - Eval.later({ - println(s"Fetching post info $ids") - postInfoDatabase.filterKeys(ids.unwrap.contains) - }) - } -} - -def getPostInfo(id: PostId): Fetch[PostInfo] = Fetch(id) ``` We can also implement a function for fetching a post's author given a post: @@ -344,7 +343,9 @@ val fetchMulti: Fetch[(Post, User)] = for { We can now run the previous fetch, querying the posts data source first and the user data source afterwards. ```tut:book -val result: (Post, User) = fetchMulti.runA[Eval].value +val result: Task[(Post, User)] = fetchMulti.runA + +await(result) ``` In the previous example, we fetched a post given its id and then fetched its author. This @@ -368,7 +369,9 @@ val fetchConcurrent: Fetch[(Post, User)] = getPost(1).product(getUser(2)) The above example combines data from two different sources, and the library knows they are independent. ```tut:book -val result: (Post, User) = fetchConcurrent.runA[Eval].value +val result: Task[(Post, User)] = fetchConcurrent.runA + +await(result) ``` Since we are interpreting the fetch to the `Id` monad, that doesn't give us any parallelism; the fetches @@ -396,7 +399,7 @@ val fetchSequence: Fetch[List[User]] = List(getUser(1), getUser(2), getUser(3)). Since `sequence` uses applicative operations internally, the library is able to perform optimizations across all the sequenced fetches. ```tut:book -val result: List[User] = fetchSequence.runA[Eval].value +await(fetchSequence.runA) ``` As you can see, requests to the user data source were batched, thus fetching all the data in one round. @@ -412,42 +415,9 @@ val fetchTraverse: Fetch[List[User]] = List(1, 2, 3).traverse(getUser) As you may have guessed, all the optimizations made by `sequence` still apply when using `traverse`. 
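As a sketch of how deduplication interacts with `traverse` (assuming the `getUser` source above), repeated identities in the traversed list should still collapse into a single batched request:

```scala
// Identity 1 appears twice but should only be requested once; 1 and 2 go in one batch.
val fetchDupedList: Fetch[List[User]] = List(1, 1, 2).traverse(getUser)

await(fetchDupedList.runA)
```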
```tut:book -val result: List[User] = fetchTraverse.runA[Eval].value +await(fetchTraverse.runA) ``` -# Interpreting a fetch to an async capable monad - -Although the examples use `Id` as the target Monad, `Fetch` is not limited to just `Id`, any monad `M` that -implements `MonadError[M, Throwable]` will do. Fetch provides `MonadError` instances for some existing monads like -`Future`, `cats.Id` and `cats.Eval` and it's easy to write your own. - -For practice, you'll be interpreting a fetch to an async capable monad like `Future` or `scalaz.concurrent.Task` to exploit -parallelism whenever we can make requests to multiple independent data sources at the same time. - -## Future - -For interpreting a fetch into a `Future` we must first import the `MonadError[Future, Throwable]` available in cats. - -```tut:silent -import cats.std.future._ - -import scala.concurrent._ -import scala.concurrent.duration._ -import scala.concurrent.ExecutionContext.Implicits.global - - -val fetchParallel: Fetch[(User, Post)] = (getUser(1) |@| getPost(1)).tupled -``` - -We can now interpret a fetch into a future: - -```tut:book -val fut: Future[(User, Post)] = fetchParallel.runA[Future] -Await.result(fut, 1.second) // this call blocks the current thread, don't do this at home! -``` - -Since futures run in a thread pool, both requests to the data sources run in parallel, each in its own logical thread. - # Caching As we have learned, Fetch caches intermediate results implicitly using a cache. You can @@ -459,14 +429,14 @@ one, and even implement a custom cache. We'll be using the default in-memory cache, prepopulated with some data. The cache key of an identity is calculated with the `DataSource`'s `identity` method. -```tut:silent +```tut:book val cache = InMemoryCache(UserSource.identity(1) -> User(1, "@dialelo")) ``` We can pass a cache as the second argument when running a fetch with `Fetch.run`. ```tut:book -val result: User = fetchUser.runA[Eval](cache).value +await(fetchUser.runA(cache)) ``` As you can see, when all the data is cached, no query to the data sources is executed since the results are available @@ -479,7 +449,7 @@ val fetchManyUsers: Fetch[List[User]] = List(1, 2, 3).traverse(getUser) If only part of the data is cached, the cached data won't be asked for: ```tut:book -val result: List[User] = fetchManyUsers.runA[Eval](cache).value +await(fetchManyUsers.runA(cache)) ``` ## Replaying a fetch without querying any data source @@ -493,9 +463,9 @@ Knowing this, we can replay a fetch reusing the cache of a previous one. The rep data sources. ```tut:book -val populatedCache = fetchManyUsers.runE[Eval].value.cache +val populatedCache = await(fetchManyUsers.runE.map(_.cache)) -val result: List[User] = fetchManyUsers.runA[Eval](populatedCache).value +val result: List[User] = await(fetchManyUsers.runA(populatedCache)) ``` ## Implementing a custom cache @@ -511,58 +481,64 @@ trait DataSourceCache { } ``` -Let's reimplement the in-memory cache found in Fetch; we'll write a case class that'll store the cache contents in an in-memory immutable map and implement `DataSourceCache`. +Let's implement a cache that forgets everything we store in it. 
```tut:silent -case class MyInMemoryCache(state: Map[DataSourceIdentity, Any]) extends DataSourceCache { - override def get(k: DataSourceIdentity): Option[Any] = - state.get(k) - - override def update[A](k: DataSourceIdentity, v: A): MyInMemoryCache = - copy(state = state.updated(k, v)) +final case class ForgetfulCache() extends DataSourceCache { + override def get(k: DataSourceIdentity): Option[Any] = None + override def update[A](k: DataSourceIdentity, v: A): ForgetfulCache = this } ``` -Now that we have our cache implementation, we can populate it. Note how keys for the cache are tuples and are derived using the data source's `identity` method on identities. - -```tut:silent -val myCache = MyInMemoryCache(Map(UserSource.identity(1) -> User(1, "dialelo"))) -``` - We can now use our implementation of the cache when running a fetch. ```tut:book -val result: User = fetchUser.runA[Eval](myCache).value +val fetchSameTwice: Fetch[(User, User)] = for { + one <- getUser(1) + another <- getUser(1) +} yield (one, another) + +await(fetchSameTwice.runA(ForgetfulCache())) ``` # Error handling -As we mentioned before, when interpreting a fetch to a target monad `M`, an implicit instance of `MonadError[M, Throwable]` has to be -available. [MonadError](https://github.com/typelevel/cats/blob/master/core/src/main/scala/cats/MonadError.scala) gives us a few combinators -for working with errors, like `MonadError#raiseError` and `MonadError#attempt`. - -One of the most interesting combinators is `attempt`, which given a `M[A]` yields a `M[Throwable Xor A]`. Knowing this, we can run fetches -in the `Eval` monad to an `Xor` and not worry about exceptions. Let's create a fetch that always fails when executed: +`Task` provides a number of combinators for dealing with and recovering from errors. One of the most interesting combinators is `attempt`, which given a `Task[A]` yields a `Task[Throwable Xor A]`. Knowing this, we can run fetches +and not worry about exceptions. Let's create a fetch that always fails when executed: ```tut:silent val fetchError: Fetch[User] = (new Exception("Oh noes")).fetch ``` -We can now use the Eval MonadError's `attempt` to convert a fetch result into a disjuntion and avoid throwing exceptions. +If we try to execute it the exception will be thrown. -```tut:book +```tut:fail +await(fetchError.runA) +``` + +We can use the `ApplicativeError[Task, Throwable]#attempt` to convert a fetch result into a disjuntion and avoid throwing exceptions. Fetch provides an implicit instance of ApplicativeError, let's import `fetch.implicits._` to have it available. + +```tut:silent +import fetch.implicits._ +import cats.ApplicativeError import cats.data.Xor -import cats.MonadError +``` -val ME = implicitly[MonadError[Eval, Throwable]] +Now we can convert `Task[User]` into `Task[Throwable Xor User]` and capture exceptions in the left of the disjunction. -val result: Eval[User] = fetchError.runA[Eval] -val safeResult: Eval[Throwable Xor User] = ME.attempt(result) -val finalValue: Throwable Xor User = safeResult.value +```tut:book +val safeResult: Task[Throwable Xor User] = ApplicativeError[Task, Throwable].attempt(fetchError.runA) +val finalValue: Throwable Xor User = await(safeResult) +``` + +And more succintly with Cat's applicative error syntax. + +```tut:book +import cats.syntax.applicativeError._ + +await(fetchError.runA.attempt) ``` -In the above example, we didn't use `Id` since interpreting a fetch to `Id` throws the exception, and we can't capture it with the -combinators in `MonadError`. 
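As one more sketch (reusing the `fetchError` and `getUser` values above), since the result is just a `Task` we can also recover at the `Task` level with `onErrorHandleWith`, the same combinator that backs the `ApplicativeError` instance in `fetch.implicits._`:

```scala
// Sketch: fall back to another fetch when the failing task raises its error.
val recovered: Task[User] = fetchError.runA.onErrorHandleWith {
  case _: Throwable => getUser(1).runA
}

await(recovered)
```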
## Missing identities @@ -573,6 +549,8 @@ fail. Whenever a fetch fails, a `FetchFailure` exception is thrown. The `FetchFailure` will have the environment, which gives you information about the execution of the fetch. +TODO: show how to diagnose which identity was missing + # Syntax ## Implicit syntax @@ -588,26 +566,24 @@ Plain values can be lifted to the Fetch monad with `value.fetch`: val fetchPure: Fetch[Int] = 42.fetch ``` -Executing a pure fetch doesn't query any data source, as expected. +Executing a pure fetch doesn't query any data source and can be run synchronously, as expected. ```tut:book -val result: Int = fetchPure.runA[Eval].value +fetchPure.runA.coeval.value ``` ### error -Errors can also be lifted to the Fetch monad via `exception.fetch`. Note that interpreting -an errorful fetch to `Eval` won't throw the exception unless we access the value with the `.value` method. - -A safer way to deal with errors is to use MonadError's `attempt` to turn the exception into a `Xor.Left` value: +Errors can also be lifted to the Fetch monad via `exception.fetch`. ```tut:silent -val ME = implicitly[MonadError[Eval, Throwable]] +val fetchFail: Fetch[Int] = Fetch.error(new Exception("Something went terribly wrong")) +``` + +Note that interpreting an errorful fetch to `Task` won't throw the exception until we execute it. -val fetchFail: Fetch[Int] = (new Exception("Something went terribly wrong")).fetch[Int] -val result: Eval[Int] = fetchFail.runA[Eval] -val safeResult: Eval[Throwable Xor Int] = ME.attempt(result) -val finalValue: Throwable Xor Int = safeResult.value +```tut:fail +await(fetchFail.runA) ``` ### join @@ -621,31 +597,31 @@ val fetchJoined: Fetch[(Post, User)] = getPost(1).join(getUser(2)) If the fetches are to the same data source they will be batched; if they aren't, they will be evaluated at the same time. ```tut:book -val result: (Post, User) = fetchJoined.runA[Eval].value +await(fetchJoined.runA) ``` ### runA -Run directly any fetch to a target any target `Monad` with a `MonadError` instance in scope `fetch1.runA[Eval]`. +Run directly any fetch to a `Task` with `fetch1.runA`. -```tut:silent -val post: Eval[Post] = getPost(1).runA[Eval] +```tut:book +await(getPost(1).runA) ``` ### runE -Extract a fetch an get it's runtime environment `fetch1.runE[Eval]`. +Extract a fetch an get it's runtime environment `fetch1.runE`. -```tut:silent -val env: Eval[FetchEnv] = getPost(1).runE[Eval] +```tut:book +await(getPost(1).runE) ``` ### runF -Run a fetch obtaining the environment and final value `fetch1.runF[Eval]`. +Run a fetch obtaining the environment and final value `fetch1.runF`. -```tut:silent -val env: Eval[(FetchEnv, Post)] = getPost(1).runF[Eval] +```tut:book +await(getPost(1).runF) ``` ## Companion object @@ -664,26 +640,24 @@ Plain values can be lifted to the Fetch monad with `Fetch#pure`: val fetchPure: Fetch[Int] = Fetch.pure(42) ``` -Executing a pure fetch doesn't query any data source, as expected. +Executing a pure fetch doesn't query any data source and can be run synchronously, as expected. ```tut:book -val result: Int = Fetch.run[Eval](fetchPure).value +Fetch.run(fetchPure).coeval.value ``` ### error -Errors can also be lifted to the Fetch monad, in this case with `Fetch#error`. Note that interpreting -an errorful fetch to `Eval` won't throw the exception unless we access the value with the `.value` method. +Errors can also be lifted to the Fetch monad via `Fetch#error`. 
-A safer way to deal with errors is to use MonadError's `attempt` to turn the exception into a `Xor.Left` value: +```tut:silent +val fetchFail: Fetch[Int] = Fetch.error(new Exception("Something went terribly wrong")) +``` -```tut:book -val ME = implicitly[MonadError[Eval, Throwable]] +Note that interpreting an errorful fetch to `Task` won't throw the exception until we execute it. -val fetchFail: Fetch[Int] = Fetch.error(new Exception("Something went terribly wrong")) -val result: Eval[Int] = fetchFail.runA[Eval] -val safeResult: Eval[Throwable Xor Int] = ME.attempt(result) -val finalValue: Throwable Xor Int = safeResult.value +```tut:fail +await(Fetch.run(fetchFail)) ``` ### join @@ -697,7 +671,7 @@ val fetchJoined: Fetch[(Post, User)] = Fetch.join(getPost(1), getUser(2)) If the fetches are to the same data source they will be batched; if they aren't, they will be evaluated at the same time. ```tut:book -val result: (Post, User) = Fetch.run[Eval](fetchJoined).value +await(Fetch.run(fetchJoined)) ``` ### sequence @@ -712,7 +686,7 @@ val fetchSequence: Fetch[List[User]] = Fetch.sequence(List(getUser(1), getUser(2 Note that `Fetch#sequence` is not as general as the `sequence` method from `Traverse`, but performs the same optimizations. ```tut:book -val result: List[User] = Fetch.run[Eval](fetchSequence).value +await(Fetch.run(fetchSequence)) ``` ### traverse @@ -726,7 +700,7 @@ val fetchTraverse: Fetch[List[User]] = Fetch.traverse(List(1, 2, 3))(getUser) Note that `Fetch#traverse` is not as general as the `traverse` method from `Traverse`, but performs the same optimizations. ```tut:book -val result: List[User] = Fetch.run[Eval](fetchTraverse).value +await(Fetch.run(fetchTraverse)) ``` ## cats @@ -757,7 +731,7 @@ val fetchThree: Fetch[(Post, User, Post)] = (getPost(1) |@| getUser(2) |@| getPo Notice how the queries to posts are batched. ```tut:book -val result: (Post, User, Post) = fetchThree.runA[Eval].value +await(fetchThree.runA) ``` More interestingly, we can use it to apply a pure function to the results of various @@ -768,7 +742,7 @@ val fetchFriends: Fetch[String] = (getUser(1) |@| getUser(2)).map({ (one, other) s"${one.username} is friends with ${other.username}" }) -val result: String = fetchFriends.runA[Eval].value +await(fetchFriends.runA) ``` The above example is equivalent to the following using the `Fetch#join` method: @@ -778,9 +752,30 @@ val fetchFriends: Fetch[String] = Fetch.join(getUser(1), getUser(2)).map({ case s"${one.username} is friends with ${other.username}" }) -val result: String = fetchFriends.runA[Eval].value +await(fetchFriends.runA) ``` +# Choosing a scheduler + +The [Monix docs](http://monix.io/docs/2x/execution/scheduler.html) go in great detail about how scheduling works and you should refer there for the documentation. + +## JVM (Scala) + +When reading data in the JVM, you may want to create an unbounded thread pool with `monix.execution.Scheduler.io` for running your fetches. + +```tut:book +import monix.execution.Scheduler + +// unbounded thread pool for I/O bound tasks +implicit val ioScheduler: Scheduler = Scheduler.io(name="my-io-scheduler") + +await(fetchFriends.runA) +``` + +## JS (Scala.js) + +When needing to choose a scheduler in a JS environment with Scala.js refer to the [monix docs](http://monix.io/docs/2x/execution/scheduler.html#builders-for-javascript). + # Resources - [Code](https://github.com/47deg/fetch) on GitHub. 
@@ -794,4 +789,5 @@ Fetch stands on the shoulders of giants: - [Haxl](https://github.com/facebook/haxl) is Facebook's implementation (Haskell) of the [original paper Fetch is based on](http://community.haskell.org/~simonmar/papers/haxl-icfp14.pdf). - [Clump](http://getclump.io) has inspired the signature of the `DataSource#fetch` method. - [Stitch](https://engineering.twitter.com/university/videos/introducing-stitch) is an in-house Twitter library that is not open source but has inspired Fetch's high-level API. - +- [Cats](http://typelevel.org/cats/), a library for functional programming in Scala. +- [Monix](https://monix.io) high-performance and multiplatform (Scala / Scala.js) asynchronous programming library. diff --git a/docs/src/tut/index.md b/docs/src/tut/index.md index bae78a63..6264dc59 100644 --- a/docs/src/tut/index.md +++ b/docs/src/tut/index.md @@ -22,6 +22,16 @@ Or, if using Scala.js (0.6.x): "com.fortysevendeg" %%% "fetch" %% "0.2.0" ``` +```tut:invisible +val out = Console.out + +def println(msg: String): Unit = { + Console.withOut(out) { + Console.println(msg) + } +} +``` + ## Remote data Fetch is a library for making access to data both simple & efficient. Fetch is especially useful when querying data that @@ -40,31 +50,33 @@ Data Sources take two type parameters: ```scala +import monix.eval.Task +import cats.data.NonEmptyList + trait DataSource[Identity, Result]{ - def fetchOne(id: Identity): Eval[Option[Result]] - def fetchMany(ids: NonEmptyList[Identity]): Eval[Map[Identity, Result]] + def fetchOne(id: Identity): Task[Option[Result]] + def fetchMany(ids: NonEmptyList[Identity]): Task[Map[Identity, Result]] } ``` We'll implement a dummy data source that can convert integers to strings. For convenience, we define a `fetchString` function that lifts identities (`Int` in our dummy data source) to a `Fetch`. ```tut:silent -import cats.Eval +import monix.eval.Task import cats.data.NonEmptyList import cats.std.list._ - import fetch._ implicit object ToStringSource extends DataSource[Int, String]{ - override def fetchOne(id: Int): Eval[Option[String]] = { - Eval.later({ - println(s"ToStringSource $id") + override def fetchOne(id: Int): Task[Option[String]] = { + Task.now({ + println(s"[${Thread.currentThread.getId}] One ToString $id") Option(id.toString) }) } - override def fetchMany(ids: NonEmptyList[Int]): Eval[Map[Int, String]] = { - Eval.later({ - println(s"ToStringSource $ids") + override def fetchMany(ids: NonEmptyList[Int]): Task[Map[Int, String]] = { + Task.now({ + println(s"[${Thread.currentThread.getId}] Many ToString $ids") ids.unwrap.map(i => (i, i.toString)).toMap }) } @@ -78,51 +90,70 @@ def fetchString(n: Int): Fetch[String] = Fetch(n) // or, more explicitly: Fetch( Now that we can convert `Int` values to `Fetch[String]`, let's try creating a fetch. ```tut:silent -import fetch.implicits._ import fetch.syntax._ val fetchOne: Fetch[String] = fetchString(1) ``` -Now that we have created a fetch, we can run it to a target monad. Note that the target monad (`Eval` in our example) needs to implement `MonadError[M, Throwable]`, we provide an instance for `Eval` in `fetch.implicits._`, that's why we imported it. +Now that we have created a fetch, we can run it to a `Task`. Note that when we create a task we are not computing any value yet. Having a `Task` instance allows us to try to run it synchronously or asynchronously, choosing a scheduler. + +```tut:book +val result: Task[String] = fetchOne.runA +``` + +We can try to run `result` synchronously with `Task#coeval`. 
```tut:book -val result: String = fetchOne.runA[Eval].value +import monix.execution.Scheduler.Implicits.global + +result.coeval.value ``` +Since we calculated the results eagerly using `Task#now`, we can run this fetch synchronously. + As you can see in the previous example, the `ToStringSource` is queried once to get the value of 1. ## Batching Multiple fetches to the same data source are automatically batched. For illustrating it, we are going to compose three independent fetch results as a tuple. -```tut:silent +```tut:book import cats.syntax.cartesian._ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled +val result: Task[(String, String, String)] = fetchThree.runA +``` + +```tut:invisible +import scala.concurrent._ +import scala.concurrent.duration._ + +def await[A](t: Task[A]): A = Await.result(t.runAsync, Duration.Inf) ``` -When executing the above fetch, note how the three identities get batched and the data source is only queried once. +When executing the above fetch, note how the three identities get batched and the data source is only queried once. Let's pretend we have a function from `Task[A]` to `A` called `await`. ```tut:book -val result: (String, String, String) = fetchThree.runA[Eval].value +await(result) ``` -## Concurrency +## Parallelism + +If we combine two independent fetches from different data sources, the fetches can be run in parallel. First, let's add a data source that fetches a string's size. -If we combine two independent fetches from different data sources, the fetches will be run concurrently. First, let's add a data source that fetches a string's size. +This time, instead of creating the results with `Task#now` we are going to do it with `Task#apply` for emulating an asynchronous data source. ```tut:silent implicit object LengthSource extends DataSource[String, Int]{ - override def fetchOne(id: String): Eval[Option[Int]] = { - Eval.later({ - println(s"LengthSource $id") + override def fetchOne(id: String): Task[Option[Int]] = { + Task({ + println(s"[${Thread.currentThread.getId}] One Length $id") Option(id.size) }) } - override def fetchMany(ids: NonEmptyList[String]): Eval[Map[String, Int]] = { - Eval.later({ - println(s"LengthSource $ids") + override def fetchMany(ids: NonEmptyList[String]): Task[Map[String, Int]] = { + Task({ + println(s"[${Thread.currentThread.getId}] Many Length $ids") ids.unwrap.map(i => (i, i.size)).toMap }) } @@ -133,21 +164,22 @@ def fetchLength(s: String): Fetch[Int] = Fetch(s) And now we can easily receive data from the two sources in a single fetch. -```tut:silent +```tut:book val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled +val result = fetchMulti.runA ``` -Note how the two independent data fetches are run concurrently, minimizing the latency cost of querying the two data sources. If our target monad was a concurrency monad like `Future`, they'd run in parallel, each in its own logical thread. +Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. ```tut:book -val result: (String, Int) = fetchMulti.runA[Eval].value +await(result) ``` ## Caching When fetching an identity, subsequent fetches for the same identity are cached. Let's try creating a fetch that asks for the same identity twice. 
-```tut:silent +```tut:book val fetchTwice: Fetch[(String, String)] = for { one <- fetchString(1) two <- fetchString(1) @@ -157,6 +189,6 @@ val fetchTwice: Fetch[(String, String)] = for { While running it, notice that the data source is only queried once. The next time the identity is requested it's served from the cache. ```tut:book -val result: (String, String) = fetchTwice.runA[Eval].value +val result: (String, String) = await(fetchTwice.runA) ``` diff --git a/shared/src/main/scala/implicits.scala b/shared/src/main/scala/implicits.scala index b500d86d..589714e3 100644 --- a/shared/src/main/scala/implicits.scala +++ b/shared/src/main/scala/implicits.scala @@ -16,4 +16,20 @@ package fetch -object implicits {} +import cats.ApplicativeError +import monix.eval.Task + +object implicits { + implicit val fetchTaskApplicativeError: ApplicativeError[Task, Throwable] = + new ApplicativeError[Task, Throwable] { + def pure[A](x: A): monix.eval.Task[A] = + Task.pure(x) + def ap[A, B](ff: monix.eval.Task[A => B])(fa: monix.eval.Task[A]): monix.eval.Task[B] = + Task.mapBoth(ff, fa)((f, x) => f(x)) + def handleErrorWith[A](fa: monix.eval.Task[A])( + f: Throwable => monix.eval.Task[A]): monix.eval.Task[A] = + fa.onErrorHandleWith(f) + def raiseError[A](e: Throwable): monix.eval.Task[A] = + Task.raiseError(e) + } +} diff --git a/shared/src/test/scala/FetchTests.scala b/shared/src/test/scala/FetchTests.scala index c9566ee7..2f23d809 100644 --- a/shared/src/test/scala/FetchTests.scala +++ b/shared/src/test/scala/FetchTests.scala @@ -103,8 +103,8 @@ object TestHelper { val promise: Promise[A] = Promise() task.runAsync( new Callback[A] { - def onSuccess(value: A): Unit = { promise.success(value); () } - def onError(ex: Throwable): Unit = { promise.failure(ex); () } + def onSuccess(value: A): Unit = { promise.trySuccess(value); () } + def onError(ex: Throwable): Unit = { promise.tryFailure(ex); () } }) promise.future } diff --git a/tut/README.md b/tut/README.md index 65d1d62c..c0880eb1 100644 --- a/tut/README.md +++ b/tut/README.md @@ -12,20 +12,27 @@ A library for Simple & Efficient data access in Scala and Scala.js Add the following dependency to your project's build file. +For Scala 2.11.x: + ```scala "com.fortysevendeg" %% "fetch" %% "0.2.0" ``` -Or, if using Scala.js: +Or, if using Scala.js (0.6.x): ```scala "com.fortysevendeg" %%% "fetch" %% "0.2.0" ``` -Fetch is available for the following Scala and Scala.js versions: +```tut:invisible +val out = Console.out -- Scala 2.11.x -- Scala.js 0.6.x +def println(msg: String): Unit = { + Console.withOut(out) { + Console.println(msg) + } +} +``` ## Remote data @@ -34,7 +41,7 @@ has a latency cost, such as databases or web services. ## Define your data sources -For telling `Fetch` how to get the data you want, you must implement the `DataSource` typeclass. Data sources have a `fetch` method that +To tell Fetch how to get the data you want, you must implement the `DataSource` typeclass. Data sources have a `fetch` method that defines how to fetch such a piece of data. 
Data Sources take two type parameters: @@ -45,31 +52,33 @@ Data Sources take two type parameters: ```scala +import monix.eval.Task +import cats.data.NonEmptyList + trait DataSource[Identity, Result]{ - def fetchOne(id: Identity): Eval[Option[Result]] - def fetchMany(ids: NonEmptyList[Identity]): Eval[Map[Identity, Result]] + def fetchOne(id: Identity): Task[Option[Result]] + def fetchMany(ids: NonEmptyList[Identity]): Task[Map[Identity, Result]] } ``` We'll implement a dummy data source that can convert integers to strings. For convenience, we define a `fetchString` function that lifts identities (`Int` in our dummy data source) to a `Fetch`. ```tut:silent -import cats.Eval +import monix.eval.Task import cats.data.NonEmptyList import cats.std.list._ - import fetch._ implicit object ToStringSource extends DataSource[Int, String]{ - override def fetchOne(id: Int): Eval[Option[String]] = { - Eval.later({ - println(s"ToStringSource $id") + override def fetchOne(id: Int): Task[Option[String]] = { + Task.now({ + println(s"[${Thread.currentThread.getId}] One ToString $id") Option(id.toString) }) } - override def fetchMany(ids: NonEmptyList[Int]): Eval[Map[Int, String]] = { - Eval.later({ - println(s"ToStringSource $ids") + override def fetchMany(ids: NonEmptyList[Int]): Task[Map[Int, String]] = { + Task.now({ + println(s"[${Thread.currentThread.getId}] Many ToString $ids") ids.unwrap.map(i => (i, i.toString)).toMap }) } @@ -83,51 +92,70 @@ def fetchString(n: Int): Fetch[String] = Fetch(n) // or, more explicitly: Fetch( Now that we can convert `Int` values to `Fetch[String]`, let's try creating a fetch. ```tut:silent -import fetch.implicits._ import fetch.syntax._ val fetchOne: Fetch[String] = fetchString(1) ``` -Now that we have created a fetch, we can run it to a target monad. Note that the target monad (`Eval` in our example) needs to implement `MonadError[M, Throwable]`, we provide an instance for `Eval` in `fetch.implicits._`, that's why we imported it. +Now that we have created a fetch, we can run it to a `Task`. Note that when we create a task we are not computing any value yet. Having a `Task` instance allows us to try to run it synchronously or asynchronously, choosing a scheduler. + +```tut:book +val result: Task[String] = fetchOne.runA +``` + +We can try to run `result` synchronously with `Task#coeval`. ```tut:book -val result: String = fetchOne.runA[Eval].value +import monix.execution.Scheduler.Implicits.global + +result.coeval.value ``` +Since we calculated the results eagerly using `Task#now`, we can run this fetch synchronously. + As you can see in the previous example, the `ToStringSource` is queried once to get the value of 1. ## Batching Multiple fetches to the same data source are automatically batched. For illustrating it, we are going to compose three independent fetch results as a tuple. -```tut:silent +```tut:book import cats.syntax.cartesian._ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled +val result: Task[(String, String, String)] = fetchThree.runA +``` + +```tut:invisible +import scala.concurrent._ +import scala.concurrent.duration._ + +def await[A](t: Task[A]): A = Await.result(t.runAsync, Duration.Inf) ``` -When executing the above fetch, note how the three identities get batched and the data source is only queried once. +When executing the above fetch, note how the three identities get batched and the data source is only queried once. Let's pretend we have a function from `Task[A]` to `A` called `await`. 
```tut:book -val result: (String, String, String) = fetchThree.runA[Eval].value +await(result) ``` -## Concurrency +## Parallelism -If we combine two independent fetches from different data sources, the fetches will be run concurrently. First, let's add a data source that fetches a string's size. +If we combine two independent fetches from different data sources, the fetches can be run in parallel. First, let's add a data source that fetches a string's size. + +This time, instead of creating the results with `Task#now` we are going to do it with `Task#apply` for emulating an asynchronous data source. ```tut:silent implicit object LengthSource extends DataSource[String, Int]{ - override def fetchOne(id: String): Eval[Option[Int]] = { - Eval.later({ - println(s"LengthSource $id") + override def fetchOne(id: String): Task[Option[Int]] = { + Task({ + println(s"[${Thread.currentThread.getId}] One Length $id") Option(id.size) }) } - override def fetchMany(ids: NonEmptyList[String]): Eval[Map[String, Int]] = { - Eval.later({ - println(s"LengthSource $ids") + override def fetchMany(ids: NonEmptyList[String]): Task[Map[String, Int]] = { + Task({ + println(s"[${Thread.currentThread.getId}] Many Length $ids") ids.unwrap.map(i => (i, i.size)).toMap }) } @@ -138,21 +166,22 @@ def fetchLength(s: String): Fetch[Int] = Fetch(s) And now we can easily receive data from the two sources in a single fetch. -```tut:silent +```tut:book val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled +val result = fetchMulti.runA ``` -Note how the two independent data fetches are run concurrently, minimizing the latency cost of querying the two data sources. If our target monad was a concurrency monad like `Future`, they'd run in parallel, each in its own logical thread. +Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. ```tut:book -val result: (String, Int) = fetchMulti.runA[Eval].value +await(result) ``` ## Caching When fetching an identity, subsequent fetches for the same identity are cached. Let's try creating a fetch that asks for the same identity twice. -```tut:silent +```tut:book val fetchTwice: Fetch[(String, String)] = for { one <- fetchString(1) two <- fetchString(1) @@ -162,6 +191,5 @@ val fetchTwice: Fetch[(String, String)] = for { While running it, notice that the data source is only queried once. The next time the identity is requested it's served from the cache. 
```tut:book -val result: (String, String) = fetchTwice.runA[Eval].value +val result: (String, String) = await(fetchTwice.runA) ``` - From e837ad8b0d511c0db0e393aabd69270915a5248b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sun, 29 May 2016 19:07:28 +0200 Subject: [PATCH 04/40] Uniquify implicit name --- shared/src/main/scala/fetch.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/shared/src/main/scala/fetch.scala b/shared/src/main/scala/fetch.scala index a9cf1aee..27ef548d 100644 --- a/shared/src/main/scala/fetch.scala +++ b/shared/src/main/scala/fetch.scala @@ -50,7 +50,7 @@ object `package` { type FetchInterpreter[A] = StateT[Task, FetchEnv, A] - implicit val taskMonad: Monad[Task] = new Monad[Task] with Applicative[Task] { + implicit val fetchTaskMonad: Monad[Task] = new Monad[Task] with Applicative[Task] { def pure[A](x: A): Task[A] = Task.pure(x) override def ap[A, B](ff: Task[A => B])(fa: Task[A]): Task[B] = From 2dfa3b4dbf6403383729fc55bc078b8790d49fa9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sun, 29 May 2016 19:08:42 +0200 Subject: [PATCH 05/40] Add tut compilation to CI --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index cb41b5cf..5c4bd616 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,10 +4,9 @@ scala: jdk: - oraclejdk8 script: - - sbt coverage 'fetchJVM/test' + - sbt coverage 'fetchJVM/test' 'fetchJVM/coverageReport' - sbt 'fetchJS/test' - sbt 'docs/tut' - sbt 'readme/tut' after_success: - - sbt 'fetchJVM/coverageReport' - bash <(curl -s https://codecov.io/bash) -t 47609994-e0cd-4f3b-a28d-eb558142c3bb From 6a14cc4bfbfbc9cd2dbfe938236cafa77aa2aab4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sun, 29 May 2016 19:20:31 +0200 Subject: [PATCH 06/40] Minor changes in docs --- README.md | 3 +-- docs/src/tut/index.md | 3 +-- tut/README.md | 3 +-- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index bc356e21..f8c6ea65 100644 --- a/README.md +++ b/README.md @@ -36,8 +36,7 @@ has a latency cost, such as databases or web services. ## Define your data sources -For telling `Fetch` how to get the data you want, you must implement the `DataSource` typeclass. Data sources have a `fetch` method that -defines how to fetch such a piece of data. +To tell `Fetch` how to get the data you want, you must implement the `DataSource` typeclass. Data sources have `fetchOne` and `fetchMany` methods that define how to fetch such a piece of data. Data Sources take two type parameters: diff --git a/docs/src/tut/index.md b/docs/src/tut/index.md index 6264dc59..d2785d00 100644 --- a/docs/src/tut/index.md +++ b/docs/src/tut/index.md @@ -39,8 +39,7 @@ has a latency cost, such as databases or web services. ## Define your data sources -To tell Fetch how to get the data you want, you must implement the `DataSource` typeclass. Data sources have a `fetch` method that -defines how to fetch such a piece of data. +To tell Fetch how to get the data you want, you must implement the `DataSource` typeclass. Data sources have `fetchOne` and `fetchMany` methods that define how to fetch such a piece of data. Data Sources take two type parameters: diff --git a/tut/README.md b/tut/README.md index c0880eb1..41f71a75 100644 --- a/tut/README.md +++ b/tut/README.md @@ -41,8 +41,7 @@ has a latency cost, such as databases or web services. 
## Define your data sources -To tell Fetch how to get the data you want, you must implement the `DataSource` typeclass. Data sources have a `fetch` method that -defines how to fetch such a piece of data. +To tell Fetch how to get the data you want, you must implement the `DataSource` typeclass. Data sources have `fetchOne` and `fetchMany` methods that define how to fetch such a piece of data. Data Sources take two type parameters: From c22d13c88152c16407923b6ef2e4d7f18ebc6829 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sun, 29 May 2016 19:23:06 +0200 Subject: [PATCH 07/40] Update README --- README.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index f8c6ea65..3caeae6e 100644 --- a/README.md +++ b/README.md @@ -105,7 +105,7 @@ import monix.execution.Scheduler.Implicits.global // import monix.execution.Scheduler.Implicits.global result.coeval.value -// [1026] One ToString 1 +// [237] One ToString 1 // res3: Either[monix.execution.CancelableFuture[String],String] = Right(1) ``` @@ -122,7 +122,7 @@ import cats.syntax.cartesian._ // import cats.syntax.cartesian._ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled -// fetchThree: fetch.Fetch[(String, String, String)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List(2, 3)),ToStringSource$@77b748dd)))),),) +// fetchThree: fetch.Fetch[(String, String, String)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List(2, 3)),ToStringSource$@4afe2c89)))),),) val result: Task[(String, String, String)] = fetchThree.runA // result: monix.eval.Task[(String, String, String)] = BindSuspend(,) @@ -135,7 +135,7 @@ When executing the above fetch, note how the three identities get batched and th ```scala await(result) -// [1026] Many ToString OneAnd(1,List(2, 3)) +// [237] Many ToString OneAnd(1,List(2, 3)) // res4: (String, String, String) = (1,2,3) ``` @@ -168,7 +168,7 @@ And now we can easily receive data from the two sources in a single fetch. ```scala val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled -// fetchMulti: fetch.Fetch[(String, Int)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List()),ToStringSource$@77b748dd), FetchMany(OneAnd(one,List()),LengthSource$@741119c5)))),),) +// fetchMulti: fetch.Fetch[(String, Int)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List()),ToStringSource$@4afe2c89), FetchMany(OneAnd(one,List()),LengthSource$@671df379)))),),) val result = fetchMulti.runA // result: monix.eval.Task[(String, Int)] = BindSuspend(,) @@ -178,8 +178,8 @@ Note how the two independent data fetches are run in parallel, minimizing the la ```scala await(result) -// [1026] Many ToString OneAnd(1,List()) -// [1027] Many Length OneAnd(one,List()) +// [237] Many ToString OneAnd(1,List()) +// [183] Many Length OneAnd(one,List()) // res6: (String, Int) = (1,3) ``` @@ -192,13 +192,13 @@ val fetchTwice: Fetch[(String, String)] = for { one <- fetchString(1) two <- fetchString(1) } yield (one, two) -// fetchTwice: fetch.Fetch[(String, String)] = Gosub(Suspend(FetchOne(1,ToStringSource$@77b748dd)),) +// fetchTwice: fetch.Fetch[(String, String)] = Gosub(Suspend(FetchOne(1,ToStringSource$@4afe2c89)),) ``` While running it, notice that the data source is only queried once. The next time the identity is requested it's served from the cache. 
```scala val result: (String, String) = await(fetchTwice.runA) -// [1026] One ToString 1 +// [237] One ToString 1 // result: (String, String) = (1,1) ``` From e2b1647b02722d097dcd90b2aa11ec83ae60fd06 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sun, 29 May 2016 19:24:41 +0200 Subject: [PATCH 08/40] :fire: --- docs/src/tut/docs.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md index 340d1024..776d5089 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -549,8 +549,6 @@ fail. Whenever a fetch fails, a `FetchFailure` exception is thrown. The `FetchFailure` will have the environment, which gives you information about the execution of the fetch. -TODO: show how to diagnose which identity was missing - # Syntax ## Implicit syntax From 5e48ae01c832854c528034b4f877c9399ef22d0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sun, 29 May 2016 17:28:35 +0200 Subject: [PATCH 09/40] Only use fetchMany when there are batches --- shared/src/main/scala/fetch.scala | 88 ++++++++++++++-------- shared/src/main/scala/interpreters.scala | 94 +++++++++++++----------- shared/src/test/scala/FetchTests.scala | 14 +++- 3 files changed, 121 insertions(+), 75 deletions(-) diff --git a/shared/src/main/scala/fetch.scala b/shared/src/main/scala/fetch.scala index 27ef548d..d87cd444 100644 --- a/shared/src/main/scala/fetch.scala +++ b/shared/src/main/scala/fetch.scala @@ -27,17 +27,47 @@ import cats.std.list._ import cats.std.option._ import cats.syntax.traverse._ +/** Requests in Fetch Free monad. + */ +sealed trait FetchRequest[I, A] extends Product with Serializable { + def fullfilledBy(cache: DataSourceCache): Boolean + def missingIdentities(cache: DataSourceCache): List[I] + def dataSource: DataSource[I, A] + def identities: NonEmptyList[I] +} + /** * Primitive operations in the Fetch Free monad. 
*/ sealed abstract class FetchOp[A] extends Product with Serializable -final case class Cached[A](a: A) extends FetchOp[A] -final case class FetchOne[I, A](a: I, ds: DataSource[I, A]) extends FetchOp[A] +final case class Cached[A](a: A) extends FetchOp[A] +final case class FetchOne[I, A](a: I, ds: DataSource[I, A]) + extends FetchOp[A] + with FetchRequest[I, A] { + override def fullfilledBy(cache: DataSourceCache): Boolean = { + cache.get(ds.identity(a)).isDefined + } + override def missingIdentities(cache: DataSourceCache): List[I] = { + cache.get(ds.identity(a)).fold(List(a))((res: Any) => Nil) + } + override def dataSource: DataSource[I, A] = ds + override def identities: NonEmptyList[I] = NonEmptyList(a, Nil) +} final case class FetchMany[I, A](as: NonEmptyList[I], ds: DataSource[I, A]) extends FetchOp[List[A]] -final case class Concurrent(as: List[FetchMany[_, _]]) extends FetchOp[DataSourceCache] -final case class FetchError[A, E <: Throwable](err: E) extends FetchOp[A] + with FetchRequest[I, A] { + override def fullfilledBy(cache: DataSourceCache): Boolean = { + as.forall((i: I) => cache.get(ds.identity(i)).isDefined) + } + override def missingIdentities(cache: DataSourceCache): List[I] = { + as.unwrap.distinct.filterNot(i => cache.get(ds.identity(i)).isDefined) + } + override def dataSource: DataSource[I, A] = ds + override def identities: NonEmptyList[I] = as +} +final case class Concurrent(as: List[FetchRequest[_, _]]) extends FetchOp[DataSourceCache] +final case class FetchError[A](err: Throwable) extends FetchOp[A] object `package` { type DataSourceName = String @@ -99,41 +129,50 @@ object `package` { } } - private[this] def deps[A](f: Fetch[_]): List[FetchOp[_]] = { - type FM = List[FetchOp[_]] - + private[this] def deps[A](f: Fetch[_]): List[FetchRequest[_, _]] = { f.foldMap[Const[FM, ?]](new (FetchOp ~> Const[FM, ?]) { def apply[X](x: FetchOp[X]): Const[FM, X] = x match { - case one @ FetchOne(id, ds) => - Const(List(FetchMany(NonEmptyList(id), ds.asInstanceOf[DataSource[Any, A]]))) - case conc @ Concurrent(as) => Const(as.asInstanceOf[FM]) - case cach @ Cached(a) => Const(List(cach)) - case _ => Const(List()) + case one @ FetchOne(id, ds) => Const(List(one)) + case conc @ Concurrent(as) => Const(as.asInstanceOf[FM]) + case cach @ Cached(a) => Const(List(cach)) + case _ => Const(List()) } })(DM) .getConst + .collect({ + case one @ FetchOne(_, _) => one + case many @ FetchMany(_, _) => many + }) } - private[this] def combineDeps(ds: List[FetchOp[_]]): List[FetchMany[_, _]] = { + private[this] def combineDeps(ds: List[FetchRequest[_, _]]): List[FetchRequest[_, _]] = { ds.foldLeft(Map.empty[DataSource[_, _], NonEmptyList[Any]])((acc, op) => op match { case one @ FetchOne(id, ds) => - acc.updated( - ds, - acc.get(ds).fold(NonEmptyList(id))(accids => accids.combine(NonEmptyList(id)))) + acc.updated(ds, + acc + .get(ds) + .fold(NonEmptyList(id): NonEmptyList[Any])(accids => + accids.combine(NonEmptyList(id)))) case many @ FetchMany(ids, ds) => - acc.updated(ds, acc.get(ds).fold(ids)(accids => accids.combine(ids))) + acc.updated(ds, + acc + .get(ds) + .fold(ids.asInstanceOf[NonEmptyList[Any]])(accids => + accids.combine(ids.asInstanceOf[NonEmptyList[Any]]))) case _ => acc }) .toList .map({ + case (ds, ids) if ids.unwrap.size == 1 => + FetchOne[Any, Any](ids.head, ds.asInstanceOf[DataSource[Any, Any]]) case (ds, ids) => FetchMany[Any, Any](ids, ds.asInstanceOf[DataSource[Any, Any]]) }) } private[this] def concurrently(fa: Fetch[_], fb: Fetch[_]): Fetch[DataSourceCache] = { - val 
fetches: List[FetchMany[_, _]] = combineDeps(deps(fa) ++ deps(fb)) + val fetches: List[FetchRequest[_, _]] = combineDeps(deps(fa) ++ deps(fb)) Free.liftF(Concurrent(fetches)) } @@ -171,18 +210,7 @@ object `package` { fetched.fold(many: FetchOp[B])(results => Cached(results)) } case conc @ Concurrent(manies) => { - val newManies = manies - .filterNot({ fm => - val ids: NonEmptyList[Any] = fm.as.asInstanceOf[NonEmptyList[Any]] - val ds: DataSource[Any, _] = fm.ds.asInstanceOf[DataSource[Any, _]] - - ids - .map(id => { - results.get(ds.identity(id)) - }) - .forall(_.isDefined) - }) - .asInstanceOf[List[FetchMany[_, _]]] + val newManies = manies.filterNot(_.fullfilledBy(results)) if (newManies.isEmpty) Cached(results).asInstanceOf[FetchOp[B]] diff --git a/shared/src/main/scala/interpreters.scala b/shared/src/main/scala/interpreters.scala index e07d4d54..a1c34b86 100644 --- a/shared/src/main/scala/interpreters.scala +++ b/shared/src/main/scala/interpreters.scala @@ -33,53 +33,60 @@ case class FetchFailure(env: Env) extends Throwable trait FetchInterpreters { - def interpreter[I]: FetchOp ~> FetchInterpreter = { - def dedupeIds[I, A](ids: NonEmptyList[I], ds: DataSource[I, A], cache: DataSourceCache) = { - ids.unwrap.distinct.filterNot(i => cache.get(ds.identity(i)).isDefined) - } + def pendingRequests( + requests: List[FetchRequest[_, _]], cache: DataSourceCache): List[FetchRequest[Any, Any]] = { + requests + .filterNot(_.fullfilledBy(cache)) + .map(req => { + (req.dataSource, req.missingIdentities(cache)) + }) + .collect({ + case (ds, ids) if ids.size == 1 => + FetchOne[Any, Any](ids.head, ds.asInstanceOf[DataSource[Any, Any]]) + case (ds, ids) if ids.size > 1 => + FetchMany[Any, Any]( + NonEmptyList(ids(0), ids.tail), ds.asInstanceOf[DataSource[Any, Any]]) + }) + } + def interpreter[I]: FetchOp ~> FetchInterpreter = { new (FetchOp ~> FetchInterpreter) { def apply[A](fa: FetchOp[A]): FetchInterpreter[A] = { StateT[Task, FetchEnv, A] { env: FetchEnv => fa match { case FetchError(e) => Task.raiseError(e) case Cached(a) => Task.pure((env, a)) - case Concurrent(manies) => { + case Concurrent(concurrentRequests) => { val startRound = System.nanoTime() val cache = env.cache - val sources = manies.map(_.ds) - val ids = manies.map(_.as) - val sourcesAndIds = (sources zip ids) - .map({ - case (ds, ids) => - ( - ds, - dedupeIds[I, A](ids.asInstanceOf[NonEmptyList[I]], - ds.asInstanceOf[DataSource[I, A]], - cache) - ) - }) - .collect({ - case (ds, ids) if !ids.isEmpty => (ds, NonEmptyList(ids(0), ids.tail)) - }) + val requests: List[FetchRequest[Any, Any]] = + pendingRequests(concurrentRequests, cache) - if (sourcesAndIds.isEmpty) - Task.pure((env, env.cache.asInstanceOf[A])) + if (requests.isEmpty) + Task.pure((env, cache.asInstanceOf[A])) else Task - .sequence(sourcesAndIds.map({ - case (ds, as) => + .sequence(requests.map({ + case FetchOne(a, ds) => { + val ident = a.asInstanceOf[I] + ds.asInstanceOf[DataSource[I, A]] + .fetchOne(ident) + .map((r: Option[A]) => + r.fold(Map.empty[I, A])((result: A) => Map(ident -> result))) + } + case FetchMany(as, ds) => ds.asInstanceOf[DataSource[I, A]] .fetchMany(as.asInstanceOf[NonEmptyList[I]]) })) .flatMap((results: List[Map[_, _]]) => { val endRound = System.nanoTime() val newCache = - (sources zip results).foldLeft(cache)((accache, resultset) => { - val (ds, resultmap) = resultset - val tresults = resultmap.asInstanceOf[Map[I, A]] - val tds = ds.asInstanceOf[DataSource[I, A]] + (requests zip results).foldLeft(cache)((accache, resultset) => { + val (req, 
resultmap) = resultset + val ds = req.dataSource + val tresults = resultmap.asInstanceOf[Map[I, A]] + val tds = ds.asInstanceOf[DataSource[I, A]] accache.cacheResults[I, A](tresults, tds) }) val newEnv = env.next( @@ -88,9 +95,10 @@ trait FetchInterpreters { cache, "Concurrent", ConcurrentRound( - sourcesAndIds + requests .map({ - case (ds, as) => (ds.name, as.unwrap) + case FetchOne(a, ds) => (ds.name, List(a)) + case FetchMany(as, ds) => (ds.name, as.unwrap) }) .toMap ), @@ -100,20 +108,22 @@ trait FetchInterpreters { Nil ) - val allFetched = (sourcesAndIds zip results).forall({ - case ((_, theIds), results) => theIds.unwrap.size == results.size - case _ => false + val allFullfilled = (requests zip results).forall({ + case (FetchOne(_, _), results) => results.size == 1 + case (FetchMany(as, _), results) => as.unwrap.size == results.size + case _ => false }) - if (allFetched) { + if (allFullfilled) { // since user-provided caches may discard elements, we use an in-memory // cache to gather these intermediate results that will be used for // concurrent optimizations. - val cachedResults = (sources zip results).foldLeft(InMemoryCache.empty)( + val cachedResults = (requests zip results).foldLeft(InMemoryCache.empty)( (cach, resultSet) => { - val (ds, resultmap) = resultSet - val tresults = resultmap.asInstanceOf[Map[I, A]] - val tds = ds.asInstanceOf[DataSource[I, A]] + val (req, resultmap) = resultSet + val ds = req.dataSource + val tresults = resultmap.asInstanceOf[Map[I, A]] + val tds = ds.asInstanceOf[DataSource[I, A]] cach.cacheResults[I, A](tresults, tds).asInstanceOf[InMemoryCache] }) Task.pure((newEnv, cachedResults.asInstanceOf[A])) @@ -179,11 +189,10 @@ trait FetchInterpreters { ) }) } - case FetchMany(ids, ds) => { + case many @ FetchMany(ids, ds) => { val startRound = System.nanoTime() val cache = env.cache - val oldIds = ids.unwrap.distinct - val newIds = dedupeIds[Any, Any](ids, ds, cache) + val newIds = many.missingIdentities(cache) if (newIds.isEmpty) Task.pure( (env.next( @@ -225,7 +234,6 @@ trait FetchInterpreters { val endRound = System.nanoTime() val newCache = cache.cacheResults[I, A](res, ds.asInstanceOf[DataSource[I, A]]) - val someCached = oldIds.size == newIds.size Task.pure( (env.next( newCache, @@ -234,7 +242,7 @@ trait FetchInterpreters { ManyRound(ids.unwrap), startRound, endRound, - someCached), + results.size < ids.unwrap.distinct.size), newIds ), results) diff --git a/shared/src/test/scala/FetchTests.scala b/shared/src/test/scala/FetchTests.scala index 2f23d809..aab8df94 100644 --- a/shared/src/test/scala/FetchTests.scala +++ b/shared/src/test/scala/FetchTests.scala @@ -85,7 +85,7 @@ object TestHelper { round.kind match { case OneRound(_) => acc case ManyRound(ids) => acc + 1 - case ConcurrentRound(ids) => acc + ids.size + case ConcurrentRound(ids) => acc + ids.filter(_._2.size > 1).size }) def concurrent(rs: Seq[Round]): Seq[Round] = @@ -357,6 +357,16 @@ class FetchTests extends AsyncFreeSpec with Matchers { }) } + "Concurrent fetching calls batches only wen it can" in { + val fetch: Fetch[(Int, List[Int])] = Fetch.join(one(1), many(3)) + + val task = Fetch.runEnv(fetch) + + toFuture(task).map(env => { + totalBatches(env.rounds) shouldEqual 0 + }) + } + "If a fetch fails in the left hand of a product the product will fail" in { val fetch: Fetch[(Int, List[Int])] = Fetch.join(Fetch.error(NotFound()), many(3)) val task = Fetch.run(fetch) @@ -435,7 +445,7 @@ class FetchTests extends AsyncFreeSpec with Matchers { val rounds = env.rounds val stats = 
(concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) - stats shouldEqual (2, 2, 4) + stats shouldEqual (2, 1, 4) }) } From 985f0c3c477e532a1e8fa11a0183f7d65a69c24f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sun, 29 May 2016 19:36:57 +0200 Subject: [PATCH 10/40] Update README --- README.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 3caeae6e..66a0c618 100644 --- a/README.md +++ b/README.md @@ -105,7 +105,7 @@ import monix.execution.Scheduler.Implicits.global // import monix.execution.Scheduler.Implicits.global result.coeval.value -// [237] One ToString 1 +// [62] One ToString 1 // res3: Either[monix.execution.CancelableFuture[String],String] = Right(1) ``` @@ -122,7 +122,7 @@ import cats.syntax.cartesian._ // import cats.syntax.cartesian._ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled -// fetchThree: fetch.Fetch[(String, String, String)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List(2, 3)),ToStringSource$@4afe2c89)))),),) +// fetchThree: fetch.Fetch[(String, String, String)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List(2, 3)),ToStringSource$@5491cfd8)))),),) val result: Task[(String, String, String)] = fetchThree.runA // result: monix.eval.Task[(String, String, String)] = BindSuspend(,) @@ -135,7 +135,7 @@ When executing the above fetch, note how the three identities get batched and th ```scala await(result) -// [237] Many ToString OneAnd(1,List(2, 3)) +// [62] Many ToString OneAnd(1,List(2, 3)) // res4: (String, String, String) = (1,2,3) ``` @@ -168,7 +168,7 @@ And now we can easily receive data from the two sources in a single fetch. ```scala val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled -// fetchMulti: fetch.Fetch[(String, Int)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List()),ToStringSource$@4afe2c89), FetchMany(OneAnd(one,List()),LengthSource$@671df379)))),),) +// fetchMulti: fetch.Fetch[(String, Int)] = Gosub(Gosub(Suspend(Concurrent(List(FetchOne(1,ToStringSource$@5491cfd8), FetchOne(one,LengthSource$@36f594ec)))),),) val result = fetchMulti.runA // result: monix.eval.Task[(String, Int)] = BindSuspend(,) @@ -178,8 +178,8 @@ Note how the two independent data fetches are run in parallel, minimizing the la ```scala await(result) -// [237] Many ToString OneAnd(1,List()) -// [183] Many Length OneAnd(one,List()) +// [62] One ToString 1 +// [56] One Length one // res6: (String, Int) = (1,3) ``` @@ -192,13 +192,13 @@ val fetchTwice: Fetch[(String, String)] = for { one <- fetchString(1) two <- fetchString(1) } yield (one, two) -// fetchTwice: fetch.Fetch[(String, String)] = Gosub(Suspend(FetchOne(1,ToStringSource$@4afe2c89)),) +// fetchTwice: fetch.Fetch[(String, String)] = Gosub(Suspend(FetchOne(1,ToStringSource$@5491cfd8)),) ``` While running it, notice that the data source is only queried once. The next time the identity is requested it's served from the cache. 
```scala val result: (String, String) = await(fetchTwice.runA) -// [237] One ToString 1 +// [62] One ToString 1 // result: (String, String) = (1,1) ``` From adbf420b9b6f9afb9da1649ce7ad9b361374ff09 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sun, 29 May 2016 19:44:12 +0200 Subject: [PATCH 11/40] Fix link --- docs/src/tut/docs.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md index 776d5089..9ae37a70 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -79,7 +79,7 @@ It takes two type parameters: - `Result`: the type of the data we retrieve (a `User` if we were fetching users) There are two methods: `fetchOne` and `fetchMany`. `fetchOne` receives one identity and must return -a [Task](https://github.com/typelevel/cats/blob/master/core/src/main/scala/cats/Eval.scala) containing +a [Task](https://github.com/monixio/monix/blob/dd6e47b7b870b38825d516f846f6e074d78d5c40/monix-eval/shared/src/main/scala/monix/eval/Task.scala) containing an optional result. Returning an `Option` Fetch can detect whether an identity couldn't be fetched or no longer exists. `fetchMany` method takes a non-empty list of identities and must return a `Task` containing From e8d5e8765b8f70fcb2564377ee2931acc116b837 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Tue, 31 May 2016 11:07:35 +0200 Subject: [PATCH 12/40] Support asynchronous data sources --- build.sbt | 3 +- shared/src/main/scala/datasource.scala | 36 +- shared/src/main/scala/fetch.scala | 77 +++- shared/src/main/scala/implicits.scala | 63 ++- shared/src/main/scala/interpreters.scala | 283 ++++++------ shared/src/main/scala/syntax.scala | 23 +- ...Tests.scala => FetchAsyncQueryTests.scala} | 60 +-- shared/src/test/scala/FetchTests.scala | 429 +++++++++--------- 8 files changed, 520 insertions(+), 454 deletions(-) rename shared/src/test/{scala/FetchFutureTests.scala => FetchAsyncQueryTests.scala} (63%) diff --git a/build.sbt b/build.sbt index fc117764..e19b7af3 100644 --- a/build.sbt +++ b/build.sbt @@ -18,7 +18,8 @@ lazy val commonSettings = Seq( resolvers += Resolver.sonatypeRepo("releases"), libraryDependencies ++= Seq( "org.typelevel" %%% "cats" % "0.6.0", - "io.monix" %%% "monix-eval" % "2.0-RC3", + "io.monix" %%% "monix-eval" % "2.0-RC5", + "io.monix" %%% "monix-cats" % "2.0-RC5", "org.scalatest" %%% "scalatest" % "3.0.0-M7" % "test", compilerPlugin( "org.spire-math" %% "kind-projector" % "0.7.1" diff --git a/shared/src/main/scala/datasource.scala b/shared/src/main/scala/datasource.scala index 49e1e248..61d0beb2 100644 --- a/shared/src/main/scala/datasource.scala +++ b/shared/src/main/scala/datasource.scala @@ -40,26 +40,28 @@ trait DataSource[I, A] { /** Fetch one identity, returning a None if it wasn't found. */ - def fetchOne(id: I): Task[Option[A]] + def fetchOne(id: I): Query[Option[A]] /** Fetch many identities, returning a mapping from identities to results. If an * identity wasn't found won't appear in the keys. */ - def fetchMany(ids: NonEmptyList[I]): Task[Map[I, A]] + def fetchMany(ids: NonEmptyList[I]): Query[Map[I, A]] - /** Use `fetchOne` for implementing of `fetchMany`. Use only when the data - * source doesn't support batching. 
- */ - def batchingNotSupported(ids: NonEmptyList[I]): Task[Map[I, A]] = { - val idsList = ids.unwrap - Task - .sequence(idsList.map(fetchOne)) - .map(results => { - (idsList zip results) - .collect({ - case (id, Some(result)) => (id, result) - }) - .toMap - }) - } + // FIXME: query can be applicative? + // /** Use `fetchOne` for implementing of `fetchMany`. Use only when the data + // * source doesn't support batching. + // */ + // def batchingNotSupported(ids: NonEmptyList[I]): Query[Map[I, A]] = { + // val idsList = ids.unwrap + // idsList + // .map(fetchOne) + // .sequence + // .map(results => { + // (idsList zip results) + // .collect({ + // case (id, Some(result)) => (id, result) + // }) + // .toMap + // }) + // } } diff --git a/shared/src/main/scala/fetch.scala b/shared/src/main/scala/fetch.scala index d87cd444..ef1b9009 100644 --- a/shared/src/main/scala/fetch.scala +++ b/shared/src/main/scala/fetch.scala @@ -18,14 +18,29 @@ package fetch import scala.collection.immutable.Map -import monix.eval.Task - -import cats.{Applicative, Monad, MonadError, ~>} +import cats.{Applicative, Monad, ApplicativeError, MonadError, ~>} import cats.data.{StateT, Const, NonEmptyList} import cats.free.{Free} import cats.std.list._ import cats.std.option._ import cats.syntax.traverse._ +import scala.concurrent.duration._ + +sealed trait Query[A] extends Product with Serializable +final case class Now[A](a: A) extends Query[A] +final case class Later[A](a: () => A) extends Query[A] +final case class Async[A](action: (Query.Callback[A], Query.Errback) => Unit) extends Query[A] // todo: timeout + +object Query { + type Callback[A] = A => Unit + type Errback = Throwable => Unit + + def now[A](x: A): Query[A] = Now(x) + def later[A](th: => A): Query[A] = Later(th _) + def async[A]( + action: (Callback[A], Errback) => Unit //, timeout: FiniteDuration + ): Query[A] = Async(action) +} /** Requests in Fetch Free monad. */ @@ -76,18 +91,14 @@ object `package` { type Fetch[A] = Free[FetchOp, A] - type FetchMonadError[M[_]] = MonadError[M, Throwable] - - type FetchInterpreter[A] = StateT[Task, FetchEnv, A] - - implicit val fetchTaskMonad: Monad[Task] = new Monad[Task] with Applicative[Task] { - def pure[A](x: A): Task[A] = Task.pure(x) - - override def ap[A, B](ff: Task[A => B])(fa: Task[A]): Task[B] = - Task.mapBoth(ff, fa)((f, a) => f(a)) + trait FetchMonadError[M[_]] extends MonadError[M, Throwable] { + def runQuery[A](q: Query[A]): M[A] + } - def flatMap[A, B](fa: Task[A])(f: A => Task[B]): Task[B] = fa.flatMap(f) + type FetchInterpreter[M[_]] = { + type f[x] = StateT[M, FetchEnv, x] } + implicit val fetchApplicative: Applicative[Fetch] = new Applicative[Fetch] { def pure[A](a: A): Fetch[A] = Fetch.pure(a) @@ -247,24 +258,50 @@ object `package` { } yield result } + class FetchRunner[M[_]] { + def apply[A]( + fa: Fetch[A], + cache: DataSourceCache = InMemoryCache.empty + )( + implicit MM: FetchMonadError[M] + ): M[(FetchEnv, A)] = + fa.foldMap[FetchInterpreter[M]#f](interpreter).run(FetchEnv(cache)) + } + /** * Run a `Fetch` with the given cache, returning a pair of the final environment and result * in the monad `M`. 
*/ - def runFetch[A]( - fa: Fetch[A], cache: DataSourceCache = InMemoryCache.empty): Task[(FetchEnv, A)] = - fa.foldMap(interpreter).run(FetchEnv(cache)) + def runFetch[M[_]]: FetchRunner[M] = new FetchRunner[M] + + class FetchRunnerEnv[M[_]] { + def apply[A]( + fa: Fetch[A], + cache: DataSourceCache = InMemoryCache.empty + )( + implicit MM: FetchMonadError[M] + ): M[FetchEnv] = + fa.foldMap[FetchInterpreter[M]#f](interpreter).runS(FetchEnv(cache)) + } /** * Run a `Fetch` with the given cache, returning the final environment in the monad `M`. */ - def runEnv[A](fa: Fetch[A], cache: DataSourceCache = InMemoryCache.empty): Task[FetchEnv] = - runFetch(fa, cache).map(_._1) + def runEnv[M[_]]: FetchRunnerEnv[M] = new FetchRunnerEnv[M] + + class FetchRunnerA[M[_]] { + def apply[A]( + fa: Fetch[A], + cache: DataSourceCache = InMemoryCache.empty + )( + implicit MM: FetchMonadError[M] + ): M[A] = + fa.foldMap[FetchInterpreter[M]#f](interpreter).runA(FetchEnv(cache)) + } /** * Run a `Fetch` with the given cache, the result in the monad `M`. */ - def run[A](fa: Fetch[A], cache: DataSourceCache = InMemoryCache.empty): Task[A] = - runFetch(fa, cache).map(_._2) + def run[M[_]]: FetchRunnerA[M] = new FetchRunnerA[M] } } diff --git a/shared/src/main/scala/implicits.scala b/shared/src/main/scala/implicits.scala index 589714e3..7017dfc5 100644 --- a/shared/src/main/scala/implicits.scala +++ b/shared/src/main/scala/implicits.scala @@ -16,20 +16,59 @@ package fetch -import cats.ApplicativeError import monix.eval.Task +import monix.execution.Cancelable +import monix.execution.Scheduler +import cats.ApplicativeError +import scala.concurrent.{Promise, Future, ExecutionContext} object implicits { - implicit val fetchTaskApplicativeError: ApplicativeError[Task, Throwable] = - new ApplicativeError[Task, Throwable] { - def pure[A](x: A): monix.eval.Task[A] = - Task.pure(x) - def ap[A, B](ff: monix.eval.Task[A => B])(fa: monix.eval.Task[A]): monix.eval.Task[B] = - Task.mapBoth(ff, fa)((f, x) => f(x)) - def handleErrorWith[A](fa: monix.eval.Task[A])( - f: Throwable => monix.eval.Task[A]): monix.eval.Task[A] = - fa.onErrorHandleWith(f) - def raiseError[A](e: Throwable): monix.eval.Task[A] = - Task.raiseError(e) + implicit val fetchTaskFetchMonadError: FetchMonadError[Task] = new FetchMonadError[Task] { + override def runQuery[A](j: Query[A]): Task[A] = j match { + case Now(x) => Task.now(x) + case Later(x) => Task.evalAlways({ x() }) + case Async(ac) => + Task.create( + (scheduler, callback) => { + + scheduler.execute(new Runnable { + def run() = ac(callback.onSuccess, callback.onError) + }) + + Cancelable.empty + }) + } + + def pure[A](x: A): Task[A] = Task.now(x) + def handleErrorWith[A](fa: monix.eval.Task[A])( + f: Throwable => monix.eval.Task[A]): monix.eval.Task[A] = fa.onErrorHandleWith(f) + override def ap[A, B](f: Task[A => B])(x: Task[A]): Task[B] = + Task.mapBoth(f, x)((f, x) => f(x)) + def raiseError[A](e: Throwable): monix.eval.Task[A] = Task.raiseError(e) + def flatMap[A, B](fa: monix.eval.Task[A])(f: A => monix.eval.Task[B]): monix.eval.Task[B] = + fa.flatMap(f) + } + + implicit def fetchFutureFetchMonadError( + implicit ec: ExecutionContext + ): FetchMonadError[Future] = new FetchMonadError[Future] { + override def runQuery[A](j: Query[A]): Future[A] = j match { + case Now(x) => Future.successful(x) + case Later(x) => Future({ x() }) + case Async(ac) => { + val p = Promise[A]() + + ec.execute(new Runnable { + def run() = ac(p.trySuccess _, p.tryFailure _) + }) + + p.future + } } + def pure[A](x: A): 
Future[A] = Future.successful(x) + def handleErrorWith[A](fa: Future[A])(f: Throwable => Future[A]): Future[A] = + fa.recoverWith({ case t => f(t) }) + def raiseError[A](e: Throwable): Future[A] = Future.failed(e) + def flatMap[A, B](fa: Future[A])(f: A => Future[B]): Future[B] = fa.flatMap(f) + } } diff --git a/shared/src/main/scala/interpreters.scala b/shared/src/main/scala/interpreters.scala index a1c34b86..7bf3ea0c 100644 --- a/shared/src/main/scala/interpreters.scala +++ b/shared/src/main/scala/interpreters.scala @@ -18,8 +18,6 @@ package fetch import scala.collection.immutable._ -import monix.eval.Task - import cats.{MonadError, ~>} import cats.data.{StateT, NonEmptyList} import cats.std.option._ @@ -49,13 +47,15 @@ trait FetchInterpreters { }) } - def interpreter[I]: FetchOp ~> FetchInterpreter = { - new (FetchOp ~> FetchInterpreter) { - def apply[A](fa: FetchOp[A]): FetchInterpreter[A] = { - StateT[Task, FetchEnv, A] { env: FetchEnv => + def interpreter[I, M[_]]( + implicit M: FetchMonadError[M] + ): FetchOp ~> FetchInterpreter[M]#f = { + new (FetchOp ~> FetchInterpreter[M]#f) { + def apply[A](fa: FetchOp[A]): FetchInterpreter[M]#f[A] = { + StateT[M, FetchEnv, A] { env: FetchEnv => fa match { - case FetchError(e) => Task.raiseError(e) - case Cached(a) => Task.pure((env, a)) + case FetchError(e) => M.raiseError(e) + case Cached(a) => M.pure((env, a)) case Concurrent(concurrentRequests) => { val startRound = System.nanoTime() val cache = env.cache @@ -64,117 +64,115 @@ trait FetchInterpreters { pendingRequests(concurrentRequests, cache) if (requests.isEmpty) - Task.pure((env, cache.asInstanceOf[A])) - else - Task - .sequence(requests.map({ - case FetchOne(a, ds) => { - val ident = a.asInstanceOf[I] - ds.asInstanceOf[DataSource[I, A]] - .fetchOne(ident) - .map((r: Option[A]) => - r.fold(Map.empty[I, A])((result: A) => Map(ident -> result))) - } - case FetchMany(as, ds) => - ds.asInstanceOf[DataSource[I, A]] - .fetchMany(as.asInstanceOf[NonEmptyList[I]]) - })) - .flatMap((results: List[Map[_, _]]) => { - val endRound = System.nanoTime() - val newCache = - (requests zip results).foldLeft(cache)((accache, resultset) => { - val (req, resultmap) = resultset - val ds = req.dataSource - val tresults = resultmap.asInstanceOf[Map[I, A]] - val tds = ds.asInstanceOf[DataSource[I, A]] - accache.cacheResults[I, A](tresults, tds) - }) - val newEnv = env.next( - newCache, - Round( - cache, - "Concurrent", - ConcurrentRound( - requests - .map({ - case FetchOne(a, ds) => (ds.name, List(a)) - case FetchMany(as, ds) => (ds.name, as.unwrap) - }) - .toMap - ), - startRound, - endRound - ), - Nil - ) + M.pure((env, cache.asInstanceOf[A])) + else { + val sentRequests = M.sequence(requests.map({ + case FetchOne(a, ds) => { + val ident = a.asInstanceOf[I] + val task = M.runQuery(ds.asInstanceOf[DataSource[I, A]].fetchOne(ident)) + M.map(task)((r: Option[A]) => + r.fold(Map.empty[I, A])((result: A) => Map(ident -> result))) + } + case FetchMany(as, ds) => + M.runQuery(ds + .asInstanceOf[DataSource[I, A]] + .fetchMany(as.asInstanceOf[NonEmptyList[I]])) + })) + M.flatMap(sentRequests)((results: List[Map[_, _]]) => { + val endRound = System.nanoTime() + val newCache = (requests zip results).foldLeft(cache)((accache, resultset) => { + val (req, resultmap) = resultset + val ds = req.dataSource + val tresults = resultmap.asInstanceOf[Map[I, A]] + val tds = ds.asInstanceOf[DataSource[I, A]] + accache.cacheResults[I, A](tresults, tds) + }) + val newEnv = env.next( + newCache, + Round( + cache, + "Concurrent", + 
ConcurrentRound( + requests + .map({ + case FetchOne(a, ds) => (ds.name, List(a)) + case FetchMany(as, ds) => (ds.name, as.unwrap) + }) + .toMap + ), + startRound, + endRound + ), + Nil + ) - val allFullfilled = (requests zip results).forall({ - case (FetchOne(_, _), results) => results.size == 1 - case (FetchMany(as, _), results) => as.unwrap.size == results.size - case _ => false - }) + val allFullfilled = (requests zip results).forall({ + case (FetchOne(_, _), results) => results.size == 1 + case (FetchMany(as, _), results) => as.unwrap.size == results.size + case _ => false + }) - if (allFullfilled) { - // since user-provided caches may discard elements, we use an in-memory - // cache to gather these intermediate results that will be used for - // concurrent optimizations. - val cachedResults = (requests zip results).foldLeft(InMemoryCache.empty)( - (cach, resultSet) => { + if (allFullfilled) { + // since user-provided caches may discard elements, we use an in-memory + // cache to gather these intermediate results that will be used for + // concurrent optimizations. + val cachedResults = + (requests zip results).foldLeft(InMemoryCache.empty)((cach, resultSet) => { val (req, resultmap) = resultSet val ds = req.dataSource val tresults = resultmap.asInstanceOf[Map[I, A]] val tds = ds.asInstanceOf[DataSource[I, A]] cach.cacheResults[I, A](tresults, tds).asInstanceOf[InMemoryCache] }) - Task.pure((newEnv, cachedResults.asInstanceOf[A])) - } else { - Task.raiseError(FetchFailure(newEnv)) - } - }) + M.pure((newEnv, cachedResults.asInstanceOf[A])) + } else { + M.raiseError(FetchFailure(newEnv)) + } + }) + } } case FetchOne(id, ds) => { val startRound = System.nanoTime() val cache = env.cache cache .get(ds.identity(id)) - .fold[Task[(FetchEnv, A)]]( - ds.fetchOne(id) - .flatMap((res: Option[A]) => { - val endRound = System.nanoTime() - res.fold[Task[(FetchEnv, A)]]( - Task.raiseError( - FetchFailure( - env.next( - cache, - Round(cache, - ds.name, - OneRound(id), - startRound, - endRound), - List(id) - ) - ) - ) - )(result => { - val endRound = System.nanoTime() - val newCache = cache.update(ds.identity(id), result) - Task.pure( - (env.next( - newCache, - Round(cache, - ds.name, - OneRound(id), - startRound, - endRound), - List(id) - ), - result) + .fold[M[(FetchEnv, A)]]( + M.flatMap(M.runQuery(ds.fetchOne(id)))((res: Option[A]) => { + val endRound = System.nanoTime() + res.fold[M[(FetchEnv, A)]]( + M.raiseError( + FetchFailure( + env.next( + cache, + Round(cache, + ds.name, + OneRound(id), + startRound, + endRound), + List(id) + ) + ) ) - }) + )(result => { + val endRound = System.nanoTime() + val newCache = cache.update(ds.identity(id), result) + M.pure( + (env.next( + newCache, + Round(cache, + ds.name, + OneRound(id), + startRound, + endRound), + List(id) + ), + result) + ) }) + }) )(cached => { val endRound = System.nanoTime() - Task.pure( + M.pure( (env.next( cache, Round(cache, @@ -194,7 +192,7 @@ trait FetchInterpreters { val cache = env.cache val newIds = many.missingIdentities(cache) if (newIds.isEmpty) - Task.pure( + M.pure( (env.next( cache, Round(cache, @@ -208,47 +206,48 @@ trait FetchInterpreters { ids.unwrap.flatMap(id => cache.get(ds.identity(id)))) ) else { - ds.asInstanceOf[DataSource[I, A]] - .fetchMany( - NonEmptyList(newIds(0).asInstanceOf[I], newIds.tail.asInstanceOf[List[I]])) - .flatMap((res: Map[I, A]) => { - val endRound = System.nanoTime() - ids.unwrap - .map(i => res.get(i.asInstanceOf[I])) - .sequence - .fold[Task[(FetchEnv, A)]]( - Task.raiseError( - FetchFailure( 
- env.next( - cache, - Round(cache, - ds.name, - ManyRound(ids.unwrap), - startRound, - endRound), - newIds - ) - ) - ) - )(results => { - val endRound = System.nanoTime() - val newCache = - cache.cacheResults[I, A](res, ds.asInstanceOf[DataSource[I, A]]) - Task.pure( - (env.next( - newCache, - Round(cache, - ds.name, - ManyRound(ids.unwrap), - startRound, - endRound, - results.size < ids.unwrap.distinct.size), - newIds - ), - results) + M.flatMap(M.runQuery(ds + .asInstanceOf[DataSource[I, A]] + .fetchMany(NonEmptyList(newIds(0).asInstanceOf[I], + newIds.tail.asInstanceOf[List[I]]))))( + (res: Map[I, A]) => { + val endRound = System.nanoTime() + ids.unwrap + .map(i => res.get(i.asInstanceOf[I])) + .sequence + .fold[M[(FetchEnv, A)]]( + M.raiseError( + FetchFailure( + env.next( + cache, + Round(cache, + ds.name, + ManyRound(ids.unwrap), + startRound, + endRound), + newIds + ) + ) ) - }) - }) + )(results => { + val endRound = System.nanoTime() + val newCache = + cache.cacheResults[I, A](res, ds.asInstanceOf[DataSource[I, A]]) + M.pure( + (env.next( + newCache, + Round(cache, + ds.name, + ManyRound(ids.unwrap), + startRound, + endRound, + results.size < ids.unwrap.distinct.size), + newIds + ), + results) + ) + }) + }) } } } diff --git a/shared/src/main/scala/syntax.scala b/shared/src/main/scala/syntax.scala index 4b7dc1a3..548fe411 100644 --- a/shared/src/main/scala/syntax.scala +++ b/shared/src/main/scala/syntax.scala @@ -28,7 +28,7 @@ object syntax { } /** Implicit syntax to lift exception to Fetch errors */ - implicit class FetchErrorSyntax[A <: Throwable](val a: A) extends AnyVal { + implicit class FetchErrorSyntax(val a: Throwable) extends AnyVal { def fetch[B]: Fetch[B] = Fetch.error[B](a) @@ -40,22 +40,13 @@ object syntax { def join[B](fb: Fetch[B]): Fetch[(A, B)] = Fetch.join(fa, fb) - def runF: Task[(FetchEnv, A)] = - Fetch.runFetch(fa, InMemoryCache.empty) + def runF[M[_]: FetchMonadError]: M[(FetchEnv, A)] = + Fetch.runFetch[M](fa, InMemoryCache.empty) - def runE: Task[FetchEnv] = - Fetch.runEnv(fa, InMemoryCache.empty) + def runE[M[_]: FetchMonadError]: M[FetchEnv] = + Fetch.runEnv[M](fa, InMemoryCache.empty) - def runA: Task[A] = - Fetch.run(fa, InMemoryCache.empty) - - def runF(cache: DataSourceCache): Task[(FetchEnv, A)] = - Fetch.runFetch(fa, cache) - - def runE(cache: DataSourceCache): Task[FetchEnv] = - Fetch.runEnv(fa, cache) - - def runA(cache: DataSourceCache): Task[A] = - Fetch.run(fa, cache) + def runA[M[_]: FetchMonadError]: M[A] = + Fetch.run[M](fa, InMemoryCache.empty) } } diff --git a/shared/src/test/scala/FetchFutureTests.scala b/shared/src/test/FetchAsyncQueryTests.scala similarity index 63% rename from shared/src/test/scala/FetchFutureTests.scala rename to shared/src/test/FetchAsyncQueryTests.scala index 9cdc136a..8e79bfc8 100644 --- a/shared/src/test/scala/FetchFutureTests.scala +++ b/shared/src/test/FetchAsyncQueryTests.scala @@ -19,15 +19,15 @@ import scala.concurrent.duration._ import org.scalatest._ -import monix.eval._ import monix.execution.Scheduler import cats.data.NonEmptyList import cats.std.list._ import fetch._ +import fetch.implicits._ -class FetchFutureTests extends AsyncFreeSpec with Matchers { +class FetchAsyncQueryTests extends AsyncFreeSpec with Matchers { implicit def executionContext = Scheduler.Implicits.global - override def newInstance = new FetchFutureTests + override def newInstance = new FetchAsyncQueryTests case class ArticleId(id: Int) case class Article(id: Int, content: String) { @@ -36,11 +36,13 @@ class FetchFutureTests extends 
AsyncFreeSpec with Matchers { implicit object ArticleFuture extends DataSource[ArticleId, Article] { override def name = "ArticleFuture" - override def fetchOne(id: ArticleId): Task[Option[Article]] = - Task.pure(Option(Article(id.id, "An article with id " + id.id))) - override def fetchMany(ids: NonEmptyList[ArticleId]): Task[Map[ArticleId, Article]] = { - Task.now({ - ids.unwrap.map(tid => (tid, Article(tid.id, "An article with id " + tid.id))).toMap + override def fetchOne(id: ArticleId): Query[Option[Article]] = + Query.async((ok, fail) => { + ok(Option(Article(id.id, "An article with id " + id.id))) + }) + override def fetchMany(ids: NonEmptyList[ArticleId]): Query[Map[ArticleId, Article]] = { + Query.async((ok, fail) => { + ok(ids.unwrap.map(tid => (tid, Article(tid.id, "An article with id " + tid.id))).toMap) }) } } @@ -52,53 +54,42 @@ class FetchFutureTests extends AsyncFreeSpec with Matchers { implicit object AuthorFuture extends DataSource[AuthorId, Author] { override def name = "AuthorFuture" - override def fetchOne(id: AuthorId): Task[Option[Author]] = - Task.now(Option(Author(id.id, "@egg" + id.id))) - override def fetchMany(ids: NonEmptyList[AuthorId]): Task[Map[AuthorId, Author]] = { - Task.now({ - ids.unwrap.map(tid => (tid, Author(tid.id, "@egg" + tid.id))).toMap + override def fetchOne(id: AuthorId): Query[Option[Author]] = + Query.async((ok, fail) => { + ok(Option(Author(id.id, "@egg" + id.id))) + }) + override def fetchMany(ids: NonEmptyList[AuthorId]): Query[Map[AuthorId, Author]] = { + Query.async((ok, fail) => { + ok(ids.unwrap.map(tid => (tid, Author(tid.id, "@egg" + tid.id))).toMap) }) } } def author(a: Article): Fetch[Author] = Fetch(AuthorId(a.author)) - def toFuture[A](task: Task[A]): Future[A] = { - val promise: Promise[A] = Promise() - task.runAsync( - new Callback[A] { - def onSuccess(value: A): Unit = { promise.success(value); () } - def onError(ex: Throwable): Unit = { promise.failure(ex); () } - }) - promise.future - } - - "We can interpret a fetch into a future" in { + "We can interpret an async fetch into a future" in { val fetch: Fetch[Article] = article(1) - - val task: Task[Article] = Fetch.run(fetch) - val fut: Future[Article] = toFuture(task) - + val fut: Future[Article] = Fetch.run[Future](fetch) fut.map(_ shouldEqual Article(1, "An article with id 1")) } - "We can combine several data sources and interpret a fetch into a future" in { + "We can combine several async data sources and interpret a fetch into a future" in { val fetch: Fetch[(Article, Author)] = for { art <- article(1) author <- author(art) } yield (art, author) - val fut: Future[(Article, Author)] = toFuture(Fetch.run(fetch)) + val fut: Future[(Article, Author)] = Fetch.run[Future](fetch) fut.map(_ shouldEqual (Article(1, "An article with id 1"), Author(2, "@egg2"))) } - "We can use combinators in a for comprehension and interpret a fetch into a future" in { + "We can use combinators in a for comprehension and interpret a fetch from async sources into a future" in { val fetch: Fetch[List[Article]] = for { articles <- Fetch.traverse(List(1, 1, 2))(article) } yield articles - val fut: Future[List[Article]] = toFuture(Fetch.run(fetch)) + val fut: Future[List[Article]] = Fetch.run[Future](fetch) fut.map( _ shouldEqual List( @@ -109,14 +100,13 @@ class FetchFutureTests extends AsyncFreeSpec with Matchers { ) } - "We can use combinators and multiple sources in a for comprehension and interpret a fetch into a future" in { + "We can use combinators and multiple sources in a for comprehension 
and interpret a fetch from async sources into a future" in { val fetch = for { articles <- Fetch.traverse(List(1, 1, 2))(article) authors <- Fetch.traverse(articles)(author) } yield (articles, authors) - val fut: Future[(List[Article], List[Author])] = - toFuture(Fetch.run(fetch, InMemoryCache.empty)) + val fut: Future[(List[Article], List[Author])] = Fetch.run[Future](fetch, InMemoryCache.empty) fut.map( _ shouldEqual ( diff --git a/shared/src/test/scala/FetchTests.scala b/shared/src/test/scala/FetchTests.scala index aab8df94..745392ee 100644 --- a/shared/src/test/scala/FetchTests.scala +++ b/shared/src/test/scala/FetchTests.scala @@ -21,9 +21,12 @@ import org.scalatest._ import monix.eval._ import monix.execution.Scheduler -import cats.data.NonEmptyList +import cats.{MonadError} +import cats.data.{NonEmptyList, Xor} import cats.std.list._ + import fetch._ +import fetch.implicits._ object TestHelper { import fetch.syntax._ @@ -33,40 +36,40 @@ object TestHelper { case class One(id: Int) implicit object OneSource extends DataSource[One, Int] { override def name = "OneSource" - override def fetchOne(id: One): Task[Option[Int]] = { - Task.pure(Option(id.id)) + override def fetchOne(id: One): Query[Option[Int]] = { + Query.now(Option(id.id)) } - override def fetchMany(ids: NonEmptyList[One]): Task[Map[One, Int]] = - Task.pure(ids.unwrap.map(one => (one, one.id)).toMap) + override def fetchMany(ids: NonEmptyList[One]): Query[Map[One, Int]] = + Query.now(ids.unwrap.map(one => (one, one.id)).toMap) } def one(id: Int): Fetch[Int] = Fetch(One(id)) case class AnotherOne(id: Int) implicit object AnotheroneSource extends DataSource[AnotherOne, Int] { override def name = "AnotherOneSource" - override def fetchOne(id: AnotherOne): Task[Option[Int]] = - Task.pure(Option(id.id)) - override def fetchMany(ids: NonEmptyList[AnotherOne]): Task[Map[AnotherOne, Int]] = - Task.pure(ids.unwrap.map(anotherone => (anotherone, anotherone.id)).toMap) + override def fetchOne(id: AnotherOne): Query[Option[Int]] = + Query.now(Option(id.id)) + override def fetchMany(ids: NonEmptyList[AnotherOne]): Query[Map[AnotherOne, Int]] = + Query.now(ids.unwrap.map(anotherone => (anotherone, anotherone.id)).toMap) } def anotherOne(id: Int): Fetch[Int] = Fetch(AnotherOne(id)) case class Many(n: Int) implicit object ManySource extends DataSource[Many, List[Int]] { override def name = "ManySource" - override def fetchOne(id: Many): Task[Option[List[Int]]] = - Task.pure(Option(0 until id.n toList)) - override def fetchMany(ids: NonEmptyList[Many]): Task[Map[Many, List[Int]]] = - Task.pure(ids.unwrap.map(m => (m, 0 until m.n toList)).toMap) + override def fetchOne(id: Many): Query[Option[List[Int]]] = + Query.now(Option(0 until id.n toList)) + override def fetchMany(ids: NonEmptyList[Many]): Query[Map[Many, List[Int]]] = + Query.now(ids.unwrap.map(m => (m, 0 until m.n toList)).toMap) } case class Never() implicit object NeverSource extends DataSource[Never, Int] { override def name = "NeverSource" - override def fetchOne(id: Never): Task[Option[Int]] = - Task.pure(None) - override def fetchMany(ids: NonEmptyList[Never]): Task[Map[Never, Int]] = - Task.pure(Map.empty[Never, Int]) + override def fetchOne(id: Never): Query[Option[Int]] = + Query.now(None) + override def fetchMany(ids: NonEmptyList[Never]): Query[Map[Never, Int]] = + Query.now(Map.empty[Never, Int]) } def many(id: Int): Fetch[List[Int]] = Fetch(Many(id)) @@ -96,24 +99,14 @@ object TestHelper { case other => false } ) - - def toFuture[A](task: Task[A])( - implicit s: 
Scheduler - ): Future[A] = { - val promise: Promise[A] = Promise() - task.runAsync( - new Callback[A] { - def onSuccess(value: A): Unit = { promise.trySuccess(value); () } - def onError(ex: Throwable): Unit = { promise.tryFailure(ex); () } - }) - promise.future - } } class FetchSyntaxTests extends AsyncFreeSpec with Matchers { import fetch.syntax._ import TestHelper._ + val ME = implicitly[FetchMonadError[Future]] + implicit def executionContext = Scheduler.Implicits.global override def newInstance = new FetchSyntaxTests @@ -122,9 +115,9 @@ class FetchSyntaxTests extends AsyncFreeSpec with Matchers { val fetch: Fetch[(Int, List[Int])] = (one(1) |@| many(3)).tupled - val task = Fetch.runEnv(fetch) + val fut = Fetch.runEnv[Future](fetch) - toFuture(task).map(env => { + fut.map(env => { concurrent(env.rounds).size shouldEqual 1 }) } @@ -134,9 +127,10 @@ class FetchSyntaxTests extends AsyncFreeSpec with Matchers { val fetch: Fetch[Int] = Fetch.pure((x: Int, y: Int) => x + y).ap2(one(1), one(2)) - val task = Fetch.runEnv(fetch) + val fut = Fetch.runEnv[Future](fetch) - toFuture(task).map(env => { + fut.map( + env => { val rounds = env.rounds val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) @@ -151,83 +145,81 @@ class FetchSyntaxTests extends AsyncFreeSpec with Matchers { "`fetch` syntax allows lifting of any `Throwable` as a failure on a fetch" in { case object Ex extends RuntimeException - val e1 = Fetch - .run(Fetch.error(Ex)) - .onErrorRecoverWith({ - case Ex => Task.now("thrown") - }) + val fut1 = Fetch.run[Future](Fetch.error(Ex): Fetch[Int]) + val fut2 = Fetch.run[Future](Ex.fetch: Fetch[Int]) - val e2 = Fetch - .run(Ex.fetch) - .onErrorRecoverWith({ - case Ex => Task.now("thrown") - }) + val e1 = ME.handleErrorWith(fut1)(err => Future.successful(42)) + val e2 = ME.handleErrorWith(fut2)(err => Future.successful(42)) - toFuture(Task.mapBoth(e1, e2)(_ == _)).map(_ shouldEqual true) + ME.map2(e1, e2)(_ shouldEqual _) } "`join` syntax is equivalent to `Fetch#join`" in { val join1 = Fetch.join(one(1), many(3)) val join2 = one(1).join(many(3)) - toFuture(Task.mapBoth(Fetch.run(join1), Fetch.run(join2))(_ == _)).map(_ shouldEqual true) + ME.map2(Fetch.run[Future](join1), Fetch.run[Future](join2))(_ shouldEqual _) } "`runF` syntax is equivalent to `Fetch#runFetch`" in { - val rf1 = Fetch.runFetch(1.fetch) - val rf2 = 1.fetch.runF + val rf1 = Fetch.runFetch[Future](1.fetch) + val rf2 = 1.fetch.runF[Future] - toFuture(Task.mapBoth(rf1, rf2)(_ == _)).map(_ shouldEqual true) + ME.map2(rf1, rf2)(_ shouldEqual _) } "`runE` syntax is equivalent to `Fetch#runEnv`" in { - val rf1 = Fetch.runEnv(1.fetch) - val rf2 = 1.fetch.runE + val rf1 = Fetch.runEnv[Future](1.fetch) + val rf2 = 1.fetch.runE[Future] - toFuture(Task.mapBoth(rf1, rf2)(_ == _)).map(_ shouldEqual true) + ME.map2(rf1, rf2)(_ shouldEqual _) } "`runA` syntax is equivalent to `Fetch#run`" in { - val rf1 = Fetch.run(1.fetch) - val rf2 = 1.fetch.runA + val rf1 = Fetch.run[Future](1.fetch) + val rf2 = 1.fetch.runA[Future] - toFuture(Task.mapBoth(rf1, rf2)(_ == _)).map(_ shouldEqual true) + ME.map2(rf1, rf2)(_ shouldEqual _) } } class FetchTests extends AsyncFreeSpec with Matchers { import TestHelper._ + val ME = implicitly[FetchMonadError[Future]] + implicit def executionContext = Scheduler.Implicits.global override def newInstance = new FetchTests "We can lift plain values to Fetch" in { val fetch: Fetch[Int] = Fetch.pure(42) - Fetch.run(fetch).coeval.value shouldEqual Right(42) + Fetch.run[Future](fetch).map(_ 
shouldEqual 42) } "Data sources with errors throw fetch failures" in { val fetch: Fetch[Int] = Fetch(Never()) - - intercept[FetchFailure] { - Fetch.runEnv(fetch).coeval.value - } match { - case FetchFailure(env) => { - env.rounds.headOption match { - case Some(Round(_, _, OneRound(id), _, _, _)) => id shouldEqual Never() - case _ => fail("Expected Some(Round(_,_, Oneround(id),_,_,_))") - } - } - } + val fut = Fetch.runEnv[Future](fetch) + + ME.attempt(fut) + .map(xor => + xor match { + case Xor.Left(FetchFailure(env)) => { + env.rounds.headOption match { + case Some(Round(_, _, OneRound(Never()), _, _, _)) => assert(true) + case _ => fail("Should've thrown a fetch failure") + } + } + case _ => fail("Should've thrown a fetch failure") + }) } "Data sources with errors throw fetch failures that can be handled" in { val fetch: Fetch[Int] = Fetch(Never()) - - Fetch.run(fetch).onErrorHandleWith(err => Task.now(42)).coeval.value shouldEqual Right(42) + val fut = Fetch.run[Future](fetch) + ME.handleErrorWith(fut)(err => Future.successful(42)).map(_ shouldEqual 42) } "Data sources with errors and cached values throw fetch failures with the cache" in { @@ -237,7 +229,7 @@ class FetchTests extends AsyncFreeSpec with Matchers { ) intercept[FetchFailure] { - Fetch.run(fetch, cache).coeval.value + Fetch.run[Task](fetch, cache).coeval.value } match { case FetchFailure(env) => env.cache shouldEqual cache } @@ -248,29 +240,35 @@ class FetchTests extends AsyncFreeSpec with Matchers { val cache = InMemoryCache( NeverSource.identity(Never()) -> 1 ) - Fetch.run(fetch, cache).coeval.value shouldEqual Right(1) + Fetch.run[Future](fetch, cache).map(_ shouldEqual 1) } "We can lift errors to Fetch" in { val fetch: Fetch[Int] = Fetch.error(NotFound()) - intercept[NotFound] { - Fetch.run(fetch).coeval.value - } + + ME.attempt(Fetch.run[Future](fetch)) + .map(xor => + xor match { + case Xor.Left(NotFound()) => assert(true) + case _ => fail("Should've thrown NotFound exception") + }) } "We can lift handle and recover from errors in Fetch" in { - val fetch: Fetch[Int] = Fetch.error(NotFound()) + import cats.syntax.applicativeError._ - Fetch.run(fetch).onErrorHandleWith(err => Task.pure(42)).coeval.value shouldEqual Right(42) + val fetch: Fetch[Int] = Fetch.error(NotFound()) + val fut = Fetch.run[Future](fetch) + ME.handleErrorWith(fut)(err => Future.successful(42)).map(_ shouldEqual 42) } "We can lift values which have a Data Source to Fetch" in { - Fetch.run(one(1)).coeval.value shouldEqual Right(1) + Fetch.run[Future](one(1)).map(_ shouldEqual 1) } "We can map over Fetch values" in { val fetch = one(1).map(_ + 1) - Fetch.run(fetch).coeval.value shouldEqual Right(2) + Fetch.run[Future](fetch).map(_ shouldEqual 2) } "We can use fetch inside a for comprehension" in { @@ -279,7 +277,7 @@ class FetchTests extends AsyncFreeSpec with Matchers { t <- one(2) } yield (o, t) - Fetch.run(fetch).coeval.value shouldEqual Right((1, 2)) + Fetch.run[Future](fetch).map(_ shouldEqual (1, 2)) } "Monadic bind implies sequential execution" in { @@ -288,7 +286,7 @@ class FetchTests extends AsyncFreeSpec with Matchers { t <- one(2) } yield (o, t) - Fetch.runEnv(fetch).coeval.value.right.map(_.rounds.size) shouldEqual Right(2) + Fetch.runEnv[Future](fetch).map(_.rounds.size shouldEqual 2) } "We can mix data sources" in { @@ -297,25 +295,25 @@ class FetchTests extends AsyncFreeSpec with Matchers { m <- many(3) } yield (o, m) - Fetch.run(fetch).coeval.value shouldEqual Right((1, List(0, 1, 2))) + Fetch.run[Future](fetch).map(_ shouldEqual (1, 
List(0, 1, 2))) } "We can use Fetch as a cartesian" in { import cats.syntax.cartesian._ val fetch: Fetch[(Int, List[Int])] = (one(1) |@| many(3)).tupled - val task = Fetch.run(fetch) + val fut = Fetch.run[Future](fetch) - toFuture(task).map(_ shouldEqual (1, List(0, 1, 2))) + fut.map(_ shouldEqual (1, List(0, 1, 2))) } "We can use Fetch as an applicative" in { import cats.syntax.cartesian._ val fetch: Fetch[Int] = (one(1) |@| one(2) |@| one(3)).map(_ + _ + _) - val task = Fetch.run(fetch) + val fut = Fetch.run[Future](fetch) - toFuture(task).map(_ shouldEqual 6) + fut.map(_ shouldEqual 6) } "We can traverse over a list with a Fetch for each element" in { @@ -327,8 +325,8 @@ class FetchTests extends AsyncFreeSpec with Matchers { ones <- manies.traverse(one) } yield ones - val task = Fetch.run(fetch) - toFuture(task).map(_ shouldEqual List(0, 1, 2)) + val fut = Fetch.run[Future](fetch) + fut.map(_ shouldEqual List(0, 1, 2)) } "Traversals are implicitly concurrent" in { @@ -340,67 +338,79 @@ class FetchTests extends AsyncFreeSpec with Matchers { ones <- manies.traverse(one) } yield ones - val task = Fetch.runEnv(fetch) - - toFuture(task).map(env => { - concurrent(env.rounds).size shouldEqual 1 - }) + Fetch + .runEnv[Future](fetch) + .map(env => { + concurrent(env.rounds).size shouldEqual 1 + }) } "The product of two fetches implies parallel fetching" in { val fetch: Fetch[(Int, List[Int])] = Fetch.join(one(1), many(3)) - val task = Fetch.runEnv(fetch) - - toFuture(task).map(env => { - concurrent(env.rounds).size shouldEqual 1 - }) + Fetch + .runEnv[Future](fetch) + .map(env => { + concurrent(env.rounds).size shouldEqual 1 + }) } "Concurrent fetching calls batches only wen it can" in { val fetch: Fetch[(Int, List[Int])] = Fetch.join(one(1), many(3)) - val task = Fetch.runEnv(fetch) - - toFuture(task).map(env => { - totalBatches(env.rounds) shouldEqual 0 - }) + Fetch + .runEnv[Future](fetch) + .map(env => { + totalBatches(env.rounds) shouldEqual 0 + }) } "If a fetch fails in the left hand of a product the product will fail" in { val fetch: Fetch[(Int, List[Int])] = Fetch.join(Fetch.error(NotFound()), many(3)) - val task = Fetch.run(fetch) + val fut = Fetch.run[Future](fetch) - toFuture(task.onErrorRecoverWith({ - case ex: NotFound => Task.now("not found") - })).map(_ shouldEqual "not found") + ME.attempt(Fetch.run[Future](fetch)) + .map(xor => + xor match { + case Xor.Left(NotFound()) => assert(true) + case _ => fail("Should've thrown NotFound exception") + }) } "If a fetch fails in the right hand of a product the product will fail" in { val fetch: Fetch[(List[Int], Int)] = Fetch.join(many(3), Fetch.error(NotFound())) - val task = Fetch.run(fetch) + val fut = Fetch.run[Future](fetch) - toFuture(task.onErrorRecoverWith({ - case ex: NotFound => Task.now("not found") - })).map(_ shouldEqual "not found") + ME.attempt(Fetch.run[Future](fetch)) + .map(xor => + xor match { + case Xor.Left(NotFound()) => assert(true) + case _ => fail("Should've thrown NotFound exception") + }) } "If there is a missing identity in the left hand of a product the product will fail" in { val fetch: Fetch[(Int, List[Int])] = Fetch.join(Fetch(Never()), many(3)) - val task = Fetch.run(fetch) + val fut = Fetch.run[Future](fetch) - toFuture(task.onErrorRecoverWith({ - case ex: FetchFailure => Task.now("fail!") - })).map(_ shouldEqual "fail!") + ME.attempt(Fetch.run[Future](fetch)) + .map(xor => + xor match { + case Xor.Left(FetchFailure(_)) => assert(true) + case _ => fail("Should've thrown a fetch failure") + }) } "If there is 
a missing identity in the right hand of a product the product will fail" in { val fetch: Fetch[(List[Int], Int)] = Fetch.join(many(3), Fetch(Never())) - val task = Fetch.run(fetch) + val fut = Fetch.run[Future](fetch) - toFuture(task.onErrorRecoverWith({ - case ex: FetchFailure => Task.now("fail!") - })).map(_ shouldEqual "fail!") + ME.attempt(fut) + .map(xor => + xor match { + case Xor.Left(FetchFailure(_)) => assert(true) + case _ => fail("Should've thrown a fetch failure") + }) } "The product of concurrent fetches implies everything fetched concurrently" in { @@ -412,14 +422,14 @@ class FetchTests extends AsyncFreeSpec with Matchers { one(4) ) - val task = Fetch.runEnv(fetch) + Fetch + .runEnv[Future](fetch) + .map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) - toFuture(task).map(env => { - val rounds = env.rounds - val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) - - stats shouldEqual (1, 1, 4) - }) + stats shouldEqual (1, 1, 4) + }) } "The product of concurrent fetches of the same type implies everything fetched in a single batch" in { @@ -439,14 +449,14 @@ class FetchTests extends AsyncFreeSpec with Matchers { one(3) ) - val task = Fetch.runEnv(fetch) + Fetch + .runEnv[Future](fetch) + .map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) - toFuture(task).map(env => { - val rounds = env.rounds - val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) - - stats shouldEqual (2, 1, 4) - }) + stats shouldEqual (2, 1, 4) + }) } "Every level of joined concurrent fetches is combined and batched" in { @@ -463,14 +473,14 @@ class FetchTests extends AsyncFreeSpec with Matchers { } yield c ) - val task = Fetch.runEnv(fetch) + Fetch + .runEnv[Future](fetch) + .map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) - toFuture(task).map(env => { - val rounds = env.rounds - val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) - - stats shouldEqual (3, 3, 6) - }) + stats shouldEqual (3, 3, 6) + }) } "Every level of sequenced concurrent of concurrent fetches is batched" in { @@ -490,27 +500,27 @@ class FetchTests extends AsyncFreeSpec with Matchers { Fetch.sequence(List(one(15), one(16), one(17))) ) - val task = Fetch.runEnv(fetch) - - toFuture(task).map(env => { - val rounds = env.rounds - val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) + Fetch + .runEnv[Future](fetch) + .map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds), totalFetched(rounds)) - stats shouldEqual (3, 3, 9 + 4 + 6) - }) + stats shouldEqual (3, 3, 9 + 4 + 6) + }) } "The product of two fetches from the same data source implies batching" in { val fetch: Fetch[(Int, Int)] = Fetch.join(one(1), one(3)) - val task = Fetch.runEnv(fetch) - - toFuture(task).map(env => { - val rounds = env.rounds - val stats = (concurrent(rounds).size, totalBatches(rounds)) + Fetch + .runEnv[Future](fetch) + .map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds)) - stats shouldEqual (1, 1) - }) + stats shouldEqual (1, 1) + }) } "We can depend on previous computations of Fetch values" in { @@ -519,60 +529,56 @@ class FetchTests extends AsyncFreeSpec with Matchers { t <- one(o + 1) } yield o + t - Fetch.run(fetch).coeval.value shouldEqual 
Right(3) + Fetch.run[Future](fetch).map(_ shouldEqual 3) } "We can collect a list of Fetch into one" in { val sources: List[Fetch[Int]] = List(one(1), one(2), one(3)) val fetch: Fetch[List[Int]] = Fetch.sequence(sources) - val task = Fetch.run(fetch) - - toFuture(task).map(_ shouldEqual List(1, 2, 3)) + Fetch.run[Future](fetch).map(_ shouldEqual List(1, 2, 3)) } "We can collect a list of Fetches with heterogeneous sources" in { val sources: List[Fetch[Int]] = List(one(1), one(2), one(3), anotherOne(4), anotherOne(5)) val fetch: Fetch[List[Int]] = Fetch.sequence(sources) - val task = Fetch.run(fetch) - - toFuture(task).map(_ shouldEqual List(1, 2, 3, 4, 5)) + Fetch.run[Future](fetch).map(_ shouldEqual List(1, 2, 3, 4, 5)) } "Sequenced fetches are run concurrently" in { val sources: List[Fetch[Int]] = List(one(1), one(2), one(3), anotherOne(4), anotherOne(5)) val fetch: Fetch[List[Int]] = Fetch.sequence(sources) - val task = Fetch.runEnv(fetch) - - toFuture(task).map(env => { - val rounds = env.rounds - val stats = (concurrent(rounds).size, totalBatches(rounds)) + Fetch + .runEnv[Future](fetch) + .map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalBatches(rounds)) - stats shouldEqual (1, 2) - }) + stats shouldEqual (1, 2) + }) } "Sequenced fetches are deduped" in { val sources: List[Fetch[Int]] = List(one(1), one(2), one(1)) val fetch: Fetch[List[Int]] = Fetch.sequence(sources) - val task = Fetch.runEnv(fetch) + Fetch + .runEnv[Future](fetch) + .map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalFetched(rounds)) - toFuture(task).map(env => { - val rounds = env.rounds - val stats = (concurrent(rounds).size, totalFetched(rounds)) - - stats shouldEqual (1, 2) - }) + stats shouldEqual (1, 2) + }) } "Sequenced fetches are not asked for when cached" in { val sources: List[Fetch[Int]] = List(one(1), one(2), one(3), one(4)) val fetch: Fetch[List[Int]] = Fetch.sequence(sources) - val task = Fetch.runEnv( + val fut = Fetch.runEnv[Future]( fetch, InMemoryCache( OneSource.identity(One(1)) -> 1, @@ -580,7 +586,8 @@ class FetchTests extends AsyncFreeSpec with Matchers { ) ) - toFuture(task).map(env => { + fut.map( + env => { val rounds = env.rounds val stats = (concurrent(rounds).size, totalFetched(rounds)) @@ -591,32 +598,30 @@ class FetchTests extends AsyncFreeSpec with Matchers { "We can collect the results of a traversal" in { val fetch = Fetch.traverse(List(1, 2, 3))(one) - val task = Fetch.run(fetch) - - toFuture(task).map(_ shouldEqual List(1, 2, 3)) + Fetch.run[Future](fetch).map(_ shouldEqual List(1, 2, 3)) } "Traversals are run concurrently" in { val fetch = Fetch.traverse(List(1, 2, 3))(one) - val task = Fetch.runEnv(fetch) - - toFuture(task).map(env => { - concurrent(env.rounds).size shouldEqual 1 - }) + Fetch + .runEnv[Future](fetch) + .map(env => { + concurrent(env.rounds).size shouldEqual 1 + }) } "Duplicated sources are only fetched once" in { val fetch = Fetch.traverse(List(1, 2, 1))(one) - val task = Fetch.runEnv(fetch) - - toFuture(task).map(env => { - val rounds = env.rounds - val stats = (concurrent(rounds).size, totalFetched(rounds)) + Fetch + .runEnv[Future](fetch) + .map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalFetched(rounds)) - stats shouldEqual (1, 2) - }) + stats shouldEqual (1, 2) + }) } "Sources that can be fetched concurrently inside a for comprehension will be" in { @@ -625,14 +630,14 @@ class FetchTests extends AsyncFreeSpec with Matchers { result <- 
Fetch.traverse(v)(one) } yield result - val task = Fetch.runEnv(fetch) - - toFuture(task).map(env => { - val rounds = env.rounds - val stats = (concurrent(rounds).size, totalFetched(rounds)) + Fetch + .runEnv[Future](fetch) + .map(env => { + val rounds = env.rounds + val stats = (concurrent(rounds).size, totalFetched(rounds)) - stats shouldEqual (1, 2) - }) + stats shouldEqual (1, 2) + }) } "Elements are cached and thus not fetched more than once" in { @@ -647,13 +652,13 @@ class FetchTests extends AsyncFreeSpec with Matchers { _ <- one(1) } yield aOne + anotherOne - val task = Fetch.runEnv(fetch) - - toFuture(task).map(env => { - val rounds = env.rounds + Fetch + .runEnv[Future](fetch) + .map(env => { + val rounds = env.rounds - totalFetched(rounds) shouldEqual 3 - }) + totalFetched(rounds) shouldEqual 3 + }) } "Elements that are cached won't be fetched" in { @@ -668,7 +673,7 @@ class FetchTests extends AsyncFreeSpec with Matchers { _ <- one(1) } yield aOne + anotherOne - val task = Fetch.runEnv( + val fut = Fetch.runEnv[Future]( fetch, InMemoryCache( OneSource.identity(One(1)) -> 1, @@ -677,7 +682,8 @@ class FetchTests extends AsyncFreeSpec with Matchers { ) ) - toFuture(task).map(env => { + fut.map( + env => { val rounds = env.rounds totalFetched(rounds) shouldEqual 0 @@ -712,7 +718,7 @@ class FetchTests extends AsyncFreeSpec with Matchers { _ <- one(1) } yield aOne + anotherOne - val task = Fetch.runEnv( + val fut = Fetch.runEnv[Future]( fetch, InMemoryCache( OneSource.identity(One(1)) -> 1, @@ -722,7 +728,8 @@ class FetchTests extends AsyncFreeSpec with Matchers { ) ) - toFuture(task).map(env => { + fut.map( + env => { val rounds = env.rounds totalFetched(rounds) shouldEqual 0 @@ -745,9 +752,9 @@ class FetchTests extends AsyncFreeSpec with Matchers { _ <- one(1) } yield aOne + anotherOne - val task = Fetch.runEnv(fetch, ForgetfulCache()) + val fut = Fetch.runEnv[Future](fetch, ForgetfulCache()) - toFuture(task).map(env => { + fut.map(env => { totalFetched(env.rounds) shouldEqual 7 }) } @@ -764,9 +771,9 @@ class FetchTests extends AsyncFreeSpec with Matchers { _ <- one(1) } yield aOne + anotherOne - val task = Fetch.runEnv(fetch, ForgetfulCache()) + val fut = Fetch.runEnv[Future](fetch, ForgetfulCache()) - toFuture(task).map(env => { + fut.map(env => { totalFetched(env.rounds) shouldEqual 10 }) } From 7f9f86a9bd0a7220dc50a2b15133a3e3629d84c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Tue, 31 May 2016 21:29:38 +0200 Subject: [PATCH 13/40] wip update docs --- README.md | 134 ++++--- docs/src/tut/docs.md | 637 +----------------------------- docs/src/tut/index.md | 72 ++-- shared/src/main/scala/fetch.scala | 1 + tut/README.md | 75 ++-- 5 files changed, 170 insertions(+), 749 deletions(-) diff --git a/README.md b/README.md index 66a0c618..627562b7 100644 --- a/README.md +++ b/README.md @@ -46,32 +46,30 @@ Data Sources take two type parameters: ```scala -import monix.eval.Task import cats.data.NonEmptyList trait DataSource[Identity, Result]{ - def fetchOne(id: Identity): Task[Option[Result]] - def fetchMany(ids: NonEmptyList[Identity]): Task[Map[Identity, Result]] + def fetchOne(id: Identity): Query[Option[Result]] + def fetchMany(ids: NonEmptyList[Identity]): Query[Map[Identity, Result]] } ``` We'll implement a dummy data source that can convert integers to strings. For convenience, we define a `fetchString` function that lifts identities (`Int` in our dummy data source) to a `Fetch`. 
```scala -import monix.eval.Task import cats.data.NonEmptyList import cats.std.list._ import fetch._ implicit object ToStringSource extends DataSource[Int, String]{ - override def fetchOne(id: Int): Task[Option[String]] = { - Task.now({ + override def fetchOne(id: Int): Query[Option[String]] = { + Query.later({ println(s"[${Thread.currentThread.getId}] One ToString $id") Option(id.toString) }) } - override def fetchMany(ids: NonEmptyList[Int]): Task[Map[Int, String]] = { - Task.now({ + override def fetchMany(ids: NonEmptyList[Int]): Query[Map[Int, String]] = { + Query.later({ println(s"[${Thread.currentThread.getId}] Many ToString $ids") ids.unwrap.map(i => (i, i.toString)).toMap }) @@ -94,25 +92,45 @@ val fetchOne: Fetch[String] = fetchString(1) Now that we have created a fetch, we can run it to a `Task`. Note that when we create a task we are not computing any value yet. Having a `Task` instance allows us to try to run it synchronously or asynchronously, choosing a scheduler. ```scala -val result: Task[String] = fetchOne.runA -// result: monix.eval.Task[String] = BindSuspend(,) -``` +import fetch.implicits._ +// import fetch.implicits._ -We can try to run `result` synchronously with `Task#coeval`. +import scala.concurrent._ +// import scala.concurrent._ -```scala -import monix.execution.Scheduler.Implicits.global -// import monix.execution.Scheduler.Implicits.global +import ExecutionContext.Implicits.global +// import ExecutionContext.Implicits.global -result.coeval.value -// [62] One ToString 1 -// res3: Either[monix.execution.CancelableFuture[String],String] = Right(1) +val result: Future[String] = fetchOne.runA[Future] +// result: scala.concurrent.Future[String] = List() ``` Since we calculated the results eagerly using `Task#now`, we can run this fetch synchronously. +```scala +import scala.concurrent.duration._ +// [198] One ToString 1 +// import scala.concurrent.duration._ + +Await.result(result, Duration.Inf) +// res3: String = 1 +``` + As you can see in the previous example, the `ToStringSource` is queried once to get the value of 1. + + +```scala +import scala.concurrent._ +// import scala.concurrent._ + +import scala.concurrent.duration._ +// import scala.concurrent.duration._ + +def await[A](t: Future[A]): A = Await.result(t, Duration.Inf) +// await: [A](t: scala.concurrent.Future[A])A +``` + ## Batching Multiple fetches to the same data source are automatically batched. For illustrating it, we are going to compose three independent fetch results as a tuple. @@ -122,20 +140,18 @@ import cats.syntax.cartesian._ // import cats.syntax.cartesian._ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled -// fetchThree: fetch.Fetch[(String, String, String)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List(2, 3)),ToStringSource$@5491cfd8)))),),) +// fetchThree: fetch.Fetch[(String, String, String)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List(2, 3)),ToStringSource$@3cd0bf6b)))),),) -val result: Task[(String, String, String)] = fetchThree.runA -// result: monix.eval.Task[(String, String, String)] = BindSuspend(,) +val result: Future[(String, String, String)] = fetchThree.runA[Future] +// result: scala.concurrent.Future[(String, String, String)] = List() ``` - - -When executing the above fetch, note how the three identities get batched and the data source is only queried once. Let's pretend we have a function from `Task[A]` to `A` called `await`. 
+When executing the above fetch, note how the three identities get batched and the data source is only queried once. Let's pretend we have a function from `Future[A]` to `A` called `await`. ```scala await(result) -// [62] Many ToString OneAnd(1,List(2, 3)) +// [188] Many ToString OneAnd(1,List(2, 3)) // res4: (String, String, String) = (1,2,3) ``` @@ -143,44 +159,46 @@ await(result) If we combine two independent fetches from different data sources, the fetches can be run in parallel. First, let's add a data source that fetches a string's size. -This time, instead of creating the results with `Task#now` we are going to do it with `Task#apply` for emulating an asynchronous data source. +This time, instead of creating the results with `Query#later` we are going to do it with `Query#async` for emulating an asynchronous data source. ```scala -implicit object LengthSource extends DataSource[String, Int]{ - override def fetchOne(id: String): Task[Option[Int]] = { - Task({ - println(s"[${Thread.currentThread.getId}] One Length $id") - Option(id.size) - }) - } - override def fetchMany(ids: NonEmptyList[String]): Task[Map[String, Int]] = { - Task({ - println(s"[${Thread.currentThread.getId}] Many Length $ids") - ids.unwrap.map(i => (i, i.size)).toMap - }) - } -} - -def fetchLength(s: String): Fetch[Int] = Fetch(s) +scala> implicit object LengthSource extends DataSource[String, Int]{ + | override def fetchOne(id: String): Query[Option[Int]] = { + | Query.async((ok, fail) => { + | println(s"[${Thread.currentThread.getId}] One Length $id") + | ok(Option(id.size)) + | }) + | } + | override def fetchMany(ids: NonEmptyList[String]): Query[Map[String, Int]] = { + | Query.async((ok, fail) => { + | println(s"[${Thread.currentThread.getId}] Many Length $ids") + | ok(ids.unwrap.map(i => (i, i.size)).toMap) + | }) + | } + | } +defined object LengthSource + +scala> def fetchLength(s: String): Fetch[Int] = Fetch(s) +fetchLength: (s: String)fetch.Fetch[Int] ``` And now we can easily receive data from the two sources in a single fetch. ```scala -val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled -// fetchMulti: fetch.Fetch[(String, Int)] = Gosub(Gosub(Suspend(Concurrent(List(FetchOne(1,ToStringSource$@5491cfd8), FetchOne(one,LengthSource$@36f594ec)))),),) +scala> val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled +fetchMulti: fetch.Fetch[(String, Int)] = Gosub(Gosub(Suspend(Concurrent(List(FetchOne(1,ToStringSource$@3cd0bf6b), FetchOne(one,LengthSource$@cf8c35b)))),),) -val result = fetchMulti.runA -// result: monix.eval.Task[(String, Int)] = BindSuspend(,) +scala> val result = fetchMulti.runA[Future] +[199] One ToString 1 +result: scala.concurrent.Future[(String, Int)] = List() ``` Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. ```scala -await(result) -// [62] One ToString 1 -// [56] One Length one -// res6: (String, Int) = (1,3) +scala> await(result) +[198] One Length one +res5: (String, Int) = (1,3) ``` ## Caching @@ -188,17 +206,17 @@ await(result) When fetching an identity, subsequent fetches for the same identity are cached. Let's try creating a fetch that asks for the same identity twice. 
```scala -val fetchTwice: Fetch[(String, String)] = for { - one <- fetchString(1) - two <- fetchString(1) -} yield (one, two) -// fetchTwice: fetch.Fetch[(String, String)] = Gosub(Suspend(FetchOne(1,ToStringSource$@5491cfd8)),) +scala> val fetchTwice: Fetch[(String, String)] = for { + | one <- fetchString(1) + | two <- fetchString(1) + | } yield (one, two) +fetchTwice: fetch.Fetch[(String, String)] = Gosub(Suspend(FetchOne(1,ToStringSource$@3cd0bf6b)),) ``` While running it, notice that the data source is only queried once. The next time the identity is requested it's served from the cache. ```scala -val result: (String, String) = await(fetchTwice.runA) -// [62] One ToString 1 -// result: (String, String) = (1,1) +scala> val result: (String, String) = await(fetchTwice.runA[Future]) +[199] One ToString 1 +result: (String, String) = (1,1) ``` diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md index 9ae37a70..245dd971 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -64,12 +64,11 @@ If something is missing in Fetch that stops you from using it we'd appreciate if In order to tell Fetch how to retrieve data, we must implement the `DataSource` typeclass. ```scala -import monix.eval.Task import cats.data.NonEmptyList trait DataSource[Identity, Result]{ - def fetchOne(id: Identity): Task[Option[Result]] - def fetchMany(ids: NonEmptyList[Identity]): Task[Map[Identity, Result]] + def fetchOne(id: Identity): Query[Option[Result]] + def fetchMany(ids: NonEmptyList[Identity]): Query[Map[Identity, Result]] } ``` @@ -79,15 +78,15 @@ It takes two type parameters: - `Result`: the type of the data we retrieve (a `User` if we were fetching users) There are two methods: `fetchOne` and `fetchMany`. `fetchOne` receives one identity and must return -a [Task](https://github.com/monixio/monix/blob/dd6e47b7b870b38825d516f846f6e074d78d5c40/monix-eval/shared/src/main/scala/monix/eval/Task.scala) containing +a `Query` containing an optional result. Returning an `Option` Fetch can detect whether an identity couldn't be fetched or no longer exists. -`fetchMany` method takes a non-empty list of identities and must return a `Task` containing +`fetchMany` method takes a non-empty list of identities and must return a `Query` containing a map from identities to results. Accepting a list of identities gives Fetch the ability to batch requests to the same data source, and returning a mapping from identities to results, Fetch can detect whenever an identity couldn't be fetched or no longer exists. -Returning `Task` makes it possible to try to run a fetch synchronously or asynchronously, choose a scheduler for the I/O bound nature of reading remote data, error handling, memoization and composability. +Returning `Query` makes it possible to run a fetch independently of the target monad. ## Writing your first data source @@ -102,7 +101,6 @@ case class User(id: UserId, username: String) And now we're ready to write our user data source; we'll emulate a database with an in-memory map. 
 ```tut:silent
-import monix.eval.Task
 import cats.data.NonEmptyList
 import cats.std.list._
 
@@ -115,14 +113,14 @@ val userDatabase: Map[UserId, User] = Map(
 )
 
 implicit object UserSource extends DataSource[UserId, User]{
-  override def fetchOne(id: UserId): Task[Option[User]] = {
-    Task.now({
+  override def fetchOne(id: UserId): Query[Option[User]] = {
+    Query.now({
       println(s"Fetching one user $id")
       userDatabase.get(id)
     })
   }
-  override def fetchMany(ids: NonEmptyList[UserId]): Task[Map[UserId, User]] = {
-    Task.now({
+  override def fetchMany(ids: NonEmptyList[UserId]): Query[Map[UserId, User]] = {
+    Query.now({
       println(s"Fetching many users $ids")
       userDatabase.filterKeys(ids.unwrap.contains)
     })
@@ -142,12 +140,12 @@ def getUser(id: UserId): Fetch[User] = Fetch(id) // or, more explicitly: Fetch(i
 If your data source doesn't support batching, you can use the `DataSource#batchingNotSupported` method as the implementation of `fetchMany`. Note that it will use the `fetchOne` implementation for requesting identities one at a time.
 
-```tut:silent
+```scala
 implicit object UnbatchedSource extends DataSource[Int, Int]{
-  override def fetchOne(id: Int): Task[Option[Int]] = {
-    Task(Option(id))
+  override def fetchOne(id: Int): Query[Option[Int]] = {
+    Query(Option(id))
   }
-  override def fetchMany(ids: NonEmptyList[Int]): Task[Map[Int, Int]] = {
+  override def fetchMany(ids: NonEmptyList[Int]): Query[Map[Int, Int]] = {
     batchingNotSupported(ids)
   }
 }
@@ -163,31 +161,19 @@ dependencies.
 val fetchUser: Fetch[User] = getUser(1)
 ```
 
-A `Fetch` is just a value, and in order to be able to execute it we need to run it to a `Task` first. Running `fetchUser` will give as a `Task[User]`, which we can later execute for the performing the effects of the fetch.
+A `Fetch` is just a value, and in order to be able to execute it we need to run it to a `Future` first. Running `fetchUser` will give us a `Future[User]`.
 
 ```tut:book
-import fetch.syntax._
-
-val user: Task[User] = fetchUser.runA
-```
-
-We'll try to evaluate the Fetch synchronously with `Task#coeval`. `Coeval` is a type similar to `Task` but which can be evaluated synchronously with its `.value` method. Note that for executing tasks a [monix Scheduler](http://monix.io/docs/2x/execution/scheduler.html) must be implicitly found.
-
-```tut:book
-import monix.execution.Scheduler.Implicits.global
+import scala.concurrent._
+import ExecutionContext.Implicits.global
 
-val co = user.coeval
+import fetch.implicits._
+import fetch.syntax._
 
-co.value
+val user: Future[User] = fetchUser.runA[Future]
 ```
 
-In the previous examples, we:
-
-- created a fetch for a `User` using the `getUser` function
-- interpreted the fetch to a `Task[User]` using the syntax `runA` that delegate to `Fetch.run`
-- converted `Task[User]` to `Coeval[Either[CancelableFuture[User], User]]` and evaluated it to a `Right[User]`
-
-As you can see, the fetch was executed in one round to fetch the user and was finished after that.
+TODO
 
 ### Sequencing
 
@@ -203,589 +189,8 @@ val fetchTwoUsers: Fetch[(User, User)] = for {
 When composing fetches with `flatMap` we are telling Fetch that the second one depends on the previous one, so it isn't able to make any optimizations. When running the above fetch, we will query the user data source in two rounds: one for the user with id 1 and another for the user with id 2.
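To make the "no optimizations" point concrete, here is a minimal sketch of a genuinely dependent fetch, reusing the `getUser` function and the `User` type defined above (the derived id `one.id + 1` is only for illustration): the identity requested by the second fetch is not known until the first one completes, so the two requests cannot be batched into a single round.

```scala
val fetchDependent: Fetch[(User, User)] = for {
  one   <- getUser(1)
  other <- getUser(one.id + 1) // this identity depends on the previous result
} yield (one, other)
```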
```tut:book -val result: Task[(User, User)] = fetchTwoUsers.runA +val result: Future[(User, User)] = fetchTwoUsers.runA[Future] ``` Althought `fetchTwoUsers` needs two rounds to complete we still can execute it synchronously: -```tut:book -result.coeval.value -``` - -### Batching - -If we combine two independent requests to the same data source, Fetch will -automatically batch them together into a single request. Applicative operations like the product of two fetches -help us tell the library that those fetches are independent, and thus can be batched if they use the same data source: - -```tut:silent -import cats.syntax.cartesian._ - -val fetchProduct: Fetch[(User, User)] = getUser(1).product(getUser(2)) -``` - -Note how both ids (1 and 2) are requested in a single query to the data source when executing the fetch. - -```tut:book -val result: Task[(User, User)] = fetchProduct.runA -``` - -```tut:invisible -import scala.concurrent._ -import scala.concurrent.duration._ - -def await[A](t: Task[A]): A = Await.result(t.runAsync, Duration.Inf) -``` - -Let's pretend we have a function from `Task[A]` to `A` called `await`. - -```tut:book -await(result) -``` - -### Deduplication - -If two independent requests ask for the same identity, Fetch will detect it and deduplicate the id. - -```tut:silent -val fetchDuped: Fetch[(User, User)] = getUser(1).product(getUser(1)) -``` - -Note that when running the fetch, the identity 1 is only requested once even when it is needed by both fetches. - -```tut:book -val result: Task[(User, User)] = fetchDuped.runA - -await(result) -``` - -### Caching - -During the execution of a fetch, previously requested results are implicitly cached. This allows us to write -fetches in a very modular way, asking for all the data they need as if it -was in memory; furthermore, it also avoids re-fetching an identity that may have changed -during the course of a fetch execution, which can lead to inconsistencies in the data. - -```tut:silent -val fetchCached: Fetch[(User, User)] = for { - aUser <- getUser(1) - anotherUser <- getUser(1) -} yield (aUser, anotherUser) -``` - -The above fetch asks for the same identity multiple times. Let's see what happens when executing it. - -```tut:book -val result: Task[(User, User)] = fetchCached.runA - -await(result) -``` - -As you can see, the `User` with id 1 was fetched only once in a single round-trip. The next -time it was needed we used the cached versions, thus avoiding another request to the user data -source. - -## Combining data from multiple sources - -Now that we know about some of the optimizations that Fetch can perform to read data efficiently, -let's look at how we can combine more than one data source. - - -Imagine that we are rendering a blog and have the following types for posts: - -```tut:silent -type PostId = Int -case class Post(id: PostId, author: UserId, content: String) -``` - -As you can see, every `Post` has an author, but it refers to the author by its id. We'll implement a data source for retrieving a post given a post id. 
- -```tut:silent -val postDatabase: Map[PostId, Post] = Map( - 1 -> Post(1, 2, "An article"), - 2 -> Post(2, 3, "Another article"), - 3 -> Post(3, 4, "Yet another article") -) - -implicit object PostSource extends DataSource[PostId, Post]{ - override def fetchOne(id: PostId): Task[Option[Post]] = { - Task({ - println(s"Fetching one post $id") - postDatabase.get(id) - }) - } - override def fetchMany(ids: NonEmptyList[PostId]): Task[Map[PostId, Post]] = { - Task({ - println(s"Fetching many posts $ids") - postDatabase.filterKeys(ids.unwrap.contains) - }) - } -} - -def getPost(id: PostId): Fetch[Post] = Fetch(id) -``` - -We can also implement a function for fetching a post's author given a post: - -```tut:silent -def getAuthor(p: Post): Fetch[User] = Fetch(p.author) -``` - -Now that we have multiple sources let's mix them in the same fetch. - -```tut:silent -val fetchMulti: Fetch[(Post, User)] = for { - post <- getPost(1) - user <- getAuthor(post) -} yield (post, user) -``` - -We can now run the previous fetch, querying the posts data source first and the user data source afterwards. - -```tut:book -val result: Task[(Post, User)] = fetchMulti.runA - -await(result) -``` - -In the previous example, we fetched a post given its id and then fetched its author. This -data could come from entirely different places, but Fetch makes working with heterogeneous sources -of data very easy. - -### Concurrency - -Combining multiple independent requests to the same data source can have two outcomes: - - - if the data sources are the same, the request is batched - - otherwise, both data sources are queried at the same time - -In the following example we are fetching from different data sources so both requests will be -evaluated together. - -```tut:silent -val fetchConcurrent: Fetch[(Post, User)] = getPost(1).product(getUser(2)) -``` - -The above example combines data from two different sources, and the library knows they are independent. - -```tut:book -val result: Task[(Post, User)] = fetchConcurrent.runA - -await(result) -``` - -Since we are interpreting the fetch to the `Id` monad, that doesn't give us any parallelism; the fetches -will be run sequentially. However, if we interpret it to a `Future` each request will run in its own logical -thread. - -## Combinators - -Besides `flatMap` for sequencing fetches and `product` for running them concurrently, Fetch provides a number of -other combinators. - -### Sequence - -Whenever we have a list of fetches of the same type and want to run them concurrently, we can use the `sequence` -combinator. It takes a `List[Fetch[A]]` and gives you back a `Fetch[List[A]]`, batching the fetches to the same -data source and running fetches to different sources in parallel. Note that the `sequence` combinator is more general and works not only on lists but on any type that has a [Traverse](http://typelevel.org/cats/tut/traverse.html) instance. - -```tut:silent -import cats.std.list._ -import cats.syntax.traverse._ - -val fetchSequence: Fetch[List[User]] = List(getUser(1), getUser(2), getUser(3)).sequence -``` - -Since `sequence` uses applicative operations internally, the library is able to perform optimizations across all the sequenced fetches. - -```tut:book -await(fetchSequence.runA) -``` - -As you can see, requests to the user data source were batched, thus fetching all the data in one round. - -### Traverse - -Another interesting combinator is `traverse`, which is the composition of `map` and `sequence`. 
- -```tut:silent -val fetchTraverse: Fetch[List[User]] = List(1, 2, 3).traverse(getUser) -``` - -As you may have guessed, all the optimizations made by `sequence` still apply when using `traverse`. - -```tut:book -await(fetchTraverse.runA) -``` - -# Caching - -As we have learned, Fetch caches intermediate results implicitly using a cache. You can -provide a prepopulated cache for running a fetch, replay a fetch with the cache of a previous -one, and even implement a custom cache. - -## Prepopulating a cache - -We'll be using the default in-memory cache, prepopulated with some data. The cache key of an identity -is calculated with the `DataSource`'s `identity` method. - -```tut:book -val cache = InMemoryCache(UserSource.identity(1) -> User(1, "@dialelo")) -``` - -We can pass a cache as the second argument when running a fetch with `Fetch.run`. - -```tut:book -await(fetchUser.runA(cache)) -``` - -As you can see, when all the data is cached, no query to the data sources is executed since the results are available -in the cache. - -```tut:silent -val fetchManyUsers: Fetch[List[User]] = List(1, 2, 3).traverse(getUser) -``` - -If only part of the data is cached, the cached data won't be asked for: - -```tut:book -await(fetchManyUsers.runA(cache)) -``` - -## Replaying a fetch without querying any data source - -When running a fetch, we are generally interested in its final result. However, we also have access to the cache -and information about the executed rounds once we run a fetch. Fetch's interpreter keeps its state in an environment -(implementing the `Env` trait), and we can get both the environment and result after running a fetch using `Fetch.runFetch` -instead of `Fetch.run` or `value.runF` via it's implicit syntax. - -Knowing this, we can replay a fetch reusing the cache of a previous one. The replayed fetch won't have to call any of the -data sources. - -```tut:book -val populatedCache = await(fetchManyUsers.runE.map(_.cache)) - -val result: List[User] = await(fetchManyUsers.runA(populatedCache)) -``` - -## Implementing a custom cache - -The default cache is implemented as an immutable in-memory map, but users are free to use their own caches when running a fetch. Your cache should implement the `DataSourceCache` trait, and after that you can pass it to Fetch's `run` methods. - -There is no need for the cache to be mutable since fetch executions run in an interpreter that uses the state monad. Note that the `update` method in the `DataSourceCache` trait yields a new, updated cache. - -```scala -trait DataSourceCache { - def update[A](k: DataSourceIdentity, v: A): DataSourceCache - def get(k: DataSourceIdentity): Option[Any] -} -``` - -Let's implement a cache that forgets everything we store in it. - -```tut:silent -final case class ForgetfulCache() extends DataSourceCache { - override def get(k: DataSourceIdentity): Option[Any] = None - override def update[A](k: DataSourceIdentity, v: A): ForgetfulCache = this -} -``` - -We can now use our implementation of the cache when running a fetch. - -```tut:book -val fetchSameTwice: Fetch[(User, User)] = for { - one <- getUser(1) - another <- getUser(1) -} yield (one, another) - -await(fetchSameTwice.runA(ForgetfulCache())) -``` - -# Error handling - -`Task` provides a number of combinators for dealing with and recovering from errors. One of the most interesting combinators is `attempt`, which given a `Task[A]` yields a `Task[Throwable Xor A]`. Knowing this, we can run fetches -and not worry about exceptions. 
Let's create a fetch that always fails when executed: - -```tut:silent -val fetchError: Fetch[User] = (new Exception("Oh noes")).fetch -``` - -If we try to execute it the exception will be thrown. - -```tut:fail -await(fetchError.runA) -``` - -We can use the `ApplicativeError[Task, Throwable]#attempt` to convert a fetch result into a disjuntion and avoid throwing exceptions. Fetch provides an implicit instance of ApplicativeError, let's import `fetch.implicits._` to have it available. - -```tut:silent -import fetch.implicits._ -import cats.ApplicativeError -import cats.data.Xor -``` - -Now we can convert `Task[User]` into `Task[Throwable Xor User]` and capture exceptions in the left of the disjunction. - -```tut:book -val safeResult: Task[Throwable Xor User] = ApplicativeError[Task, Throwable].attempt(fetchError.runA) -val finalValue: Throwable Xor User = await(safeResult) -``` - -And more succintly with Cat's applicative error syntax. - -```tut:book -import cats.syntax.applicativeError._ - -await(fetchError.runA.attempt) -``` - - -## Missing identities - -You've probably noticed that `DataSource.fetch` takes a list of identities and returns a map of identities to their result, taking -into account the possibility of some identities not being found. Whenever an identity cannot be found, the fetch execution will -fail. - -Whenever a fetch fails, a `FetchFailure` exception is thrown. The `FetchFailure` will have the environment, which gives you information -about the execution of the fetch. - -# Syntax - -## Implicit syntax - -Fetch provides implicit syntax to lift any value to the context of a `Fetch` in addition to the most common used -combinators active within `Fetch` instances. - -### pure - -Plain values can be lifted to the Fetch monad with `value.fetch`: - -```tut:silent -val fetchPure: Fetch[Int] = 42.fetch -``` - -Executing a pure fetch doesn't query any data source and can be run synchronously, as expected. - -```tut:book -fetchPure.runA.coeval.value -``` - -### error - -Errors can also be lifted to the Fetch monad via `exception.fetch`. - -```tut:silent -val fetchFail: Fetch[Int] = Fetch.error(new Exception("Something went terribly wrong")) -``` - -Note that interpreting an errorful fetch to `Task` won't throw the exception until we execute it. - -```tut:fail -await(fetchFail.runA) -``` - -### join - -We can compose two independent fetches with `fetch1.join(fetch2)`. - -```tut:silent -val fetchJoined: Fetch[(Post, User)] = getPost(1).join(getUser(2)) -``` - -If the fetches are to the same data source they will be batched; if they aren't, they will be evaluated at the same time. - -```tut:book -await(fetchJoined.runA) -``` - -### runA - -Run directly any fetch to a `Task` with `fetch1.runA`. - -```tut:book -await(getPost(1).runA) -``` - -### runE - -Extract a fetch an get it's runtime environment `fetch1.runE`. - -```tut:book -await(getPost(1).runE) -``` - -### runF - -Run a fetch obtaining the environment and final value `fetch1.runF`. - -```tut:book -await(getPost(1).runF) -``` - -## Companion object - -We've been using `cats.syntax' and `fetch.syntax` throughout the examples since it's more concise and general than the -methods in the `Fetch` companion object. However, you can use the methods in the companion object -directly. - -Note that using cats syntax gives you a plethora of combinators, much richer that what the companion object provides. 
- -### pure - -Plain values can be lifted to the Fetch monad with `Fetch#pure`: - -```tut:silent -val fetchPure: Fetch[Int] = Fetch.pure(42) -``` - -Executing a pure fetch doesn't query any data source and can be run synchronously, as expected. - -```tut:book -Fetch.run(fetchPure).coeval.value -``` - -### error - -Errors can also be lifted to the Fetch monad via `Fetch#error`. - -```tut:silent -val fetchFail: Fetch[Int] = Fetch.error(new Exception("Something went terribly wrong")) -``` - -Note that interpreting an errorful fetch to `Task` won't throw the exception until we execute it. - -```tut:fail -await(Fetch.run(fetchFail)) -``` - -### join - -We can compose two independent fetches with `Fetch#join`. - -```tut:silent -val fetchJoined: Fetch[(Post, User)] = Fetch.join(getPost(1), getUser(2)) -``` - -If the fetches are to the same data source they will be batched; if they aren't, they will be evaluated at the same time. - -```tut:book -await(Fetch.run(fetchJoined)) -``` - -### sequence - -The `Fetch#sequence` combinator turns a `List[Fetch[A]]` into a `Fetch[List[A]]`, running all the fetches concurrently -and batching when possible. - -```tut:silent -val fetchSequence: Fetch[List[User]] = Fetch.sequence(List(getUser(1), getUser(2), getUser(3))) -``` - -Note that `Fetch#sequence` is not as general as the `sequence` method from `Traverse`, but performs the same optimizations. - -```tut:book -await(Fetch.run(fetchSequence)) -``` - -### traverse - -The `Fetch#traverse` combinator is a combination of `map` and `sequence`. - -```tut:silent -val fetchTraverse: Fetch[List[User]] = Fetch.traverse(List(1, 2, 3))(getUser) -``` - -Note that `Fetch#traverse` is not as general as the `traverse` method from `Traverse`, but performs the same optimizations. - -```tut:book -await(Fetch.run(fetchTraverse)) -``` - -## cats - -Fetch is built using cats' Free monad construction and thus works out of the box with -cats syntax. Using cats' syntax, we can make fetch declarations more concise, without -the need to use the combinators in the `Fetch` companion object. - -Fetch provides its own instance of `Applicative[Fetch]`. Whenever we use applicative -operations on more than one `Fetch`, we know that the fetches are independent meaning -we can perform optimizations such as batching and concurrent requests. - -If we were to use the default `Applicative[Fetch]` operations, which are implemented in terms of `flatMap`, -we wouldn't have information about the independency of multiple fetches. - -### Applicative - -The `|@|` operator allows us to combine multiple independent fetches, even when they -are from different types, and apply a pure function to their results. We can use it -as a more powerful alternative to the `product` method or `Fetch#join`: - -```tut:silent -import cats.syntax.cartesian._ - -val fetchThree: Fetch[(Post, User, Post)] = (getPost(1) |@| getUser(2) |@| getPost(2)).tupled -``` - -Notice how the queries to posts are batched. - -```tut:book -await(fetchThree.runA) -``` - -More interestingly, we can use it to apply a pure function to the results of various -fetches. 
- -```tut:book -val fetchFriends: Fetch[String] = (getUser(1) |@| getUser(2)).map({ (one, other) => - s"${one.username} is friends with ${other.username}" -}) - -await(fetchFriends.runA) -``` - -The above example is equivalent to the following using the `Fetch#join` method: - -```tut:book -val fetchFriends: Fetch[String] = Fetch.join(getUser(1), getUser(2)).map({ case (one, other) => - s"${one.username} is friends with ${other.username}" -}) - -await(fetchFriends.runA) -``` - -# Choosing a scheduler - -The [Monix docs](http://monix.io/docs/2x/execution/scheduler.html) go in great detail about how scheduling works and you should refer there for the documentation. - -## JVM (Scala) - -When reading data in the JVM, you may want to create an unbounded thread pool with `monix.execution.Scheduler.io` for running your fetches. - -```tut:book -import monix.execution.Scheduler - -// unbounded thread pool for I/O bound tasks -implicit val ioScheduler: Scheduler = Scheduler.io(name="my-io-scheduler") - -await(fetchFriends.runA) -``` - -## JS (Scala.js) - -When needing to choose a scheduler in a JS environment with Scala.js refer to the [monix docs](http://monix.io/docs/2x/execution/scheduler.html#builders-for-javascript). - -# Resources - -- [Code](https://github.com/47deg/fetch) on GitHub. -- [Documentation site](http://47deg.github.io/fetch/) -- [Fetch: Simple & Efficient data access](https://www.youtube.com/watch?v=45fcKYFb0EU) talk at [Typelevel Summit in Oslo](http://typelevel.org/event/2016-05-summit-oslo/) - -# Acknowledgements - -Fetch stands on the shoulders of giants: - -- [Haxl](https://github.com/facebook/haxl) is Facebook's implementation (Haskell) of the [original paper Fetch is based on](http://community.haskell.org/~simonmar/papers/haxl-icfp14.pdf). -- [Clump](http://getclump.io) has inspired the signature of the `DataSource#fetch` method. -- [Stitch](https://engineering.twitter.com/university/videos/introducing-stitch) is an in-house Twitter library that is not open source but has inspired Fetch's high-level API. -- [Cats](http://typelevel.org/cats/), a library for functional programming in Scala. -- [Monix](https://monix.io) high-performance and multiplatform (Scala / Scala.js) asynchronous programming library. diff --git a/docs/src/tut/index.md b/docs/src/tut/index.md index d2785d00..3c40143d 100644 --- a/docs/src/tut/index.md +++ b/docs/src/tut/index.md @@ -49,32 +49,32 @@ Data Sources take two type parameters: ```scala -import monix.eval.Task import cats.data.NonEmptyList trait DataSource[Identity, Result]{ - def fetchOne(id: Identity): Task[Option[Result]] - def fetchMany(ids: NonEmptyList[Identity]): Task[Map[Identity, Result]] + def fetchOne(id: Identity): Query[Option[Result]] + def fetchMany(ids: NonEmptyList[Identity]): Query[Map[Identity, Result]] } ``` +Note that when we create a query we can choose to compute its result right away (`Query#now`), defer its evaluation (`Query#later`) or make it asynchronous (`Query#async`). Returning `Query` instances from the fetch methods allows us to abstract from the target result type and to run it synchronously or asynchronously. + We'll implement a dummy data source that can convert integers to strings. For convenience, we define a `fetchString` function that lifts identities (`Int` in our dummy data source) to a `Fetch`. 
```tut:silent -import monix.eval.Task import cats.data.NonEmptyList import cats.std.list._ import fetch._ implicit object ToStringSource extends DataSource[Int, String]{ - override def fetchOne(id: Int): Task[Option[String]] = { - Task.now({ + override def fetchOne(id: Int): Query[Option[String]] = { + Query.later({ println(s"[${Thread.currentThread.getId}] One ToString $id") Option(id.toString) }) } - override def fetchMany(ids: NonEmptyList[Int]): Task[Map[Int, String]] = { - Task.now({ + override def fetchMany(ids: NonEmptyList[Int]): Query[Map[Int, String]] = { + Query.later({ println(s"[${Thread.currentThread.getId}] Many ToString $ids") ids.unwrap.map(i => (i, i.toString)).toMap }) @@ -94,66 +94,61 @@ import fetch.syntax._ val fetchOne: Fetch[String] = fetchString(1) ``` -Now that we have created a fetch, we can run it to a `Task`. Note that when we create a task we are not computing any value yet. Having a `Task` instance allows us to try to run it synchronously or asynchronously, choosing a scheduler. +We'll run our fetches to the well-known `Future` type in our examples, let's do some standard imports. -```tut:book -val result: Task[String] = fetchOne.runA +```tut:silent +import scala.concurrent._ +import ExecutionContext.Implicits.global +import scala.concurrent.duration._ + +// can only define this in Scala, not in Scala.js +def await[A](t: Future[A]): A = Await.result(t, Duration.Inf) ``` -We can try to run `result` synchronously with `Task#coeval`. +And wait for the fetch to complete, note that you cannot block for a Future in Scala.js: ```tut:book -import monix.execution.Scheduler.Implicits.global +import fetch.implicits._ -result.coeval.value +await(fetchOne.runA[Future]) ``` -Since we calculated the results eagerly using `Task#now`, we can run this fetch synchronously. - As you can see in the previous example, the `ToStringSource` is queried once to get the value of 1. ## Batching Multiple fetches to the same data source are automatically batched. For illustrating it, we are going to compose three independent fetch results as a tuple. -```tut:book +```tut:silent import cats.syntax.cartesian._ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled -val result: Task[(String, String, String)] = fetchThree.runA ``` -```tut:invisible -import scala.concurrent._ -import scala.concurrent.duration._ - -def await[A](t: Task[A]): A = Await.result(t.runAsync, Duration.Inf) -``` - -When executing the above fetch, note how the three identities get batched and the data source is only queried once. Let's pretend we have a function from `Task[A]` to `A` called `await`. +When executing the above fetch, note how the three identities get batched and the data source is only queried once. ```tut:book -await(result) +await(fetchThree.runA[Future]) ``` ## Parallelism If we combine two independent fetches from different data sources, the fetches can be run in parallel. First, let's add a data source that fetches a string's size. -This time, instead of creating the results with `Task#now` we are going to do it with `Task#apply` for emulating an asynchronous data source. +This time, instead of creating the results with `Query#later` we are going to do it with `Query#async` for emulating an asynchronous data source. 
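Before the asynchronous `LengthSource` below, a minimal sketch of the three `Query` constructors mentioned earlier (`Query#now`, `Query#later` and `Query#async`) may help; the value names and the printed message are only illustrative:

```scala
import fetch._

// Computed right away, when the query is constructed.
val eager: Query[Int] = Query.now(42)

// Deferred: the block runs only when the query is interpreted to the target monad.
val deferred: Query[Int] = Query.later({ println("computing"); 42 })

// Asynchronous: completed through success/error callbacks.
val asynchronous: Query[Int] = Query.async[Int]((ok, fail) => ok(42))
```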
```tut:silent implicit object LengthSource extends DataSource[String, Int]{ - override def fetchOne(id: String): Task[Option[Int]] = { - Task({ + override def fetchOne(id: String): Query[Option[Int]] = { + Query.async((ok, fail) => { println(s"[${Thread.currentThread.getId}] One Length $id") - Option(id.size) + ok(Option(id.size)) }) } - override def fetchMany(ids: NonEmptyList[String]): Task[Map[String, Int]] = { - Task({ + override def fetchMany(ids: NonEmptyList[String]): Query[Map[String, Int]] = { + Query.async((ok, fail) => { println(s"[${Thread.currentThread.getId}] Many Length $ids") - ids.unwrap.map(i => (i, i.size)).toMap + ok(ids.unwrap.map(i => (i, i.size)).toMap) }) } } @@ -163,22 +158,21 @@ def fetchLength(s: String): Fetch[Int] = Fetch(s) And now we can easily receive data from the two sources in a single fetch. -```tut:book +```tut:silent val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled -val result = fetchMulti.runA ``` Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. ```tut:book -await(result) +await(fetchMulti.runA[Future]) ``` ## Caching When fetching an identity, subsequent fetches for the same identity are cached. Let's try creating a fetch that asks for the same identity twice. -```tut:book +```tut:silent val fetchTwice: Fetch[(String, String)] = for { one <- fetchString(1) two <- fetchString(1) @@ -188,6 +182,6 @@ val fetchTwice: Fetch[(String, String)] = for { While running it, notice that the data source is only queried once. The next time the identity is requested it's served from the cache. ```tut:book -val result: (String, String) = await(fetchTwice.runA) +await(fetchTwice.runA[Future]) ``` diff --git a/shared/src/main/scala/fetch.scala b/shared/src/main/scala/fetch.scala index ef1b9009..93e0d82a 100644 --- a/shared/src/main/scala/fetch.scala +++ b/shared/src/main/scala/fetch.scala @@ -35,6 +35,7 @@ object Query { type Callback[A] = A => Unit type Errback = Throwable => Unit + def apply[A](x: A): Query[A] = Now(x) def now[A](x: A): Query[A] = Now(x) def later[A](th: => A): Query[A] = Later(th _) def async[A]( diff --git a/tut/README.md b/tut/README.md index 41f71a75..51cb5698 100644 --- a/tut/README.md +++ b/tut/README.md @@ -51,32 +51,30 @@ Data Sources take two type parameters: ```scala -import monix.eval.Task import cats.data.NonEmptyList trait DataSource[Identity, Result]{ - def fetchOne(id: Identity): Task[Option[Result]] - def fetchMany(ids: NonEmptyList[Identity]): Task[Map[Identity, Result]] + def fetchOne(id: Identity): Query[Option[Result]] + def fetchMany(ids: NonEmptyList[Identity]): Query[Map[Identity, Result]] } ``` We'll implement a dummy data source that can convert integers to strings. For convenience, we define a `fetchString` function that lifts identities (`Int` in our dummy data source) to a `Fetch`. 
 ```tut:silent
-import monix.eval.Task
 import cats.data.NonEmptyList
 import cats.std.list._
 
 import fetch._
 
 implicit object ToStringSource extends DataSource[Int, String]{
-  override def fetchOne(id: Int): Task[Option[String]] = {
-    Task.now({
+  override def fetchOne(id: Int): Query[Option[String]] = {
+    Query.later({
       println(s"[${Thread.currentThread.getId}] One ToString $id")
       Option(id.toString)
     })
   }
-  override def fetchMany(ids: NonEmptyList[Int]): Task[Map[Int, String]] = {
-    Task.now({
+  override def fetchMany(ids: NonEmptyList[Int]): Query[Map[Int, String]] = {
+    Query.later({
       println(s"[${Thread.currentThread.getId}] Many ToString $ids")
       ids.unwrap.map(i => (i, i.toString)).toMap
     })
@@ -99,21 +97,32 @@ val fetchOne: Fetch[String] = fetchString(1)
 Now that we have created a fetch, we can run it to a `Task`. Note that when we create a task we are not computing any value yet. Having a `Task` instance allows us to try to run it synchronously or asynchronously, choosing a scheduler.
 
 ```tut:book
-val result: Task[String] = fetchOne.runA
+import fetch.implicits._
+
+import scala.concurrent._
+import ExecutionContext.Implicits.global
+
+val result: Future[String] = fetchOne.runA[Future]
 ```
 
-We can try to run `result` synchronously with `Task#coeval`.
+Since we created the results with `Query#later`, they are computed synchronously and we can simply block until the `Future` completes.
 
 ```tut:book
-import monix.execution.Scheduler.Implicits.global
+import scala.concurrent.duration._
 
-result.coeval.value
+Await.result(result, Duration.Inf)
 ```
 
-Since we calculated the results eagerly using `Task#now`, we can run this fetch synchronously.
-
 As you can see in the previous example, the `ToStringSource` is queried once to get the value of 1.
 
+
+```tut:book
+import scala.concurrent._
+import scala.concurrent.duration._
+
+def await[A](t: Future[A]): A = Await.result(t, Duration.Inf)
+```
+
 ## Batching
 
 Multiple fetches to the same data source are automatically batched. For illustrating it, we are going to compose three independent fetch results as a tuple.
 
@@ -122,17 +131,11 @@ Multiple fetches to the same data source are automatically batched. For illustra
 import cats.syntax.cartesian._
 
 val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled
-val result: Task[(String, String, String)] = fetchThree.runA
+val result: Future[(String, String, String)] = fetchThree.runA[Future]
 ```
 
-```tut:invisible
-import scala.concurrent._
-import scala.concurrent.duration._
-
-def await[A](t: Task[A]): A = Await.result(t.runAsync, Duration.Inf)
-```
-When executing the above fetch, note how the three identities get batched and the data source is only queried once. Let's pretend we have a function from `Task[A]` to `A` called `await`.
+When executing the above fetch, note how the three identities get batched and the data source is only queried once. We'll use the `await` helper from `Future[A]` to `A` that we defined above.
 
 ```tut:book
 await(result)
@@ -142,20 +145,20 @@ await(result)
 If we combine two independent fetches from different data sources, the fetches can be run in parallel. First, let's add a data source that fetches a string's size.
 
-This time, instead of creating the results with `Task#now` we are going to do it with `Task#apply` for emulating an asynchronous data source.
+This time, instead of creating the results with `Query#later` we are going to do it with `Query#async` for emulating an asynchronous data source.
-```tut:silent +```tuto:silent implicit object LengthSource extends DataSource[String, Int]{ - override def fetchOne(id: String): Task[Option[Int]] = { - Task({ + override def fetchOne(id: String): Query[Option[Int]] = { + Query.async((ok, fail) => { println(s"[${Thread.currentThread.getId}] One Length $id") - Option(id.size) + ok(Option(id.size)) }) } - override def fetchMany(ids: NonEmptyList[String]): Task[Map[String, Int]] = { - Task({ + override def fetchMany(ids: NonEmptyList[String]): Query[Map[String, Int]] = { + Query.async((ok, fail) => { println(s"[${Thread.currentThread.getId}] Many Length $ids") - ids.unwrap.map(i => (i, i.size)).toMap + ok(ids.unwrap.map(i => (i, i.size)).toMap) }) } } @@ -165,14 +168,14 @@ def fetchLength(s: String): Fetch[Int] = Fetch(s) And now we can easily receive data from the two sources in a single fetch. -```tut:book +```tuto:book val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled -val result = fetchMulti.runA +val result = fetchMulti.runA[Future] ``` Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. -```tut:book +```tuto:book await(result) ``` @@ -180,7 +183,7 @@ await(result) When fetching an identity, subsequent fetches for the same identity are cached. Let's try creating a fetch that asks for the same identity twice. -```tut:book +```tuto:book val fetchTwice: Fetch[(String, String)] = for { one <- fetchString(1) two <- fetchString(1) @@ -189,6 +192,6 @@ val fetchTwice: Fetch[(String, String)] = for { While running it, notice that the data source is only queried once. The next time the identity is requested it's served from the cache. -```tut:book -val result: (String, String) = await(fetchTwice.runA) +```tuto:book +val result: (String, String) = await(fetchTwice.runA[Future]) ``` From 02cfb03e4380534dfbb2eef1e8b0d0489af77fc1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Tue, 31 May 2016 21:59:12 +0200 Subject: [PATCH 14/40] Extract Monix integration into a subproject --- build.sbt | 27 ++++++++++------- monix/shared/src/main/scala/implicits.scala | 33 +++++++++++++++++++++ shared/src/main/scala/datasource.scala | 1 - shared/src/main/scala/implicits.scala | 31 +------------------ shared/src/main/scala/syntax.scala | 2 -- shared/src/test/FetchAsyncQueryTests.scala | 3 +- shared/src/test/scala/FetchTests.scala | 17 +++++------ 7 files changed, 60 insertions(+), 54 deletions(-) create mode 100644 monix/shared/src/main/scala/implicits.scala diff --git a/build.sbt b/build.sbt index e19b7af3..c1463ed0 100644 --- a/build.sbt +++ b/build.sbt @@ -18,8 +18,6 @@ lazy val commonSettings = Seq( resolvers += Resolver.sonatypeRepo("releases"), libraryDependencies ++= Seq( "org.typelevel" %%% "cats" % "0.6.0", - "io.monix" %%% "monix-eval" % "2.0-RC5", - "io.monix" %%% "monix-cats" % "2.0-RC5", "org.scalatest" %%% "scalatest" % "3.0.0-M7" % "test", compilerPlugin( "org.spire-math" %% "kind-projector" % "0.7.1" @@ -63,10 +61,7 @@ lazy val docsSettings = ghpages.settings ++ buildSettings ++ tutSettings ++ Seq( tutSourceDirectory := sourceDirectory.value / "tut", tutTargetDirectory := sourceDirectory.value / "jekyll", tutScalacOptions ~= (_.filterNot(Set("-Ywarn-unused-import", "-Ywarn-dead-code"))), - aggregate in doc := true, - libraryDependencies ++= Seq( - "io.monix" %%% "monix-eval" % "2.0-RC3" - ) + aggregate in doc := true ) lazy val docs = (project in file("docs")) @@ -121,10 +116,7 @@ lazy val readmeSettings = buildSettings 
++ tutSettings ++ Seq( tutSourceDirectory := baseDirectory.value, tutTargetDirectory := baseDirectory.value.getParentFile, tutScalacOptions ~= (_.filterNot(Set("-Ywarn-unused-import", "-Ywarn-dead-code"))), - tutNameFilter := """README.md""".r, - libraryDependencies ++= Seq( - "io.monix" %%% "monix-eval" % "2.0-RC3" - ) + tutNameFilter := """README.md""".r ) lazy val readme = (project in file("tut")) @@ -135,3 +127,18 @@ lazy val readme = (project in file("tut")) .settings(readmeSettings: _*) .settings(noPublishSettings) +lazy val monixSettings = ( + libraryDependencies ++= Seq( + "io.monix" %%% "monix-eval" % "2.0-RC5", + "io.monix" %%% "monix-cats" % "2.0-RC5" + ) +) + +lazy val monix = crossProject.in(file("monix")) + .settings(moduleName := "fetch-monix") + .settings(allSettings:_*) + .jsSettings(fetchJSSettings:_*) + .settings(monixSettings: _*) + .enablePlugins(AutomateHeaderPlugin) + + diff --git a/monix/shared/src/main/scala/implicits.scala b/monix/shared/src/main/scala/implicits.scala new file mode 100644 index 00000000..abc6c3d7 --- /dev/null +++ b/monix/shared/src/main/scala/implicits.scala @@ -0,0 +1,33 @@ +package fetch + +import monix.eval.Task +import monix.execution.Cancelable +import monix.execution.Scheduler + +object monix { + implicit val fetchTaskFetchMonadError: FetchMonadError[Task] = new FetchMonadError[Task] { + override def runQuery[A](j: Query[A]): Task[A] = j match { + case Now(x) => Task.now(x) + case Later(x) => Task.evalAlways({ x() }) + case Async(ac) => + Task.create( + (scheduler, callback) => { + + scheduler.execute(new Runnable { + def run() = ac(callback.onSuccess, callback.onError) + }) + + Cancelable.empty + }) + } + + def pure[A](x: A): Task[A] = Task.now(x) + def handleErrorWith[A](fa: monix.eval.Task[A])( + f: Throwable => monix.eval.Task[A]): monix.eval.Task[A] = fa.onErrorHandleWith(f) + override def ap[A, B](f: Task[A => B])(x: Task[A]): Task[B] = + Task.mapBoth(f, x)((f, x) => f(x)) + def raiseError[A](e: Throwable): monix.eval.Task[A] = Task.raiseError(e) + def flatMap[A, B](fa: monix.eval.Task[A])(f: A => monix.eval.Task[B]): monix.eval.Task[B] = + fa.flatMap(f) + } +} diff --git a/shared/src/main/scala/datasource.scala b/shared/src/main/scala/datasource.scala index 61d0beb2..fd442231 100644 --- a/shared/src/main/scala/datasource.scala +++ b/shared/src/main/scala/datasource.scala @@ -16,7 +16,6 @@ package fetch -import monix.eval.Task import cats.data.NonEmptyList import cats.std.list._ diff --git a/shared/src/main/scala/implicits.scala b/shared/src/main/scala/implicits.scala index 7017dfc5..d268cbef 100644 --- a/shared/src/main/scala/implicits.scala +++ b/shared/src/main/scala/implicits.scala @@ -16,39 +16,10 @@ package fetch -import monix.eval.Task -import monix.execution.Cancelable -import monix.execution.Scheduler -import cats.ApplicativeError +import cats.Eval import scala.concurrent.{Promise, Future, ExecutionContext} object implicits { - implicit val fetchTaskFetchMonadError: FetchMonadError[Task] = new FetchMonadError[Task] { - override def runQuery[A](j: Query[A]): Task[A] = j match { - case Now(x) => Task.now(x) - case Later(x) => Task.evalAlways({ x() }) - case Async(ac) => - Task.create( - (scheduler, callback) => { - - scheduler.execute(new Runnable { - def run() = ac(callback.onSuccess, callback.onError) - }) - - Cancelable.empty - }) - } - - def pure[A](x: A): Task[A] = Task.now(x) - def handleErrorWith[A](fa: monix.eval.Task[A])( - f: Throwable => monix.eval.Task[A]): monix.eval.Task[A] = fa.onErrorHandleWith(f) - override def 
ap[A, B](f: Task[A => B])(x: Task[A]): Task[B] = - Task.mapBoth(f, x)((f, x) => f(x)) - def raiseError[A](e: Throwable): monix.eval.Task[A] = Task.raiseError(e) - def flatMap[A, B](fa: monix.eval.Task[A])(f: A => monix.eval.Task[B]): monix.eval.Task[B] = - fa.flatMap(f) - } - implicit def fetchFutureFetchMonadError( implicit ec: ExecutionContext ): FetchMonadError[Future] = new FetchMonadError[Future] { diff --git a/shared/src/main/scala/syntax.scala b/shared/src/main/scala/syntax.scala index 548fe411..e1463329 100644 --- a/shared/src/main/scala/syntax.scala +++ b/shared/src/main/scala/syntax.scala @@ -16,8 +16,6 @@ package fetch -import monix.eval.Task - object syntax { /** Implicit syntax to lift any value to the context of Fetch via pure */ diff --git a/shared/src/test/FetchAsyncQueryTests.scala b/shared/src/test/FetchAsyncQueryTests.scala index 8e79bfc8..8a8a9d03 100644 --- a/shared/src/test/FetchAsyncQueryTests.scala +++ b/shared/src/test/FetchAsyncQueryTests.scala @@ -19,14 +19,13 @@ import scala.concurrent.duration._ import org.scalatest._ -import monix.execution.Scheduler import cats.data.NonEmptyList import cats.std.list._ import fetch._ import fetch.implicits._ class FetchAsyncQueryTests extends AsyncFreeSpec with Matchers { - implicit def executionContext = Scheduler.Implicits.global + implicit def executionContext = ExecutionContext.Implicits.global override def newInstance = new FetchAsyncQueryTests case class ArticleId(id: Int) diff --git a/shared/src/test/scala/FetchTests.scala b/shared/src/test/scala/FetchTests.scala index 745392ee..05d88491 100644 --- a/shared/src/test/scala/FetchTests.scala +++ b/shared/src/test/scala/FetchTests.scala @@ -19,8 +19,6 @@ import scala.concurrent.duration._ import org.scalatest._ -import monix.eval._ -import monix.execution.Scheduler import cats.{MonadError} import cats.data.{NonEmptyList, Xor} import cats.std.list._ @@ -107,7 +105,7 @@ class FetchSyntaxTests extends AsyncFreeSpec with Matchers { val ME = implicitly[FetchMonadError[Future]] - implicit def executionContext = Scheduler.Implicits.global + implicit def executionContext = ExecutionContext.Implicits.global override def newInstance = new FetchSyntaxTests "Cartesian syntax is implicitly concurrent" in { @@ -191,7 +189,7 @@ class FetchTests extends AsyncFreeSpec with Matchers { val ME = implicitly[FetchMonadError[Future]] - implicit def executionContext = Scheduler.Implicits.global + implicit def executionContext = ExecutionContext.Implicits.global override def newInstance = new FetchTests "We can lift plain values to Fetch" in { @@ -228,11 +226,12 @@ class FetchTests extends AsyncFreeSpec with Matchers { OneSource.identity(One(1)) -> 1 ) - intercept[FetchFailure] { - Fetch.run[Task](fetch, cache).coeval.value - } match { - case FetchFailure(env) => env.cache shouldEqual cache - } + ME.attempt(Fetch.run[Future](fetch, cache)) + .map(xor => + xor match { + case Xor.Left(FetchFailure(env)) => env.cache shouldEqual cache + case _ => fail("Cache should be populated") + }) } "Data sources with errors won't fail if they're cached" in { From 620c1e8a50e5a10fc9598209eb8cb5bb225466c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Tue, 31 May 2016 21:59:29 +0200 Subject: [PATCH 15/40] Unsafe FetchMonadError instance for Eval --- jvm/src/main/scala/implicits.scala | 63 ++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 jvm/src/main/scala/implicits.scala diff --git a/jvm/src/main/scala/implicits.scala b/jvm/src/main/scala/implicits.scala 
new file mode 100644 index 00000000..67548a38 --- /dev/null +++ b/jvm/src/main/scala/implicits.scala @@ -0,0 +1,63 @@ +/* + * Copyright 2016 47 Degrees, LLC. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package fetch + +import cats.Eval +import cats.data.Xor + +object unsafeImplicits { + implicit val fetchEvalFetchMonadError: FetchMonadError[Eval] = new FetchMonadError[Eval] { + override def runQuery[A](j: Query[A]): Eval[A] = j match { + case Now(x) => Eval.now(x) + case Later(x) => Eval.later({ x() }) + case Async(ac) => + Eval.later({ + val latch = new java.util.concurrent.CountDownLatch(1) + @volatile var result: Xor[Throwable, A] = null + new Thread(new Runnable { + def run() = { + ac(a => { + result = Xor.Right(a); + latch.countDown + }, err => { + result = Xor.Left(err); + latch.countDown + }) + } + }).start() + latch.await + result match { + case Xor.Left(err) => throw err + case Xor.Right(v) => v + } + }) + } + + def pure[A](x: A): Eval[A] = Eval.now(x) + def handleErrorWith[A](fa: Eval[A])(f: Throwable => Eval[A]): Eval[A] = + Eval.later({ + try { + fa.value + } catch { + case ex: Throwable => f(ex).value + } + }) + def raiseError[A](e: Throwable): Eval[A] = Eval.later({ throw e }) + def flatMap[A, B](fa: Eval[A])(f: A => Eval[B]): Eval[B] = + fa.flatMap(f) + } +} From b775ea967007c66050195a4923e565d5ece6688f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Wed, 1 Jun 2016 21:45:38 +0200 Subject: [PATCH 16/40] Stabilize Query type and support automatic batching --- build.sbt | 3 + jvm/src/main/scala/implicits.scala | 31 +---- monix/shared/src/main/scala/implicits.scala | 46 +++++-- .../src/test/scala/FetchTaskTests.scala | 128 ++++++++++++++++++ shared/src/main/scala/datasource.scala | 35 +++-- shared/src/main/scala/fetch.scala | 40 ++++-- shared/src/main/scala/implicits.scala | 11 +- shared/src/main/scala/syntax.scala | 9 ++ .../{ => scala}/FetchAsyncQueryTests.scala | 8 +- 9 files changed, 235 insertions(+), 76 deletions(-) create mode 100644 monix/shared/src/test/scala/FetchTaskTests.scala rename shared/src/test/{ => scala}/FetchAsyncQueryTests.scala (93%) diff --git a/build.sbt b/build.sbt index c1463ed0..b1f6cf35 100644 --- a/build.sbt +++ b/build.sbt @@ -135,6 +135,7 @@ lazy val monixSettings = ( ) lazy val monix = crossProject.in(file("monix")) + .dependsOn(fetch) .settings(moduleName := "fetch-monix") .settings(allSettings:_*) .jsSettings(fetchJSSettings:_*) @@ -142,3 +143,5 @@ lazy val monix = crossProject.in(file("monix")) .enablePlugins(AutomateHeaderPlugin) +lazy val fetchMonixJVM = monix.jvm +lazy val fetchMonixJS = monix.js diff --git a/jvm/src/main/scala/implicits.scala b/jvm/src/main/scala/implicits.scala index 67548a38..129637a0 100644 --- a/jvm/src/main/scala/implicits.scala +++ b/jvm/src/main/scala/implicits.scala @@ -19,34 +19,15 @@ package fetch import cats.Eval import cats.data.Xor +import scala.concurrent._ +import scala.concurrent.duration._ + object unsafeImplicits { implicit val fetchEvalFetchMonadError: 
FetchMonadError[Eval] = new FetchMonadError[Eval] { - override def runQuery[A](j: Query[A]): Eval[A] = j match { - case Now(x) => Eval.now(x) - case Later(x) => Eval.later({ x() }) - case Async(ac) => - Eval.later({ - val latch = new java.util.concurrent.CountDownLatch(1) - @volatile var result: Xor[Throwable, A] = null - new Thread(new Runnable { - def run() = { - ac(a => { - result = Xor.Right(a); - latch.countDown - }, err => { - result = Xor.Left(err); - latch.countDown - }) - } - }).start() - latch.await - result match { - case Xor.Left(err) => throw err - case Xor.Right(v) => v - } - }) + override def runQuery[A](j: Query[A]): Eval[A] = { + // TODO + ??? } - def pure[A](x: A): Eval[A] = Eval.now(x) def handleErrorWith[A](fa: Eval[A])(f: Throwable => Eval[A]): Eval[A] = Eval.later({ diff --git a/monix/shared/src/main/scala/implicits.scala b/monix/shared/src/main/scala/implicits.scala index abc6c3d7..dc1334de 100644 --- a/monix/shared/src/main/scala/implicits.scala +++ b/monix/shared/src/main/scala/implicits.scala @@ -1,15 +1,31 @@ -package fetch +/* + * Copyright 2016 47 Degrees, LLC. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import monix.eval.Task -import monix.execution.Cancelable -import monix.execution.Scheduler +package fetch.monix -object monix { +import fetch._ + +import _root_.monix.eval.Task +import _root_.monix.execution.{Scheduler, Cancelable} + +object implicits { implicit val fetchTaskFetchMonadError: FetchMonadError[Task] = new FetchMonadError[Task] { override def runQuery[A](j: Query[A]): Task[A] = j match { - case Now(x) => Task.now(x) - case Later(x) => Task.evalAlways({ x() }) - case Async(ac) => + case Sync(x) => pureEval(x) + case Async(ac, timeout) => Task.create( (scheduler, callback) => { @@ -19,15 +35,21 @@ object monix { Cancelable.empty }) + case Ap(qf, qx) => + Task + .zip2(runQuery(qf), runQuery(qx)) + .map({ + case (f, x) => f(x) + }) } def pure[A](x: A): Task[A] = Task.now(x) - def handleErrorWith[A](fa: monix.eval.Task[A])( - f: Throwable => monix.eval.Task[A]): monix.eval.Task[A] = fa.onErrorHandleWith(f) + def handleErrorWith[A](fa: Task[A])(f: Throwable => Task[A]): Task[A] = + fa.onErrorHandleWith(f) override def ap[A, B](f: Task[A => B])(x: Task[A]): Task[B] = Task.mapBoth(f, x)((f, x) => f(x)) - def raiseError[A](e: Throwable): monix.eval.Task[A] = Task.raiseError(e) - def flatMap[A, B](fa: monix.eval.Task[A])(f: A => monix.eval.Task[B]): monix.eval.Task[B] = + def raiseError[A](e: Throwable): Task[A] = Task.raiseError(e) + def flatMap[A, B](fa: Task[A])(f: A => Task[B]): Task[B] = fa.flatMap(f) } } diff --git a/monix/shared/src/test/scala/FetchTaskTests.scala b/monix/shared/src/test/scala/FetchTaskTests.scala new file mode 100644 index 00000000..0a7545b3 --- /dev/null +++ b/monix/shared/src/test/scala/FetchTaskTests.scala @@ -0,0 +1,128 @@ +/* + * Copyright 2016 47 Degrees, LLC. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import monix.eval.Task +import monix.execution.Scheduler + +import org.scalatest._ + +import cats.data.NonEmptyList +import cats.std.list._ + +import fetch._ +import fetch.monix.implicits._ + +import scala.concurrent.Future + +class FetchTaskTests extends AsyncFreeSpec with Matchers { + implicit def executionContext = Scheduler.Implicits.global + override def newInstance = new FetchTaskTests + + case class ArticleId(id: Int) + case class Article(id: Int, content: String) { + def author: Int = id + 1 + } + + implicit object ArticleFuture extends DataSource[ArticleId, Article] { + override def name = "ArticleFuture" + override def fetchOne(id: ArticleId): Query[Option[Article]] = + Query.async((ok, fail) => { + ok(Option(Article(id.id, "An article with id " + id.id))) + }) + override def fetchMany(ids: NonEmptyList[ArticleId]): Query[Map[ArticleId, Article]] = { + batchingNotSupported(ids) + } + } + + def article(id: Int): Fetch[Article] = Fetch(ArticleId(id)) + + case class AuthorId(id: Int) + case class Author(id: Int, name: String) + + implicit object AuthorFuture extends DataSource[AuthorId, Author] { + override def name = "AuthorFuture" + override def fetchOne(id: AuthorId): Query[Option[Author]] = + Query.async((ok, fail) => { + ok(Option(Author(id.id, "@egg" + id.id))) + }) + override def fetchMany(ids: NonEmptyList[AuthorId]): Query[Map[AuthorId, Author]] = { + batchingNotSupported(ids) + } + } + + def author(a: Article): Fetch[Author] = Fetch(AuthorId(a.author)) + + "We can interpret an async fetch into a task" in { + val fetch: Fetch[Article] = article(1) + val task: Task[Article] = Fetch.run(fetch) + val fut: Future[Article] = task.runAsync + fut.map(_ shouldEqual Article(1, "An article with id 1")) + } + + "We can combine several async data sources and interpret a fetch into a future" in { + val fetch: Fetch[(Article, Author)] = for { + art <- article(1) + author <- author(art) + } yield (art, author) + + val task: Task[(Article, Author)] = Fetch.run(fetch) + val fut: Future[(Article, Author)] = task.runAsync + + fut.map(_ shouldEqual (Article(1, "An article with id 1"), Author(2, "@egg2"))) + } + + "We can use combinators in a for comprehension and interpret a fetch from async sources into a future" in { + val fetch: Fetch[List[Article]] = for { + articles <- Fetch.traverse(List(1, 1, 2))(article) + } yield articles + + val task: Task[List[Article]] = Fetch.run(fetch) + val fut: Future[List[Article]] = task.runAsync + + fut.map( + _ shouldEqual List( + Article(1, "An article with id 1"), + Article(1, "An article with id 1"), + Article(2, "An article with id 2") + ) + ) + } + + "We can use combinators and multiple sources in a for comprehension and interpret a fetch from async sources into a future" in { + val fetch = for { + articles <- Fetch.traverse(List(1, 1, 2))(article) + authors <- Fetch.traverse(articles)(author) + } yield (articles, authors) + + val task: Task[(List[Article], List[Author])] = Fetch.run(fetch, InMemoryCache.empty) + val fut: Future[(List[Article], List[Author])] = task.runAsync + + fut.map( + _ shouldEqual ( + List( + Article(1, 
"An article with id 1"), + Article(1, "An article with id 1"), + Article(2, "An article with id 2") + ), + List( + Author(2, "@egg2"), + Author(2, "@egg2"), + Author(3, "@egg3") + ) + ) + ) + } +} diff --git a/shared/src/main/scala/datasource.scala b/shared/src/main/scala/datasource.scala index fd442231..34bab402 100644 --- a/shared/src/main/scala/datasource.scala +++ b/shared/src/main/scala/datasource.scala @@ -17,8 +17,8 @@ package fetch import cats.data.NonEmptyList - import cats.std.list._ +import cats.syntax.functor._ import cats.syntax.traverse._ /** @@ -46,21 +46,20 @@ trait DataSource[I, A] { */ def fetchMany(ids: NonEmptyList[I]): Query[Map[I, A]] - // FIXME: query can be applicative? - // /** Use `fetchOne` for implementing of `fetchMany`. Use only when the data - // * source doesn't support batching. - // */ - // def batchingNotSupported(ids: NonEmptyList[I]): Query[Map[I, A]] = { - // val idsList = ids.unwrap - // idsList - // .map(fetchOne) - // .sequence - // .map(results => { - // (idsList zip results) - // .collect({ - // case (id, Some(result)) => (id, result) - // }) - // .toMap - // }) - // } + /** Use `fetchOne` for implementing of `fetchMany`. Use only when the data + * source doesn't support batching. + */ + def batchingNotSupported(ids: NonEmptyList[I]): Query[Map[I, A]] = { + val idsList = ids.unwrap + idsList + .map(fetchOne) + .sequence + .map(results => { + (idsList zip results) + .collect({ + case (id, Some(result)) => (id, result) + }) + .toMap + }) + } } diff --git a/shared/src/main/scala/fetch.scala b/shared/src/main/scala/fetch.scala index 93e0d82a..9be36707 100644 --- a/shared/src/main/scala/fetch.scala +++ b/shared/src/main/scala/fetch.scala @@ -18,29 +18,42 @@ package fetch import scala.collection.immutable.Map -import cats.{Applicative, Monad, ApplicativeError, MonadError, ~>} +import cats.{Applicative, Monad, ApplicativeError, MonadError, ~>, Eval} import cats.data.{StateT, Const, NonEmptyList} import cats.free.{Free} import cats.std.list._ import cats.std.option._ import cats.syntax.traverse._ -import scala.concurrent.duration._ +import scala.concurrent.duration.Duration sealed trait Query[A] extends Product with Serializable -final case class Now[A](a: A) extends Query[A] -final case class Later[A](a: () => A) extends Query[A] -final case class Async[A](action: (Query.Callback[A], Query.Errback) => Unit) extends Query[A] // todo: timeout + +/** A query that can be satisfied synchronously. **/ +final case class Sync[A](action: Eval[A]) extends Query[A] + +/** A query that can only be satisfied asynchronously. 
**/ +final case class Async[A](action: (Query.Callback[A], Query.Errback) => Unit, timeout: Duration) + extends Query[A] + +final case class Ap[A, B](ff: Query[A => B], fa: Query[A]) extends Query[B] object Query { type Callback[A] = A => Unit type Errback = Throwable => Unit - def apply[A](x: A): Query[A] = Now(x) - def now[A](x: A): Query[A] = Now(x) - def later[A](th: => A): Query[A] = Later(th _) + def apply[A](x: A): Query[A] = Sync(Eval.now(x)) + def now[A](x: A): Query[A] = Sync(Eval.now(x)) + def later[A](th: => A): Query[A] = Sync(Eval.later(th)) def async[A]( - action: (Callback[A], Errback) => Unit //, timeout: FiniteDuration - ): Query[A] = Async(action) + action: (Callback[A], Errback) => Unit, + timeout: Duration = Duration.Inf + ): Query[A] = Async(action, timeout) + + implicit val fetchQueryApplicative: Applicative[Query] = new Applicative[Query] { + def pure[A](x: A): Query[A] = Sync(Eval.now(x)) + def ap[A, B](ff: Query[A => B])(fa: Query[A]): Query[B] = + Ap(ff, fa) + } } /** Requests in Fetch Free monad. @@ -86,8 +99,7 @@ final case class Concurrent(as: List[FetchRequest[_, _]]) extends FetchOp[DataSo final case class FetchError[A](err: Throwable) extends FetchOp[A] object `package` { - type DataSourceName = String - + type DataSourceName = String type DataSourceIdentity = (DataSourceName, Any) type Fetch[A] = Free[FetchOp, A] @@ -96,6 +108,10 @@ object `package` { def runQuery[A](q: Query[A]): M[A] } + object FetchMonadError { + def apply[M[_]](implicit ME: FetchMonadError[M]): FetchMonadError[M] = ME + } + type FetchInterpreter[M[_]] = { type f[x] = StateT[M, FetchEnv, x] } diff --git a/shared/src/main/scala/implicits.scala b/shared/src/main/scala/implicits.scala index d268cbef..6b95fbd1 100644 --- a/shared/src/main/scala/implicits.scala +++ b/shared/src/main/scala/implicits.scala @@ -24,9 +24,8 @@ object implicits { implicit ec: ExecutionContext ): FetchMonadError[Future] = new FetchMonadError[Future] { override def runQuery[A](j: Query[A]): Future[A] = j match { - case Now(x) => Future.successful(x) - case Later(x) => Future({ x() }) - case Async(ac) => { + case Sync(e) => pureEval(e) + case Async(ac, timeout) => { val p = Promise[A]() ec.execute(new Runnable { @@ -35,6 +34,12 @@ object implicits { p.future } + case Ap(qf, qx) => + runQuery(qf) + .zip(runQuery(qx)) + .map({ + case (f, x) => f(x) + }) } def pure[A](x: A): Future[A] = Future.successful(x) def handleErrorWith[A](fa: Future[A])(f: Throwable => Future[A]): Future[A] = diff --git a/shared/src/main/scala/syntax.scala b/shared/src/main/scala/syntax.scala index e1463329..e407f8e8 100644 --- a/shared/src/main/scala/syntax.scala +++ b/shared/src/main/scala/syntax.scala @@ -46,5 +46,14 @@ object syntax { def runA[M[_]: FetchMonadError]: M[A] = Fetch.run[M](fa, InMemoryCache.empty) + + def runF[M[_]: FetchMonadError](cache: DataSourceCache): M[(FetchEnv, A)] = + Fetch.runFetch[M](fa, cache) + + def runE[M[_]: FetchMonadError](cache: DataSourceCache): M[FetchEnv] = + Fetch.runEnv[M](fa, cache) + + def runA[M[_]: FetchMonadError](cache: DataSourceCache): M[A] = + Fetch.run[M](fa, cache) } } diff --git a/shared/src/test/FetchAsyncQueryTests.scala b/shared/src/test/scala/FetchAsyncQueryTests.scala similarity index 93% rename from shared/src/test/FetchAsyncQueryTests.scala rename to shared/src/test/scala/FetchAsyncQueryTests.scala index 8a8a9d03..17877bf8 100644 --- a/shared/src/test/FetchAsyncQueryTests.scala +++ b/shared/src/test/scala/FetchAsyncQueryTests.scala @@ -40,9 +40,7 @@ class FetchAsyncQueryTests extends 
AsyncFreeSpec with Matchers { ok(Option(Article(id.id, "An article with id " + id.id))) }) override def fetchMany(ids: NonEmptyList[ArticleId]): Query[Map[ArticleId, Article]] = { - Query.async((ok, fail) => { - ok(ids.unwrap.map(tid => (tid, Article(tid.id, "An article with id " + tid.id))).toMap) - }) + batchingNotSupported(ids) } } @@ -58,9 +56,7 @@ class FetchAsyncQueryTests extends AsyncFreeSpec with Matchers { ok(Option(Author(id.id, "@egg" + id.id))) }) override def fetchMany(ids: NonEmptyList[AuthorId]): Query[Map[AuthorId, Author]] = { - Query.async((ok, fail) => { - ok(ids.unwrap.map(tid => (tid, Author(tid.id, "@egg" + tid.id))).toMap) - }) + batchingNotSupported(ids) } } From c7f0c3af82cfa585fbf7312a38361c63482a597f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Wed, 1 Jun 2016 21:46:42 +0200 Subject: [PATCH 17/40] Update docs --- README.md | 17 +- docs/src/tut/docs.md | 607 +++++++++++++++++++++++++++++++++++++++++- docs/src/tut/index.md | 4 +- 3 files changed, 607 insertions(+), 21 deletions(-) diff --git a/README.md b/README.md index 627562b7..13bb52b0 100644 --- a/README.md +++ b/README.md @@ -109,7 +109,7 @@ Since we calculated the results eagerly using `Task#now`, we can run this fetch ```scala import scala.concurrent.duration._ -// [198] One ToString 1 +// [152] One ToString 1 // import scala.concurrent.duration._ Await.result(result, Duration.Inf) @@ -119,7 +119,6 @@ Await.result(result, Duration.Inf) As you can see in the previous example, the `ToStringSource` is queried once to get the value of 1. - ```scala import scala.concurrent._ // import scala.concurrent._ @@ -140,7 +139,7 @@ import cats.syntax.cartesian._ // import cats.syntax.cartesian._ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled -// fetchThree: fetch.Fetch[(String, String, String)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List(2, 3)),ToStringSource$@3cd0bf6b)))),),) +// fetchThree: fetch.Fetch[(String, String, String)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List(2, 3)),ToStringSource$@183bd58)))),),) val result: Future[(String, String, String)] = fetchThree.runA[Future] // result: scala.concurrent.Future[(String, String, String)] = List() @@ -151,7 +150,7 @@ When executing the above fetch, note how the three identities get batched and th ```scala await(result) -// [188] Many ToString OneAnd(1,List(2, 3)) +// [152] Many ToString OneAnd(1,List(2, 3)) // res4: (String, String, String) = (1,2,3) ``` @@ -186,18 +185,18 @@ And now we can easily receive data from the two sources in a single fetch. ```scala scala> val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled -fetchMulti: fetch.Fetch[(String, Int)] = Gosub(Gosub(Suspend(Concurrent(List(FetchOne(1,ToStringSource$@3cd0bf6b), FetchOne(one,LengthSource$@cf8c35b)))),),) +fetchMulti: fetch.Fetch[(String, Int)] = Gosub(Gosub(Suspend(Concurrent(List(FetchOne(1,ToStringSource$@183bd58), FetchOne(one,LengthSource$@1d6700b5)))),),) scala> val result = fetchMulti.runA[Future] -[199] One ToString 1 result: scala.concurrent.Future[(String, Int)] = List() +[163] One ToString 1 ``` Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. 
```scala scala> await(result) -[198] One Length one +[152] One Length one res5: (String, Int) = (1,3) ``` @@ -210,13 +209,13 @@ scala> val fetchTwice: Fetch[(String, String)] = for { | one <- fetchString(1) | two <- fetchString(1) | } yield (one, two) -fetchTwice: fetch.Fetch[(String, String)] = Gosub(Suspend(FetchOne(1,ToStringSource$@3cd0bf6b)),) +fetchTwice: fetch.Fetch[(String, String)] = Gosub(Suspend(FetchOne(1,ToStringSource$@183bd58)),) ``` While running it, notice that the data source is only queried once. The next time the identity is requested it's served from the cache. ```scala scala> val result: (String, String) = await(fetchTwice.runA[Future]) -[199] One ToString 1 +[152] One ToString 1 result: (String, String) = (1,1) ``` diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md index 245dd971..c296b2cd 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -131,7 +131,7 @@ implicit object UserSource extends DataSource[UserId, User]{ Now that we have a data source we can write a function for fetching users given an id, we just have to pass a `UserId` as an argument to `Fetch`. -```tut +```tut:silent def getUser(id: UserId): Fetch[User] = Fetch(id) // or, more explicitly: Fetch(id)(UserSource) ``` @@ -140,10 +140,10 @@ def getUser(id: UserId): Fetch[User] = Fetch(id) // or, more explicitly: Fetch(i If your data source doesn't support batching, you can use the `DataSource#batchingNotSupported` method as the implementation of `fetchMany`. Note that it will use the `fetchOne` implementation for requesting identities one at a time. -```scala +```tut:silent implicit object UnbatchedSource extends DataSource[Int, Int]{ override def fetchOne(id: Int): Query[Option[Int]] = { - Query(Option(id)) + Query.now(Option(id)) } override def fetchMany(ids: NonEmptyList[Int]): Query[Map[Int, Int]] = { batchingNotSupported(ids) @@ -157,23 +157,33 @@ We are now ready to create and run fetches. Note the distinction between Fetch c When we are creating and combining `Fetch` values, we are just constructing a recipe of our data dependencies. -```tut:book +```tut:silent val fetchUser: Fetch[User] = getUser(1) ``` -A `Fetch` is just a value, and in order to be able to execute it we need to run it to a `Future` first. Running `fetchUser` will give as a `Future[User]`. +A `Fetch` is just a value, and in order to be able to execute it we need to run with a concurrency monad first. We'll run `fetchUser` using `Future` as our target monad, so let's do some standard imports first. -```tut:book +```tut:silent import scala.concurrent._ import ExecutionContext.Implicits.global import fetch.implicits._ import fetch.syntax._ +``` + +Let's pretend we have a function from `Future[A]` to `A` called `await`. Note that we can't implement it in Scala.js. + +```tut:silent +import scala.concurrent.duration._ -val user: Future[User] = fetchUser.runA[Future] +def await[A](t: Future[A]): A = Await.result(t, Duration.Inf) ``` -TODO +We can now run the fetch to a future and see its result: + +```tut:book +await(fetchUser.runA[Future]) +``` ### Sequencing @@ -189,8 +199,585 @@ val fetchTwoUsers: Fetch[(User, User)] = for { When composing fetches with `flatMap` we are telling Fetch that the second one depends on the previous one, so it isn't able to make any optimizations. When running the above fetch, we will query the user data source in two rounds: one for the user with id 1 and another for the user with id 2. 
```tut:book -val result: Future[(User, User)] = fetchTwoUsers.runA[Future] +await(fetchTwoUsers.runA[Future]) +``` + +### Batching + +If we combine two independent requests to the same data source, Fetch will +automatically batch them together into a single request. Applicative operations like the product of two fetches +help us tell the library that those fetches are independent, and thus can be batched if they use the same data source: + +```tut:silent +import cats.syntax.cartesian._ + +val fetchProduct: Fetch[(User, User)] = getUser(1).product(getUser(2)) +``` + +Note how both ids (1 and 2) are requested in a single future to the data source when executing the fetch. + +```tut:book +await(fetchProduct.runA[Future]) +``` + +### Deduplication + +If two independent requests ask for the same identity, Fetch will detect it and deduplicate the id. + +```tut:silent +val fetchDuped: Fetch[(User, User)] = getUser(1).product(getUser(1)) +``` + +Note that when running the fetch, the identity 1 is only requested once even when it is needed by both fetches. + +```tut:book +await(fetchDuped.runA[Future]) +``` + +### Caching + +During the execution of a fetch, previously requested results are implicitly cached. This allows us to write +fetches in a very modular way, asking for all the data they need as if it +was in memory; furthermore, it also avoids re-fetching an identity that may have changed +during the course of a fetch execution, which can lead to inconsistencies in the data. + +```tut:silent +val fetchCached: Fetch[(User, User)] = for { + aUser <- getUser(1) + anotherUser <- getUser(1) +} yield (aUser, anotherUser) +``` + +The above fetch asks for the same identity multiple times. Let's see what happens when executing it. + +```tut:book +await(fetchCached.runA[Future]) +``` + +As you can see, the `User` with id 1 was fetched only once in a single round-trip. The next +time it was needed we used the cached versions, thus avoiding another request to the user data +source. + +## Combining data from multiple sources + +Now that we know about some of the optimizations that Fetch can perform to read data efficiently, +let's look at how we can combine more than one data source. + + +Imagine that we are rendering a blog and have the following types for posts: + +```tut:silent +type PostId = Int +case class Post(id: PostId, author: UserId, content: String) +``` + +As you can see, every `Post` has an author, but it refers to the author by its id. We'll implement a data source for retrieving a post given a post id. + +```tut:silent +val postDatabase: Map[PostId, Post] = Map( + 1 -> Post(1, 2, "An article"), + 2 -> Post(2, 3, "Another article"), + 3 -> Post(3, 4, "Yet another article") +) + +implicit object PostSource extends DataSource[PostId, Post]{ + override def fetchOne(id: PostId): Query[Option[Post]] = { + Query.later({ + println(s"Fetching one post $id") + postDatabase.get(id) + }) + } + override def fetchMany(ids: NonEmptyList[PostId]): Query[Map[PostId, Post]] = { + Query.later({ + println(s"Fetching many posts $ids") + postDatabase.filterKeys(ids.unwrap.contains) + }) + } +} + +def getPost(id: PostId): Fetch[Post] = Fetch(id) +``` + +We can also implement a function for fetching a post's author given a post: + +```tut:silent +def getAuthor(p: Post): Fetch[User] = Fetch(p.author) +``` + +Now that we have multiple sources let's mix them in the same fetch. 
+ +```tut:silent +val fetchMulti: Fetch[(Post, User)] = for { + post <- getPost(1) + user <- getAuthor(post) +} yield (post, user) +``` + +We can now run the previous fetch, querying the posts data source first and the user data source afterwards. + +```tut:book +await(fetchMulti.runA[Future]) +``` + +In the previous example, we fetched a post given its id and then fetched its author. This +data could come from entirely different places, but Fetch makes working with heterogeneous sources +of data very easy. + +### Concurrency + +Combining multiple independent requests to the same data source can have two outcomes: + + - if the data sources are the same, the request is batched + - otherwise, both data sources are queried at the same time + +In the following example we are fetching from different data sources so both requests will be +evaluated together. + +```tut:silent +val fetchConcurrent: Fetch[(Post, User)] = getPost(1).product(getUser(2)) +``` + +The above example combines data from two different sources, and the library knows they are independent. + +```tut:book +await(fetchConcurrent.runA[Future]) ``` -Althought `fetchTwoUsers` needs two rounds to complete we still can execute it synchronously: +Since we are interpreting the fetch to the `Id` monad, that doesn't give us any parallelism; the fetches +will be run sequentially. However, if we interpret it to a `Future` each request will run in its own logical +thread. + +## Combinators + +Besides `flatMap` for sequencing fetches and `product` for running them concurrently, Fetch provides a number of +other combinators. + +### Sequence + +Whenever we have a list of fetches of the same type and want to run them concurrently, we can use the `sequence` +combinator. It takes a `List[Fetch[A]]` and gives you back a `Fetch[List[A]]`, batching the fetches to the same +data source and running fetches to different sources in parallel. Note that the `sequence` combinator is more general and works not only on lists but on any type that has a [Traverse](http://typelevel.org/cats/tut/traverse.html) instance. + +```tut:silent +import cats.std.list._ +import cats.syntax.traverse._ + +val fetchSequence: Fetch[List[User]] = List(getUser(1), getUser(2), getUser(3)).sequence +``` + +Since `sequence` uses applicative operations internally, the library is able to perform optimizations across all the sequenced fetches. + +```tut:book +await(fetchSequence.runA[Future]) +``` + +As you can see, requests to the user data source were batched, thus fetching all the data in one round. + +### Traverse + +Another interesting combinator is `traverse`, which is the composition of `map` and `sequence`. + +```tut:silent +val fetchTraverse: Fetch[List[User]] = List(1, 2, 3).traverse(getUser) +``` + +As you may have guessed, all the optimizations made by `sequence` still apply when using `traverse`. + +```tut:book +await(fetchTraverse.runA[Future]) +``` + +# Queries + +## Synchronous + +## Asynchronous + +# Caching + +As we have learned, Fetch caches intermediate results implicitly using a cache. You can +provide a prepopulated cache for running a fetch, replay a fetch with the cache of a previous +one, and even implement a custom cache. + +## Prepopulating a cache + +We'll be using the default in-memory cache, prepopulated with some data. The cache key of an identity +is calculated with the `DataSource`'s `identity` method. 
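+
+As a rough sketch of what such a key looks like (assuming the default `identity` simply pairs the
+data source's `name` with the requested id, which matches the `DataSourceIdentity` alias
+`(DataSourceName, Any)`):
+
+```scala
+// Illustrative only: the first element depends on the data source's `name` implementation.
+val key: DataSourceIdentity = UserSource.identity(1) // roughly ("UserSource", 1)
+```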
+ +```tut:book +val cache = InMemoryCache(UserSource.identity(1) -> User(1, "@dialelo")) +``` + +We can pass a cache as the second argument when running a fetch with `Fetch.run`. + +```tut:book +await(fetchUser.runA[Future](cache)) +``` + +As you can see, when all the data is cached, no future to the data sources is executed since the results are available +in the cache. + +```tut:silent +val fetchManyUsers: Fetch[List[User]] = List(1, 2, 3).traverse(getUser) +``` + +If only part of the data is cached, the cached data won't be asked for: + +```tut:book +await(fetchManyUsers.runA[Future](cache)) +``` + +## Replaying a fetch without querying any data source + +When running a fetch, we are generally interested in its final result. However, we also have access to the cache +and information about the executed rounds once we run a fetch. Fetch's interpreter keeps its state in an environment +(implementing the `Env` trait), and we can get both the environment and result after running a fetch using `Fetch.runFetch` +instead of `Fetch.run` or `value.runF` via it's implicit syntax. + +Knowing this, we can replay a fetch reusing the cache of a previous one. The replayed fetch won't have to call any of the +data sources. + +```tut:book +val populatedCache = await(fetchManyUsers.runE[Future].map(_.cache)) + +await(fetchManyUsers.runA[Future](populatedCache)) +``` + +## Implementing a custom cache + +The default cache is implemented as an immutable in-memory map, but users are free to use their own caches when running a fetch. Your cache should implement the `DataSourceCache` trait, and after that you can pass it to Fetch's `run` methods. + +There is no need for the cache to be mutable since fetch executions run in an interpreter that uses the state monad. Note that the `update` method in the `DataSourceCache` trait yields a new, updated cache. + +```scala +trait DataSourceCache { + def update[A](k: DataSourceIdentity, v: A): DataSourceCache + def get(k: DataSourceIdentity): Option[Any] +} +``` + +Let's implement a cache that forgets everything we store in it. + +```tut:silent +final case class ForgetfulCache() extends DataSourceCache { + override def get(k: DataSourceIdentity): Option[Any] = None + override def update[A](k: DataSourceIdentity, v: A): ForgetfulCache = this +} +``` + +We can now use our implementation of the cache when running a fetch. + +```tut:book +val fetchSameTwice: Fetch[(User, User)] = for { + one <- getUser(1) + another <- getUser(1) +} yield (one, another) + +await(fetchSameTwice.runA[Future](ForgetfulCache())) +``` + +# Error handling + +Fetch is used for reading data from remote sources and the queries we perform can and will fail at some point. What happens if we run a fetch and fails? We'll create a fetch that always fails to learn about it. + +```tut:silent +val fetchError: Fetch[User] = (new Exception("Oh noes")).fetch +``` + +If we try to execute and block for its value the exception will be thrown. + +```tut:fail +await(fetchError.runA[Future]) +``` + +We can use the `FetchMonadError[Future]#attempt` to convert a fetch result into a disjuntion and avoid throwing exceptions. Fetch provides an implicit instance of `FetchMonadError[Future]` that we can import from `fetch.implicits._` to have it available. + +```tut:silent +import fetch.implicits._ +``` + +Now we can convert `Future[User]` into `Future[Throwable Xor User]` and capture exceptions as values in the left of the disjunction. 
+ +```tut:book +import cats.data.Xor + +val safeResult: Future[Throwable Xor User] = FetchMonadError[Future].attempt(fetchError.runA) +val finalValue: Throwable Xor User = await(safeResult) +``` + +And more succintly with Cat's applicative error syntax. + +```tut:book +import cats.syntax.applicativeError._ + +await(fetchError.runA[Future].attempt) +``` + + +## Missing identities + +You've probably noticed that `DataSource.fetch` takes a list of identities and returns a map of identities to their result, taking +into account the possibility of some identities not being found. Whenever an identity cannot be found, the fetch execution will +fail. + +Whenever a fetch fails, a `FetchFailure` exception is thrown. The `FetchFailure` will have the environment, which gives you information +about the execution of the fetch. + +# Syntax + +## Implicit syntax + +Fetch provides implicit syntax to lift any value to the context of a `Fetch` in addition to the most common used +combinators active within `Fetch` instances. + +### pure + +Plain values can be lifted to the Fetch monad with `value.fetch`: + +```tut:silent +val fetchPure: Fetch[Int] = 42.fetch +``` + +Executing a pure fetch doesn't query any data source, as expected. + +```tut:book +await(fetchPure.runA[Future]) +``` + +### error + +Errors can also be lifted to the Fetch monad via `exception.fetch`. + +```tut:silent +val fetchFail: Fetch[Int] = Fetch.error(new Exception("Something went terribly wrong")) +``` + +Note that interpreting an errorful fetch to `Future` and blocking for its result will throw the exception. + +```tut:fail +await(fetchFail.runA[Future]) +``` + +### join + +We can compose two independent fetches with `fetch1.join(fetch2)`. + +```tut:silent +val fetchJoined: Fetch[(Post, User)] = getPost(1).join(getUser(2)) +``` + +If the fetches are to the same data source they will be batched; if they aren't, they will be evaluated at the same time. + +```tut:book +await(fetchJoined.runA) +``` + +### runA + +Run directly any fetch to a `Future` with `fetch1.runA`. + +```tut:book +await(getPost(1).runA) +``` + +### runE + +Extract a fetch an get it's runtime environment `fetch1.runE`. + +```tut:book +await(getPost(1).runE) +``` + +### runF + +Run a fetch obtaining the environment and final value `fetch1.runF`. + +```tut:book +await(getPost(1).runF) +``` + +## Companion object + +We've been using cats' syntax and `fetch.syntax` throughout the examples since it's more concise and general than the +methods in the `Fetch` companion object. However, you can use the methods in the companion object +directly. + +Note that using cats syntax gives you a plethora of combinators, much richer that what the companion object provides. + +### pure + +Plain values can be lifted to the Fetch monad with `Fetch#pure`: + +```tut:silent +val fetchPure: Fetch[Int] = Fetch.pure(42) +``` + +Executing a pure fetch doesn't query any data source, as expected. + +```tut:book +await(Fetch.run[Future](fetchPure)) +``` + +### error + +Errors can also be lifted to the Fetch monad via `Fetch#error`. + +```tut:silent +val fetchFail: Fetch[Int] = Fetch.error(new Exception("Something went terribly wrong")) +``` + +Note that interpreting an errorful fetch to `Future` won't throw the exception until we block for its result it. + +```tut:fail +await(Fetch.run(fetchFail)) +``` + +### join + +We can compose two independent fetches with `Fetch#join`. 
+ +```tut:silent +val fetchJoined: Fetch[(Post, User)] = Fetch.join(getPost(1), getUser(2)) +``` + +If the fetches are to the same data source they will be batched; if they aren't, they will be evaluated at the same time. + +```tut:book +await(Fetch.run(fetchJoined)) +``` + +### sequence + +The `Fetch#sequence` combinator turns a `List[Fetch[A]]` into a `Fetch[List[A]]`, running all the fetches concurrently +and batching when possible. + +```tut:silent +val fetchSequence: Fetch[List[User]] = Fetch.sequence(List(getUser(1), getUser(2), getUser(3))) +``` + +Note that `Fetch#sequence` is not as general as the `sequence` method from `Traverse`, but performs the same optimizations. + +```tut:book +await(Fetch.run(fetchSequence)) +``` + +### traverse + +The `Fetch#traverse` combinator is a combination of `map` and `sequence`. + +```tut:silent +val fetchTraverse: Fetch[List[User]] = Fetch.traverse(List(1, 2, 3))(getUser) +``` + +Note that `Fetch#traverse` is not as general as the `traverse` method from `Traverse`, but performs the same optimizations. + +```tut:book +await(Fetch.run(fetchTraverse)) +``` + +## cats + +Fetch is built using cats' Free monad construction and thus works out of the box with +cats syntax. Using cats' syntax, we can make fetch declarations more concise, without +the need to use the combinators in the `Fetch` companion object. + +Fetch provides its own instance of `Applicative[Fetch]`. Whenever we use applicative +operations on more than one `Fetch`, we know that the fetches are independent meaning +we can perform optimizations such as batching and concurrent requests. + +If we were to use the default `Applicative[Fetch]` operations, which are implemented in terms of `flatMap`, +we wouldn't have information about the independency of multiple fetches. + +### Applicative + +The `|@|` operator allows us to combine multiple independent fetches, even when they +are from different types, and apply a pure function to their results. We can use it +as a more powerful alternative to the `product` method or `Fetch#join`: + +```tut:silent +import cats.syntax.cartesian._ + +val fetchThree: Fetch[(Post, User, Post)] = (getPost(1) |@| getUser(2) |@| getPost(2)).tupled +``` + +Notice how the queries to posts are batched. + +```tut:book +await(fetchThree.runA) +``` + +More interestingly, we can use it to apply a pure function to the results of various +fetches. + +```tut:book +val fetchFriends: Fetch[String] = (getUser(1) |@| getUser(2)).map({ (one, other) => + s"${one.username} is friends with ${other.username}" +}) + +await(fetchFriends.runA) +``` + +The above example is equivalent to the following using the `Fetch#join` method: + +```tut:book +val fetchFriends: Fetch[String] = Fetch.join(getUser(1), getUser(2)).map({ case (one, other) => + s"${one.username} is friends with ${other.username}" +}) + +await(fetchFriends.runA) +``` + +# Concurrency + +Fetch lets you choose the concurrency monad you want for running fetches, supporting the Scala and Scala.js +standard library concurrency primitives. However not everyone is using `Future` and Fetch acknowledges it, +providing support for the most widespread concurrency monads and making it easy for users to run a fetch to a +custom type. + +## Future + +As you have learned through the examples, you can run a fetch into a `Future` simply by importing `fetch.implicits`. It +contains an instance of `FetchMonadError[Future]` given that you provide an implicit `ExecutionContext`. 
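+
+As a minimal sketch, assuming the `fetchUser` fetch and the `await` helper defined earlier are still in scope:
+
+```scala
+import scala.concurrent._
+import ExecutionContext.Implicits.global
+
+import fetch.implicits._
+import fetch.syntax._
+
+// Interpret the fetch to a Future; the interpreter uses the implicit ExecutionContext.
+val futureUser: Future[User] = fetchUser.runA[Future]
+
+// Blocking is JVM-only; on Scala.js you would map over the Future instead.
+await(futureUser)
+```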
+ +Futures are available both in Scala and Scala.js although in the examples we have used some APIs that are exclusive +to Scala (`Thread` and `Await`) for educational purposes. + +## Twitter Future + +TODO + +## Scalaz task + +TODO + +## Monix Task + +Scala and Scala.js + +### JVM + +Scheduler for io in JVM + +### JS + +## Eval + +Unsafe, for testing purposes, jvm only + +# Resources + +- [Code](https://github.com/47deg/fetch) on GitHub. +- [Documentation site](http://47deg.github.io/fetch/) +- [Fetch: Simple & Efficient data access](https://www.youtube.com/watch?v=45fcKYFb0EU) talk at [Typelevel Summit in Oslo](http://typelevel.org/event/2016-05-summit-oslo/) + +# Acknowledgements + +Fetch stands on the shoulders of giants: + +- [Haxl](https://github.com/facebook/haxl) is Facebook's implementation (Haskell) of the [original paper Fetch is based on](http://community.haskell.org/~simonmar/papers/haxl-icfp14.pdf). +- [Clump](http://getclump.io) has inspired the signature of the `DataSource#fetch` method. +- [Stitch](https://engineering.twitter.com/university/videos/introducing-stitch) is an in-house Twitter library that is not open source but has inspired Fetch's high-level API. +- [Cats](http://typelevel.org/cats/), a library for functional programming in Scala. +- [Monix](https://monix.io) high-performance and multiplatform (Scala / Scala.js) asynchronous programming library. + diff --git a/docs/src/tut/index.md b/docs/src/tut/index.md index 3c40143d..cfab08d0 100644 --- a/docs/src/tut/index.md +++ b/docs/src/tut/index.md @@ -142,13 +142,13 @@ implicit object LengthSource extends DataSource[String, Int]{ override def fetchOne(id: String): Query[Option[Int]] = { Query.async((ok, fail) => { println(s"[${Thread.currentThread.getId}] One Length $id") - ok(Option(id.size)) + ok((Option(id.size))) }) } override def fetchMany(ids: NonEmptyList[String]): Query[Map[String, Int]] = { Query.async((ok, fail) => { println(s"[${Thread.currentThread.getId}] Many Length $ids") - ok(ids.unwrap.map(i => (i, i.size)).toMap) + ok(ids.unwrap.map(i => (i, i.size)).toMap) }) } } From 1a4d6ff6c64de207fafab62c6a958818ef1d632f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Wed, 1 Jun 2016 23:28:50 +0200 Subject: [PATCH 18/40] Recover FetchMonadError[Eval]#runQuery implementation --- jvm/src/main/scala/implicits.scala | 28 +++++++++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/jvm/src/main/scala/implicits.scala b/jvm/src/main/scala/implicits.scala index 129637a0..2901f568 100644 --- a/jvm/src/main/scala/implicits.scala +++ b/jvm/src/main/scala/implicits.scala @@ -24,9 +24,31 @@ import scala.concurrent.duration._ object unsafeImplicits { implicit val fetchEvalFetchMonadError: FetchMonadError[Eval] = new FetchMonadError[Eval] { - override def runQuery[A](j: Query[A]): Eval[A] = { - // TODO - ??? 
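+    // Sync queries are already Eval values and are returned as-is, Ap queries evaluate both sides
+    // and combine them applicatively, and Async queries block the calling thread on a CountDownLatch
+    // until the callback or errback fires (the query's `timeout` is not honoured here). This blocking
+    // behaviour is why the instance is kept in the JVM-only `unsafeImplicits`.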
+ override def runQuery[A](j: Query[A]): Eval[A] = j match { + case Sync(e) => e + case Ap(qf, qx) => ap(runQuery(qf))(runQuery(qx)) + case Async(action, timeout) => + Eval.later { + val latch = new java.util.concurrent.CountDownLatch(1) + @volatile var result: Xor[Throwable, A] = null + new Thread( + new Runnable { + def run() = { + action(a => { + result = Xor.Right(a); + latch.countDown + }, err => { + result = Xor.Left(err); + latch.countDown + }) + } + }).start() + latch.await + result match { + case Xor.Left(err) => throw err + case Xor.Right(v) => v + } + } } def pure[A](x: A): Eval[A] = Eval.now(x) def handleErrorWith[A](fa: Eval[A])(f: Throwable => Eval[A]): Eval[A] = From 6bde038939fec001ffd1e5108611643b168bcc3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sat, 4 Jun 2016 13:58:42 +0200 Subject: [PATCH 19/40] FetchMonadError[Id] --- jvm/src/main/scala/implicits.scala | 47 +++++++++++++++++++++++++++--- 1 file changed, 43 insertions(+), 4 deletions(-) diff --git a/jvm/src/main/scala/implicits.scala b/jvm/src/main/scala/implicits.scala index 2901f568..8a939ff0 100644 --- a/jvm/src/main/scala/implicits.scala +++ b/jvm/src/main/scala/implicits.scala @@ -14,16 +14,18 @@ * limitations under the License. */ -package fetch +package fetch.unsafe -import cats.Eval +import fetch._ + +import cats.{Id, Eval} import cats.data.Xor import scala.concurrent._ import scala.concurrent.duration._ -object unsafeImplicits { - implicit val fetchEvalFetchMonadError: FetchMonadError[Eval] = new FetchMonadError[Eval] { +object implicits { + implicit val evalFetchMonadError: FetchMonadError[Eval] = new FetchMonadError[Eval] { override def runQuery[A](j: Query[A]): Eval[A] = j match { case Sync(e) => e case Ap(qf, qx) => ap(runQuery(qf))(runQuery(qx)) @@ -63,4 +65,41 @@ object unsafeImplicits { def flatMap[A, B](fa: Eval[A])(f: A => Eval[B]): Eval[B] = fa.flatMap(f) } + + implicit val idFetchMonadError: FetchMonadError[Id] = new FetchMonadError[Id] { + override def runQuery[A](j: Query[A]): Id[A] = j match { + case Sync(e) => e.value + case Ap(qf, qx) => ap(runQuery(qf))(runQuery(qx)) + case Async(action, timeout) => { + val latch = new java.util.concurrent.CountDownLatch(1) + @volatile var result: Xor[Throwable, A] = null + new Thread( + new Runnable { + def run() = { + action(a => { + result = Xor.Right(a); + latch.countDown + }, err => { + result = Xor.Left(err); + latch.countDown + }) + } + }).start() + latch.await + result match { + case Xor.Left(err) => throw err + case Xor.Right(v) => v + } + } + } + def pure[A](x: A): Id[A] = x + def handleErrorWith[A](fa: Id[A])(f: Throwable => Id[A]): Id[A] = + try { + fa + } catch { + case ex: Throwable => f(ex) + } + def raiseError[A](e: Throwable): Id[A] = throw e + def flatMap[A, B](fa: Id[A])(f: A => Id[B]): Id[B] = f(fa) + } } From f5b83ab876689483290ba30adfecae14565cead5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sat, 4 Jun 2016 13:58:51 +0200 Subject: [PATCH 20/40] Improve FetchMonadError[Future] --- shared/src/main/scala/implicits.scala | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/shared/src/main/scala/implicits.scala b/shared/src/main/scala/implicits.scala index 6b95fbd1..6d49ad5f 100644 --- a/shared/src/main/scala/implicits.scala +++ b/shared/src/main/scala/implicits.scala @@ -16,15 +16,17 @@ package fetch -import cats.Eval +import cats.{Eval, MonadError} +import cats.std.FutureInstances import scala.concurrent.{Promise, Future, ExecutionContext} -object implicits { 
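+// Extending Cats' FutureInstances keeps a MonadError[Future, Throwable] in scope for users of this
+// object, so the ME parameter below can be resolved; runQuery relies on it to lift Sync queries via
+// pureEval.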
+object implicits extends FutureInstances { implicit def fetchFutureFetchMonadError( - implicit ec: ExecutionContext + implicit ec: ExecutionContext, + ME: MonadError[Future, Throwable] ): FetchMonadError[Future] = new FetchMonadError[Future] { override def runQuery[A](j: Query[A]): Future[A] = j match { - case Sync(e) => pureEval(e) + case Sync(e) => ME.pureEval(e) case Async(ac, timeout) => { val p = Promise[A]() From a58c2516e787065792e733079281e986278b5855 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sat, 4 Jun 2016 13:59:01 +0200 Subject: [PATCH 21/40] Update docs to run fetches to Id --- docs/src/tut/docs.md | 164 ++++++++++++++++++++++++------------------ docs/src/tut/index.md | 28 ++++---- 2 files changed, 108 insertions(+), 84 deletions(-) diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md index c296b2cd..15e54f5c 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -114,14 +114,14 @@ val userDatabase: Map[UserId, User] = Map( implicit object UserSource extends DataSource[UserId, User]{ override def fetchOne(id: UserId): Query[Option[User]] = { - Query.now({ - println(s"Fetching one user $id") + Query.later({ + println(s"[${Thread.currentThread.getId}] One User $id") userDatabase.get(id) }) } override def fetchMany(ids: NonEmptyList[UserId]): Query[Map[UserId, User]] = { - Query.now({ - println(s"Fetching many users $ids") + Query.later({ + println(s"[${Thread.currentThread.getId}] Many Users $ids") userDatabase.filterKeys(ids.unwrap.contains) }) } @@ -161,28 +161,28 @@ dependencies. val fetchUser: Fetch[User] = getUser(1) ``` -A `Fetch` is just a value, and in order to be able to execute it we need to run with a concurrency monad first. We'll run `fetchUser` using `Future` as our target monad, so let's do some standard imports first. +A `Fetch` is just a value, and in order to be able to get its value we need to run it to a monad first. The +target monad `M[_]` must be able to lift a `Query[A]` to `M[A]`, evaluating the query in the monad's context. -```tut:silent -import scala.concurrent._ -import ExecutionContext.Implicits.global +We'll run `fetchUser` using `Id` as our target monad, so let's do some imports first. Note that interpreting +a fetch to a non-concurrency monad like `Id` or `Eval` is only recommended for trying things out in a Scala +console, that's why for using them you need to import `fetch.unsafe.implicits`. -import fetch.implicits._ +```tut:silent +import cats.Id +import fetch.unsafe.implicits._ import fetch.syntax._ ``` -Let's pretend we have a function from `Future[A]` to `A` called `await`. Note that we can't implement it in Scala.js. - -```tut:silent -import scala.concurrent.duration._ - -def await[A](t: Future[A]): A = Await.result(t, Duration.Inf) -``` +Note that running a fetch to non-concurrency monads like `Id` or `Eval` is not supported in Scala.js. +In real-life scenarios you'll want to run your fetches to `Future` or a `Task` type provided by a library like +[Monix](https://monix.io/) or [fs2](https://github.com/functional-streams-for-scala/fs2), both of which are supported +in Fetch. -We can now run the fetch to a future and see its result: +We can now run the fetch and see its result: ```tut:book -await(fetchUser.runA[Future]) +fetchUser.runA[Id] ``` ### Sequencing @@ -199,7 +199,7 @@ val fetchTwoUsers: Fetch[(User, User)] = for { When composing fetches with `flatMap` we are telling Fetch that the second one depends on the previous one, so it isn't able to make any optimizations. 
When running the above fetch, we will query the user data source in two rounds: one for the user with id 1 and another for the user with id 2. ```tut:book -await(fetchTwoUsers.runA[Future]) +fetchTwoUsers.runA[Id] ``` ### Batching @@ -214,10 +214,10 @@ import cats.syntax.cartesian._ val fetchProduct: Fetch[(User, User)] = getUser(1).product(getUser(2)) ``` -Note how both ids (1 and 2) are requested in a single future to the data source when executing the fetch. +Note how both ids (1 and 2) are requested in a single query to the data source when executing the fetch. ```tut:book -await(fetchProduct.runA[Future]) +fetchProduct.runA[Id] ``` ### Deduplication @@ -231,7 +231,7 @@ val fetchDuped: Fetch[(User, User)] = getUser(1).product(getUser(1)) Note that when running the fetch, the identity 1 is only requested once even when it is needed by both fetches. ```tut:book -await(fetchDuped.runA[Future]) +fetchDuped.runA[Id] ``` ### Caching @@ -251,7 +251,7 @@ val fetchCached: Fetch[(User, User)] = for { The above fetch asks for the same identity multiple times. Let's see what happens when executing it. ```tut:book -await(fetchCached.runA[Future]) +fetchCached.runA[Id] ``` As you can see, the `User` with id 1 was fetched only once in a single round-trip. The next @@ -283,13 +283,13 @@ val postDatabase: Map[PostId, Post] = Map( implicit object PostSource extends DataSource[PostId, Post]{ override def fetchOne(id: PostId): Query[Option[Post]] = { Query.later({ - println(s"Fetching one post $id") + println(s"[${Thread.currentThread.getId}] One Posts $id") postDatabase.get(id) }) } override def fetchMany(ids: NonEmptyList[PostId]): Query[Map[PostId, Post]] = { Query.later({ - println(s"Fetching many posts $ids") + println(s"[${Thread.currentThread.getId}] Many Posts $ids") postDatabase.filterKeys(ids.unwrap.contains) }) } @@ -316,7 +316,7 @@ val fetchMulti: Fetch[(Post, User)] = for { We can now run the previous fetch, querying the posts data source first and the user data source afterwards. ```tut:book -await(fetchMulti.runA[Future]) +fetchMulti.runA[Id] ``` In the previous example, we fetched a post given its id and then fetched its author. This @@ -340,12 +340,28 @@ val fetchConcurrent: Fetch[(Post, User)] = getPost(1).product(getUser(2)) The above example combines data from two different sources, and the library knows they are independent. ```tut:book -await(fetchConcurrent.runA[Future]) +fetchConcurrent.runA[Id] ``` -Since we are interpreting the fetch to the `Id` monad, that doesn't give us any parallelism; the fetches -will be run sequentially. However, if we interpret it to a `Future` each request will run in its own logical -thread. +Since we are running the fetch to `Id`, we couldn't exploit parallelism for reading from both sources +at the same time. Let's do some imports in order to be able to run fetches to a `Future`. + +```tut:silent +import scala.concurrent._ +import ExecutionContext.Implicits.global +import scala.concurrent.duration._ +``` + +Let's see what happens when running the same fetch to a `Future`, note that you cannot block for a +future's result in Scala.js. + +```tut:book +import fetch.implicits._ + +Await.result(fetchConcurrent.runA[Future], Duration.Inf) +``` + +As you can see, each independent request ran in its own logical thread. ## Combinators @@ -368,7 +384,7 @@ val fetchSequence: Fetch[List[User]] = List(getUser(1), getUser(2), getUser(3)). 
Since `sequence` uses applicative operations internally, the library is able to perform optimizations across all the sequenced fetches. ```tut:book -await(fetchSequence.runA[Future]) +fetchSequence.runA[Id] ``` As you can see, requests to the user data source were batched, thus fetching all the data in one round. @@ -384,18 +400,22 @@ val fetchTraverse: Fetch[List[User]] = List(1, 2, 3).traverse(getUser) As you may have guessed, all the optimizations made by `sequence` still apply when using `traverse`. ```tut:book -await(fetchTraverse.runA[Future]) +fetchTraverse.runA[Id] ``` # Queries ## Synchronous +### Now + +### Later + ## Asynchronous # Caching -As we have learned, Fetch caches intermediate results implicitly using a cache. You can +As we have learned, Fetch caches intermediate results implicitly. You can provide a prepopulated cache for running a fetch, replay a fetch with the cache of a previous one, and even implement a custom cache. @@ -411,10 +431,16 @@ val cache = InMemoryCache(UserSource.identity(1) -> User(1, "@dialelo")) We can pass a cache as the second argument when running a fetch with `Fetch.run`. ```tut:book -await(fetchUser.runA[Future](cache)) +Fetch.run[Id](fetchUser, cache) ``` -As you can see, when all the data is cached, no future to the data sources is executed since the results are available +And as the first when using fetch syntax: + +```tut:book +fetchUser.runA[Id](cache) +``` + +As you can see, when all the data is cached, no query to the data sources is executed since the results are available in the cache. ```tut:silent @@ -424,7 +450,7 @@ val fetchManyUsers: Fetch[List[User]] = List(1, 2, 3).traverse(getUser) If only part of the data is cached, the cached data won't be asked for: ```tut:book -await(fetchManyUsers.runA[Future](cache)) +fetchManyUsers.runA[Id](cache) ``` ## Replaying a fetch without querying any data source @@ -438,9 +464,9 @@ Knowing this, we can replay a fetch reusing the cache of a previous one. The rep data sources. ```tut:book -val populatedCache = await(fetchManyUsers.runE[Future].map(_.cache)) +val env = fetchManyUsers.runE[Id] -await(fetchManyUsers.runA[Future](populatedCache)) +fetchManyUsers.runA[Id](env.cache) ``` ## Implementing a custom cache @@ -473,7 +499,7 @@ val fetchSameTwice: Fetch[(User, User)] = for { another <- getUser(1) } yield (one, another) -await(fetchSameTwice.runA[Future](ForgetfulCache())) +fetchSameTwice.runA[Id](ForgetfulCache()) ``` # Error handling @@ -484,36 +510,40 @@ Fetch is used for reading data from remote sources and the queries we perform ca val fetchError: Fetch[User] = (new Exception("Oh noes")).fetch ``` -If we try to execute and block for its value the exception will be thrown. +If we try to execute to `Id` the exception will be thrown. ```tut:fail -await(fetchError.runA[Future]) +fetchError.runA[Id] ``` -We can use the `FetchMonadError[Future]#attempt` to convert a fetch result into a disjuntion and avoid throwing exceptions. Fetch provides an implicit instance of `FetchMonadError[Future]` that we can import from `fetch.implicits._` to have it available. +Since `Id` runs the fetch eagerly, the only way to recover from errors when running it is surrounding it with a `try-catch` block. We'll use Cats' `Eval` type as the target +monad which, instead of evaluating the fetch eagerly, gives us an `Eval[A]` that we can run anytime with its `.value` method. + +We can use the `FetchMonadError[Eval]#attempt` to convert a fetch result into a disjuntion and avoid throwing exceptions. 
Fetch provides an implicit instance of `FetchMonadError[Eval]` that we can import from `fetch.unsafe.implicits._` to have it available. ```tut:silent -import fetch.implicits._ +import fetch.unsafe.implicits._ ``` -Now we can convert `Future[User]` into `Future[Throwable Xor User]` and capture exceptions as values in the left of the disjunction. +Now we can convert `Eval[User]` into `Eval[Throwable Xor User]` and capture exceptions as values in the left of the disjunction. ```tut:book +import cats.Eval import cats.data.Xor -val safeResult: Future[Throwable Xor User] = FetchMonadError[Future].attempt(fetchError.runA) -val finalValue: Throwable Xor User = await(safeResult) +val safeResult: Eval[Throwable Xor User] = FetchMonadError[Eval].attempt(fetchError.runA[Eval]) + +safeResult.value ``` -And more succintly with Cat's applicative error syntax. +And more succintly with Cats' applicative error syntax. ```tut:book import cats.syntax.applicativeError._ -await(fetchError.runA[Future].attempt) +fetchError.runA[Eval].attempt.value ``` - ## Missing identities You've probably noticed that `DataSource.fetch` takes a list of identities and returns a map of identities to their result, taking @@ -541,7 +571,7 @@ val fetchPure: Fetch[Int] = 42.fetch Executing a pure fetch doesn't query any data source, as expected. ```tut:book -await(fetchPure.runA[Future]) +fetchPure.runA[Id] ``` ### error @@ -555,7 +585,7 @@ val fetchFail: Fetch[Int] = Fetch.error(new Exception("Something went terribly w Note that interpreting an errorful fetch to `Future` and blocking for its result will throw the exception. ```tut:fail -await(fetchFail.runA[Future]) +fetchFail.runA[Id] ``` ### join @@ -569,7 +599,7 @@ val fetchJoined: Fetch[(Post, User)] = getPost(1).join(getUser(2)) If the fetches are to the same data source they will be batched; if they aren't, they will be evaluated at the same time. ```tut:book -await(fetchJoined.runA) +fetchJoined.runA[Id] ``` ### runA @@ -577,7 +607,7 @@ await(fetchJoined.runA) Run directly any fetch to a `Future` with `fetch1.runA`. ```tut:book -await(getPost(1).runA) +getPost(1).runA[Id] ``` ### runE @@ -585,7 +615,7 @@ await(getPost(1).runA) Extract a fetch an get it's runtime environment `fetch1.runE`. ```tut:book -await(getPost(1).runE) +getPost(1).runE[Id] ``` ### runF @@ -593,12 +623,12 @@ await(getPost(1).runE) Run a fetch obtaining the environment and final value `fetch1.runF`. ```tut:book -await(getPost(1).runF) +getPost(1).runF[Id] ``` ## Companion object -We've been using cats' syntax and `fetch.syntax` throughout the examples since it's more concise and general than the +We've been using Cats' syntax and `fetch.syntax` throughout the examples since it's more concise and general than the methods in the `Fetch` companion object. However, you can use the methods in the companion object directly. @@ -615,7 +645,7 @@ val fetchPure: Fetch[Int] = Fetch.pure(42) Executing a pure fetch doesn't query any data source, as expected. ```tut:book -await(Fetch.run[Future](fetchPure)) +Fetch.run[Id](fetchPure) ``` ### error @@ -629,7 +659,7 @@ val fetchFail: Fetch[Int] = Fetch.error(new Exception("Something went terribly w Note that interpreting an errorful fetch to `Future` won't throw the exception until we block for its result it. 
```tut:fail -await(Fetch.run(fetchFail)) +Fetch.run[Id](fetchFail) ``` ### join @@ -643,7 +673,7 @@ val fetchJoined: Fetch[(Post, User)] = Fetch.join(getPost(1), getUser(2)) If the fetches are to the same data source they will be batched; if they aren't, they will be evaluated at the same time. ```tut:book -await(Fetch.run(fetchJoined)) +Fetch.run[Id](fetchJoined) ``` ### sequence @@ -658,7 +688,7 @@ val fetchSequence: Fetch[List[User]] = Fetch.sequence(List(getUser(1), getUser(2 Note that `Fetch#sequence` is not as general as the `sequence` method from `Traverse`, but performs the same optimizations. ```tut:book -await(Fetch.run(fetchSequence)) +Fetch.run[Id](fetchSequence) ``` ### traverse @@ -672,13 +702,13 @@ val fetchTraverse: Fetch[List[User]] = Fetch.traverse(List(1, 2, 3))(getUser) Note that `Fetch#traverse` is not as general as the `traverse` method from `Traverse`, but performs the same optimizations. ```tut:book -await(Fetch.run(fetchTraverse)) +Fetch.run[Id](fetchTraverse) ``` ## cats -Fetch is built using cats' Free monad construction and thus works out of the box with -cats syntax. Using cats' syntax, we can make fetch declarations more concise, without +Fetch is built using Cats' Free monad construction and thus works out of the box with +cats syntax. Using Cats' syntax, we can make fetch declarations more concise, without the need to use the combinators in the `Fetch` companion object. Fetch provides its own instance of `Applicative[Fetch]`. Whenever we use applicative @@ -703,7 +733,7 @@ val fetchThree: Fetch[(Post, User, Post)] = (getPost(1) |@| getUser(2) |@| getPo Notice how the queries to posts are batched. ```tut:book -await(fetchThree.runA) +fetchThree.runA[Id] ``` More interestingly, we can use it to apply a pure function to the results of various @@ -714,7 +744,7 @@ val fetchFriends: Fetch[String] = (getUser(1) |@| getUser(2)).map({ (one, other) s"${one.username} is friends with ${other.username}" }) -await(fetchFriends.runA) +fetchFriends.runA[Id] ``` The above example is equivalent to the following using the `Fetch#join` method: @@ -724,7 +754,7 @@ val fetchFriends: Fetch[String] = Fetch.join(getUser(1), getUser(2)).map({ case s"${one.username} is friends with ${other.username}" }) -await(fetchFriends.runA) +fetchFriends.runA[Id] ``` # Concurrency @@ -739,8 +769,6 @@ custom type. As you have learned through the examples, you can run a fetch into a `Future` simply by importing `fetch.implicits`. It contains an instance of `FetchMonadError[Future]` given that you provide an implicit `ExecutionContext`. -Futures are available both in Scala and Scala.js although in the examples we have used some APIs that are exclusive -to Scala (`Thread` and `Await`) for educational purposes. ## Twitter Future diff --git a/docs/src/tut/index.md b/docs/src/tut/index.md index cfab08d0..974086b4 100644 --- a/docs/src/tut/index.md +++ b/docs/src/tut/index.md @@ -89,28 +89,24 @@ def fetchString(n: Int): Fetch[String] = Fetch(n) // or, more explicitly: Fetch( Now that we can convert `Int` values to `Fetch[String]`, let's try creating a fetch. ```tut:silent -import fetch.syntax._ - val fetchOne: Fetch[String] = fetchString(1) ``` -We'll run our fetches to the well-known `Future` type in our examples, let's do some standard imports. +We'll run our fetches to the ambiend `Id` monad in our examples, let's do some imports. 
```tut:silent -import scala.concurrent._ -import ExecutionContext.Implicits.global -import scala.concurrent.duration._ - -// can only define this in Scala, not in Scala.js -def await[A](t: Future[A]): A = Await.result(t, Duration.Inf) +import cats.Id +import fetch.unsafe.implicits._ +import fetch.syntax._ ``` -And wait for the fetch to complete, note that you cannot block for a Future in Scala.js: +Note that in real-life scenarios you'll want to run a fetch to a concurrency monad, synchronous execution of a fetch +is only supported in Scala and not Scala.js and is meant for experimentation purposes. -```tut:book -import fetch.implicits._ +Let's run it and wait for the fetch to complete: -await(fetchOne.runA[Future]) +```tut:book +fetchOne.runA[Id] ``` As you can see in the previous example, the `ToStringSource` is queried once to get the value of 1. @@ -128,7 +124,7 @@ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchStrin When executing the above fetch, note how the three identities get batched and the data source is only queried once. ```tut:book -await(fetchThree.runA[Future]) +fetchThree.runA[Id] ``` ## Parallelism @@ -165,7 +161,7 @@ val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).t Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. ```tut:book -await(fetchMulti.runA[Future]) +fetchMulti.runA[Id] ``` ## Caching @@ -182,6 +178,6 @@ val fetchTwice: Fetch[(String, String)] = for { While running it, notice that the data source is only queried once. The next time the identity is requested it's served from the cache. ```tut:book -await(fetchTwice.runA[Future]) +fetchTwice.runA[Id] ``` From 3aef47ba6825034e828b4d38d9595eeec3a0c4f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sat, 4 Jun 2016 14:02:33 +0200 Subject: [PATCH 22/40] Update README --- README.md | 119 ++++++++++++++++++-------------------------------- tut/README.md | 51 ++++++++-------------- 2 files changed, 61 insertions(+), 109 deletions(-) diff --git a/README.md b/README.md index 13bb52b0..eaa1e785 100644 --- a/README.md +++ b/README.md @@ -89,45 +89,23 @@ import fetch.syntax._ val fetchOne: Fetch[String] = fetchString(1) ``` -Now that we have created a fetch, we can run it to a `Task`. Note that when we create a task we are not computing any value yet. Having a `Task` instance allows us to try to run it synchronously or asynchronously, choosing a scheduler. +We'll run our fetches to the ambiend `Id` monad in our examples, let's do some imports. ```scala -import fetch.implicits._ -// import fetch.implicits._ - -import scala.concurrent._ -// import scala.concurrent._ - -import ExecutionContext.Implicits.global -// import ExecutionContext.Implicits.global - -val result: Future[String] = fetchOne.runA[Future] -// result: scala.concurrent.Future[String] = List() -``` - -Since we calculated the results eagerly using `Task#now`, we can run this fetch synchronously. - -```scala -import scala.concurrent.duration._ -// [152] One ToString 1 -// import scala.concurrent.duration._ - -Await.result(result, Duration.Inf) -// res3: String = 1 +import cats.Id +import fetch.unsafe.implicits._ +import fetch.syntax._ ``` -As you can see in the previous example, the `ToStringSource` is queried once to get the value of 1. 
+Note that in real-life scenarios you'll want to run a fetch to a concurrency monad, synchronous execution of a fetch +is only supported in Scala and not Scala.js and is meant for experimentation purposes. +Let's run it and wait for the fetch to complete: ```scala -import scala.concurrent._ -// import scala.concurrent._ - -import scala.concurrent.duration._ -// import scala.concurrent.duration._ - -def await[A](t: Future[A]): A = Await.result(t, Duration.Inf) -// await: [A](t: scala.concurrent.Future[A])A +fetchOne.runA[Id] +// [102] One ToString 1 +// res3: cats.Id[String] = 1 ``` ## Batching @@ -139,19 +117,15 @@ import cats.syntax.cartesian._ // import cats.syntax.cartesian._ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled -// fetchThree: fetch.Fetch[(String, String, String)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List(2, 3)),ToStringSource$@183bd58)))),),) - -val result: Future[(String, String, String)] = fetchThree.runA[Future] -// result: scala.concurrent.Future[(String, String, String)] = List() +// fetchThree: fetch.Fetch[(String, String, String)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List(2, 3)),ToStringSource$@6fb256ea)))),),) ``` - -When executing the above fetch, note how the three identities get batched and the data source is only queried once. Let's pretend we have a function from `Future[A]` to `A` called `await`. +When executing the above fetch, note how the three identities get batched and the data source is only queried once. ```scala -await(result) -// [152] Many ToString OneAnd(1,List(2, 3)) -// res4: (String, String, String) = (1,2,3) +fetchThree.runA[Id] +// [102] Many ToString OneAnd(1,List(2, 3)) +// res4: cats.Id[(String, String, String)] = (1,2,3) ``` ## Parallelism @@ -161,43 +135,37 @@ If we combine two independent fetches from different data sources, the fetches c This time, instead of creating the results with `Query#later` we are going to do it with `Query#async` for emulating an asynchronous data source. ```scala -scala> implicit object LengthSource extends DataSource[String, Int]{ - | override def fetchOne(id: String): Query[Option[Int]] = { - | Query.async((ok, fail) => { - | println(s"[${Thread.currentThread.getId}] One Length $id") - | ok(Option(id.size)) - | }) - | } - | override def fetchMany(ids: NonEmptyList[String]): Query[Map[String, Int]] = { - | Query.async((ok, fail) => { - | println(s"[${Thread.currentThread.getId}] Many Length $ids") - | ok(ids.unwrap.map(i => (i, i.size)).toMap) - | }) - | } - | } -defined object LengthSource - -scala> def fetchLength(s: String): Fetch[Int] = Fetch(s) -fetchLength: (s: String)fetch.Fetch[Int] +implicit object LengthSource extends DataSource[String, Int]{ + override def fetchOne(id: String): Query[Option[Int]] = { + Query.async((ok, fail) => { + println(s"[${Thread.currentThread.getId}] One Length $id") + ok(Option(id.size)) + }) + } + override def fetchMany(ids: NonEmptyList[String]): Query[Map[String, Int]] = { + Query.async((ok, fail) => { + println(s"[${Thread.currentThread.getId}] Many Length $ids") + ok(ids.unwrap.map(i => (i, i.size)).toMap) + }) + } +} + +def fetchLength(s: String): Fetch[Int] = Fetch(s) ``` And now we can easily receive data from the two sources in a single fetch. 
```scala -scala> val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled -fetchMulti: fetch.Fetch[(String, Int)] = Gosub(Gosub(Suspend(Concurrent(List(FetchOne(1,ToStringSource$@183bd58), FetchOne(one,LengthSource$@1d6700b5)))),),) - -scala> val result = fetchMulti.runA[Future] -result: scala.concurrent.Future[(String, Int)] = List() -[163] One ToString 1 +val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled ``` Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. ```scala -scala> await(result) -[152] One Length one -res5: (String, Int) = (1,3) +fetchMulti.runA[Id] +// [102] One ToString 1 +// [103] One Length one +// res6: cats.Id[(String, Int)] = (1,3) ``` ## Caching @@ -205,17 +173,16 @@ res5: (String, Int) = (1,3) When fetching an identity, subsequent fetches for the same identity are cached. Let's try creating a fetch that asks for the same identity twice. ```scala -scala> val fetchTwice: Fetch[(String, String)] = for { - | one <- fetchString(1) - | two <- fetchString(1) - | } yield (one, two) -fetchTwice: fetch.Fetch[(String, String)] = Gosub(Suspend(FetchOne(1,ToStringSource$@183bd58)),) +val fetchTwice: Fetch[(String, String)] = for { + one <- fetchString(1) + two <- fetchString(1) +} yield (one, two) ``` While running it, notice that the data source is only queried once. The next time the identity is requested it's served from the cache. ```scala -scala> val result: (String, String) = await(fetchTwice.runA[Future]) -[152] One ToString 1 -result: (String, String) = (1,1) +fetchTwice.runA[Id] +// [102] One ToString 1 +// res7: cats.Id[(String, String)] = (1,1) ``` diff --git a/tut/README.md b/tut/README.md index 51cb5698..9fccc2d4 100644 --- a/tut/README.md +++ b/tut/README.md @@ -94,33 +94,21 @@ import fetch.syntax._ val fetchOne: Fetch[String] = fetchString(1) ``` -Now that we have created a fetch, we can run it to a `Task`. Note that when we create a task we are not computing any value yet. Having a `Task` instance allows us to try to run it synchronously or asynchronously, choosing a scheduler. +We'll run our fetches to the ambiend `Id` monad in our examples, let's do some imports. -```tut:book -import fetch.implicits._ - -import scala.concurrent._ -import ExecutionContext.Implicits.global - -val result: Future[String] = fetchOne.runA[Future] -``` - -Since we calculated the results eagerly using `Task#now`, we can run this fetch synchronously. - -```tut:book -import scala.concurrent.duration._ - -Await.result(result, Duration.Inf) +```tut:silent +import cats.Id +import fetch.unsafe.implicits._ +import fetch.syntax._ ``` -As you can see in the previous example, the `ToStringSource` is queried once to get the value of 1. +Note that in real-life scenarios you'll want to run a fetch to a concurrency monad, synchronous execution of a fetch +is only supported in Scala and not Scala.js and is meant for experimentation purposes. +Let's run it and wait for the fetch to complete: ```tut:book -import scala.concurrent._ -import scala.concurrent.duration._ - -def await[A](t: Future[A]): A = Await.result(t, Duration.Inf) +fetchOne.runA[Id] ``` ## Batching @@ -131,14 +119,12 @@ Multiple fetches to the same data source are automatically batched. 
For illustra import cats.syntax.cartesian._ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled -val result: Future[(String, String, String)] = fetchThree.runA[Future] ``` - -When executing the above fetch, note how the three identities get batched and the data source is only queried once. Let's pretend we have a function from `Future[A]` to `A` called `await`. +When executing the above fetch, note how the three identities get batched and the data source is only queried once. ```tut:book -await(result) +fetchThree.runA[Id] ``` ## Parallelism @@ -147,7 +133,7 @@ If we combine two independent fetches from different data sources, the fetches c This time, instead of creating the results with `Query#later` we are going to do it with `Query#async` for emulating an asynchronous data source. -```tuto:silent +```tut:silent implicit object LengthSource extends DataSource[String, Int]{ override def fetchOne(id: String): Query[Option[Int]] = { Query.async((ok, fail) => { @@ -168,22 +154,21 @@ def fetchLength(s: String): Fetch[Int] = Fetch(s) And now we can easily receive data from the two sources in a single fetch. -```tuto:book +```tut:silent val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled -val result = fetchMulti.runA[Future] ``` Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. -```tuto:book -await(result) +```tut:book +fetchMulti.runA[Id] ``` ## Caching When fetching an identity, subsequent fetches for the same identity are cached. Let's try creating a fetch that asks for the same identity twice. -```tuto:book +```tut:silent val fetchTwice: Fetch[(String, String)] = for { one <- fetchString(1) two <- fetchString(1) @@ -192,6 +177,6 @@ val fetchTwice: Fetch[(String, String)] = for { While running it, notice that the data source is only queried once. The next time the identity is requested it's served from the cache. -```tuto:book -val result: (String, String) = await(fetchTwice.runA[Future]) +```tut:book +fetchTwice.runA[Id] ``` From be095c096d71cb48dff134bb8660912d450e82ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sat, 4 Jun 2016 14:04:28 +0200 Subject: [PATCH 23/40] Silence irrelevant output --- README.md | 18 ++++++++---------- tut/README.md | 2 +- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index eaa1e785..4dc9d0c5 100644 --- a/README.md +++ b/README.md @@ -104,7 +104,7 @@ Let's run it and wait for the fetch to complete: ```scala fetchOne.runA[Id] -// [102] One ToString 1 +// [111] One ToString 1 // res3: cats.Id[String] = 1 ``` @@ -114,18 +114,16 @@ Multiple fetches to the same data source are automatically batched. For illustra ```scala import cats.syntax.cartesian._ -// import cats.syntax.cartesian._ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled -// fetchThree: fetch.Fetch[(String, String, String)] = Gosub(Gosub(Suspend(Concurrent(List(FetchMany(OneAnd(1,List(2, 3)),ToStringSource$@6fb256ea)))),),) ``` When executing the above fetch, note how the three identities get batched and the data source is only queried once. 
```scala fetchThree.runA[Id] -// [102] Many ToString OneAnd(1,List(2, 3)) -// res4: cats.Id[(String, String, String)] = (1,2,3) +// [111] Many ToString OneAnd(1,List(2, 3)) +// res5: cats.Id[(String, String, String)] = (1,2,3) ``` ## Parallelism @@ -163,9 +161,9 @@ Note how the two independent data fetches are run in parallel, minimizing the la ```scala fetchMulti.runA[Id] -// [102] One ToString 1 -// [103] One Length one -// res6: cats.Id[(String, Int)] = (1,3) +// [111] One ToString 1 +// [112] One Length one +// res7: cats.Id[(String, Int)] = (1,3) ``` ## Caching @@ -183,6 +181,6 @@ While running it, notice that the data source is only queried once. The next tim ```scala fetchTwice.runA[Id] -// [102] One ToString 1 -// res7: cats.Id[(String, String)] = (1,1) +// [111] One ToString 1 +// res8: cats.Id[(String, String)] = (1,1) ``` diff --git a/tut/README.md b/tut/README.md index 9fccc2d4..9abe318a 100644 --- a/tut/README.md +++ b/tut/README.md @@ -115,7 +115,7 @@ fetchOne.runA[Id] Multiple fetches to the same data source are automatically batched. For illustrating it, we are going to compose three independent fetch results as a tuple. -```tut:book +```tut:silent import cats.syntax.cartesian._ val fetchThree: Fetch[(String, String, String)] = (fetchString(1) |@| fetchString(2) |@| fetchString(3)).tupled From 0c8ca327b9a2f9e40e57943733cf2103d1e438cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sat, 4 Jun 2016 15:53:39 +0200 Subject: [PATCH 24/40] Document Query constructors --- docs/src/tut/docs.md | 51 +++++++++++++++++++++++++------ shared/src/main/scala/fetch.scala | 3 +- 2 files changed, 44 insertions(+), 10 deletions(-) diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md index 15e54f5c..40fee1cb 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -151,6 +151,48 @@ implicit object UnbatchedSource extends DataSource[Int, Int]{ } ``` +## Queries + +Queries are a way of separating the computation required to read a piece of data from the context in +which is run. Let's look at the various ways we have of constructing queries. + +### Synchronous + +A query can be synchronous, and we may want to evaluate it when `fetchOne` and `fetchMany` +are called. We can do so with `Query#now`: + +```tut:book +Query.now(42) +``` + +You can also construct lazy queries that can evaluate synchronously with `Query#later`: + +```tut:book +Query.later({ println("Computing 42"); 42 }) +``` + +Synchronous queries simply wrap a Cats' `Eval` instance, which captures the notion of a lazy synchronous +computation. You can lift an `Eval[A]` into a `Query[A]` too: + +```tut:book +import cats.Eval + +Query.sync(Eval.always({ println("Computing 42"); 42 })) +``` + +### Asynchronous + +Asynchronous queries are constructed passing a function that accepts a callback (`A => Unit`) and an errback +(`Throwable => Unit`) and performs the asynchronous computation. Note that you must ensure that either the +callback or the errback are called. + +```tut:book +Query.async((ok: (Int => Unit), fail) => { + Thread.sleep(100) + ok(42) +}) +``` + ## Creating and running a fetch We are now ready to create and run fetches. Note the distinction between Fetch creation and execution. 
@@ -403,15 +445,6 @@ As you may have guessed, all the optimizations made by `sequence` still apply wh fetchTraverse.runA[Id] ``` -# Queries - -## Synchronous - -### Now - -### Later - -## Asynchronous # Caching diff --git a/shared/src/main/scala/fetch.scala b/shared/src/main/scala/fetch.scala index 9be36707..7f50ca23 100644 --- a/shared/src/main/scala/fetch.scala +++ b/shared/src/main/scala/fetch.scala @@ -41,9 +41,10 @@ object Query { type Callback[A] = A => Unit type Errback = Throwable => Unit - def apply[A](x: A): Query[A] = Sync(Eval.now(x)) def now[A](x: A): Query[A] = Sync(Eval.now(x)) def later[A](th: => A): Query[A] = Sync(Eval.later(th)) + + def sync[A](e: Eval[A]) = Sync(e) def async[A]( action: (Callback[A], Errback) => Unit, timeout: Duration = Duration.Inf From ba68bceef2012d9594c4145a3e5aa68dbf9ebf3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sat, 4 Jun 2016 18:07:04 +0200 Subject: [PATCH 25/40] Document Monix Task as the target monad --- build.sbt | 5 +- docs/src/tut/docs.md | 220 ++++++++++++------ monix/shared/src/main/scala/implicits.scala | 64 +++-- .../src/test/scala/FetchTaskTests.scala | 2 +- 4 files changed, 202 insertions(+), 89 deletions(-) diff --git a/build.sbt b/build.sbt index b1f6cf35..660eed4b 100644 --- a/build.sbt +++ b/build.sbt @@ -68,7 +68,7 @@ lazy val docs = (project in file("docs")) .settings( moduleName := "fetch-docs" ) - .dependsOn(fetchJVM) + .dependsOn(fetchJVM, fetchMonixJVM) .enablePlugins(JekyllPlugin) .settings(docsSettings: _*) .settings(noPublishSettings) @@ -129,8 +129,7 @@ lazy val readme = (project in file("tut")) lazy val monixSettings = ( libraryDependencies ++= Seq( - "io.monix" %%% "monix-eval" % "2.0-RC5", - "io.monix" %%% "monix-cats" % "2.0-RC5" + "io.monix" %%% "monix-eval" % "2.0-RC5" ) ) diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md index 40fee1cb..9231264f 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -98,6 +98,18 @@ type UserId = Int case class User(id: UserId, username: String) ``` +We'll simulate unpredictable latency with this function. + +```tut:silent +def latency[A](result: A, msg: String) = { + val id = Thread.currentThread.getId + println(s"~~> [$id] $msg") + Thread.sleep(100) + println(s"<~~ [$id] $msg") + result +} +``` + And now we're ready to write our user data source; we'll emulate a database with an in-memory map. ```tut:silent @@ -109,20 +121,19 @@ import fetch._ val userDatabase: Map[UserId, User] = Map( 1 -> User(1, "@one"), 2 -> User(2, "@two"), - 3 -> User(3, "@three") + 3 -> User(3, "@three"), + 4 -> User(4, "@four") ) implicit object UserSource extends DataSource[UserId, User]{ override def fetchOne(id: UserId): Query[Option[User]] = { Query.later({ - println(s"[${Thread.currentThread.getId}] One User $id") - userDatabase.get(id) + latency(userDatabase.get(id), s"One User $id") }) } override def fetchMany(ids: NonEmptyList[UserId]): Query[Map[UserId, User]] = { Query.later({ - println(s"[${Thread.currentThread.getId}] Many Users $ids") - userDatabase.filterKeys(ids.unwrap.contains) + latency(userDatabase.filterKeys(ids.unwrap.contains), s"Many Users $ids") }) } } @@ -151,47 +162,6 @@ implicit object UnbatchedSource extends DataSource[Int, Int]{ } ``` -## Queries - -Queries are a way of separating the computation required to read a piece of data from the context in -which is run. Let's look at the various ways we have of constructing queries. 
- -### Synchronous - -A query can be synchronous, and we may want to evaluate it when `fetchOne` and `fetchMany` -are called. We can do so with `Query#now`: - -```tut:book -Query.now(42) -``` - -You can also construct lazy queries that can evaluate synchronously with `Query#later`: - -```tut:book -Query.later({ println("Computing 42"); 42 }) -``` - -Synchronous queries simply wrap a Cats' `Eval` instance, which captures the notion of a lazy synchronous -computation. You can lift an `Eval[A]` into a `Query[A]` too: - -```tut:book -import cats.Eval - -Query.sync(Eval.always({ println("Computing 42"); 42 })) -``` - -### Asynchronous - -Asynchronous queries are constructed passing a function that accepts a callback (`A => Unit`) and an errback -(`Throwable => Unit`) and performs the asynchronous computation. Note that you must ensure that either the -callback or the errback are called. - -```tut:book -Query.async((ok: (Int => Unit), fail) => { - Thread.sleep(100) - ok(42) -}) -``` ## Creating and running a fetch @@ -300,6 +270,49 @@ As you can see, the `User` with id 1 was fetched only once in a single round-tri time it was needed we used the cached versions, thus avoiding another request to the user data source. + +## Queries + +Queries are a way of separating the computation required to read a piece of data from the context in +which is run. Let's look at the various ways we have of constructing queries. + +### Synchronous + +A query can be synchronous, and we may want to evaluate it when `fetchOne` and `fetchMany` +are called. We can do so with `Query#now`: + +```tut:book +Query.now(42) +``` + +You can also construct lazy queries that can evaluate synchronously with `Query#later`: + +```tut:book +Query.later({ println("Computing 42"); 42 }) +``` + +Synchronous queries simply wrap a Cats' `Eval` instance, which captures the notion of a lazy synchronous +computation. You can lift an `Eval[A]` into a `Query[A]` too: + +```tut:book +import cats.Eval + +Query.sync(Eval.always({ println("Computing 42"); 42 })) +``` + +### Asynchronous + +Asynchronous queries are constructed passing a function that accepts a callback (`A => Unit`) and an errback +(`Throwable => Unit`) and performs the asynchronous computation. Note that you must ensure that either the +callback or the errback are called. + +```tut:book +Query.async((ok: (Int => Unit), fail) => { + Thread.sleep(100) + ok(42) +}) +``` + ## Combining data from multiple sources Now that we know about some of the optimizations that Fetch can perform to read data efficiently, @@ -325,14 +338,12 @@ val postDatabase: Map[PostId, Post] = Map( implicit object PostSource extends DataSource[PostId, Post]{ override def fetchOne(id: PostId): Query[Option[Post]] = { Query.later({ - println(s"[${Thread.currentThread.getId}] One Posts $id") - postDatabase.get(id) + latency(postDatabase.get(id), s"One Post $id") }) } override def fetchMany(ids: NonEmptyList[PostId]): Query[Map[PostId, Post]] = { Query.later({ - println(s"[${Thread.currentThread.getId}] Many Posts $ids") - postDatabase.filterKeys(ids.unwrap.contains) + latency(postDatabase.filterKeys(ids.unwrap.contains), s"Many Posts $ids") }) } } @@ -346,13 +357,40 @@ We can also implement a function for fetching a post's author given a post: def getAuthor(p: Post): Fetch[User] = Fetch(p.author) ``` +Apart from posts, we are going to add another data source: one for post topics. + +```tut:silent +type PostTopic = String +``` + +We'll implement a data source for retrieving a post topic given a post id. 
+ +```tut:silent +implicit object PostTopicSource extends DataSource[Post, PostTopic]{ + override def fetchOne(id: Post): Query[Option[PostTopic]] = { + Query.later({ + val topic = if (id.id % 2 == 0) "monad" else "applicative" + latency(Option(topic), s"One Post Topic $id") + }) + } + override def fetchMany(ids: NonEmptyList[Post]): Query[Map[Post, PostTopic]] = { + Query.later({ + val result = ids.unwrap.map(id => (id, if (id.id % 2 == 0) "monad" else "applicative")).toMap + latency(result, s"Many Post Topics $ids") + }) + } +} + +def getPostTopic(post: Post): Fetch[PostTopic] = Fetch(post) +``` + Now that we have multiple sources let's mix them in the same fetch. ```tut:silent -val fetchMulti: Fetch[(Post, User)] = for { +val fetchMulti: Fetch[(Post, PostTopic)] = for { post <- getPost(1) - user <- getAuthor(post) -} yield (post, user) + topic <- getPostTopic(post) +} yield (post, topic) ``` We can now run the previous fetch, querying the posts data source first and the user data source afterwards. @@ -361,7 +399,7 @@ We can now run the previous fetch, querying the posts data source first and the fetchMulti.runA[Id] ``` -In the previous example, we fetched a post given its id and then fetched its author. This +In the previous example, we fetched a post given its id and then fetched its topic. This data could come from entirely different places, but Fetch makes working with heterogeneous sources of data very easy. @@ -531,7 +569,7 @@ val fetchSameTwice: Fetch[(User, User)] = for { one <- getUser(1) another <- getUser(1) } yield (one, another) - + fetchSameTwice.runA[Id](ForgetfulCache()) ``` @@ -609,7 +647,7 @@ fetchPure.runA[Id] ### error -Errors can also be lifted to the Fetch monad via `exception.fetch`. +Errors can also be lifted to the Fetch monad via `exception.fetch`. ```tut:silent val fetchFail: Fetch[Int] = Fetch.error(new Exception("Something went terribly wrong")) @@ -683,7 +721,7 @@ Fetch.run[Id](fetchPure) ### error -Errors can also be lifted to the Fetch monad via `Fetch#error`. +Errors can also be lifted to the Fetch monad via `Fetch#error`. ```tut:silent val fetchFail: Fetch[Int] = Fetch.error(new Exception("Something went terribly wrong")) @@ -790,40 +828,82 @@ val fetchFriends: Fetch[String] = Fetch.join(getUser(1), getUser(2)).map({ case fetchFriends.runA[Id] ``` -# Concurrency +# Concurrency monads Fetch lets you choose the concurrency monad you want for running fetches, supporting the Scala and Scala.js standard library concurrency primitives. However not everyone is using `Future` and Fetch acknowledges it, providing support for the most widespread concurrency monads and making it easy for users to run a fetch to a custom type. +For supporting running a fetch to a monad `M[_]` an instance of `FetchMonadError[M]` must be available. + +We'll use the following fetches for the examples. They show how we can combine independent fetches both for +batching and exploiting the concurrency of independent data. 
+ +```tut:silent +val postsByAuthor: Fetch[List[Post]] = for { + posts <- List(1, 2).traverse(getPost) + authors <- posts.traverse(getAuthor) + ordered = (posts zip authors).sortBy({ case (_, author) => author.username }).map(_._1) +} yield ordered + +val postTopics: Fetch[Map[PostTopic, Int]] = for { + posts <- List(2, 3).traverse(getPost) + topics <- posts.traverse(getPostTopic) + countByTopic = (posts zip topics).groupBy(_._2).mapValues(_.size) +} yield countByTopic + +val homePage = (postsByAuthor |@| postTopics).tupled +``` + ## Future -As you have learned through the examples, you can run a fetch into a `Future` simply by importing `fetch.implicits`. It +You can run a fetch into a `Future` simply by importing `fetch.implicits`. It contains an instance of `FetchMonadError[Future]` given that you provide an implicit `ExecutionContext`. +For the sake of the examples we'll use the global `ExecutionContext`. -## Twitter Future +```tut:book +Await.result(Fetch.run[Future](homePage), Duration.Inf) +``` -TODO +## Monix Task -## Scalaz task +The [Monix](https://monix.io/) library provides an abstraction for lazy, asynchronous computations with its [Task](https://monix.io/docs/2x/eval/task.html) type. -TODO +For using `Task` as the target concurrency monad of a fetch, add the following dependency to your build file: -## Monix Task +```scala +"com.fortysevendeg" %% "fetch-monix" %% "0.2.0" +``` -Scala and Scala.js +And do some standard imports, we'll need an Scheduler for running our tasks as well as the instance of `FetchMonadError[Task]` that `fetch-monix` provids: -### JVM +```tut:silent +import monix.eval.Task +import monix.execution.Scheduler + +import fetch.monixTask.implicits._ +``` -Scheduler for io in JVM +Note that running a fetch to a `Task` doesn't trigger execution. We can interpret a task to a `Future` with the `Task#runAsync` method. We'll use the global scheduler for now. -### JS +```tut:book +val scheduler = Scheduler.Implicits.global +val task = Fetch.run[Task](homePage) + +Await.result(task.runAsync(scheduler), Duration.Inf) +``` + +### JVM + +In the JVM, you may want to choose a [scheduler tuned for IO workloads](https://monix.io/docs/2x/execution/scheduler.html#builders-on-the-jvm) to interpret fetches. -## Eval +```tut:book +val ioSched = Scheduler.io(name="io-scheduler") -Unsafe, for testing purposes, jvm only +Await.result(task.runAsync(ioSched), Duration.Inf) +``` # Resources diff --git a/monix/shared/src/main/scala/implicits.scala b/monix/shared/src/main/scala/implicits.scala index dc1334de..ab03f464 100644 --- a/monix/shared/src/main/scala/implicits.scala +++ b/monix/shared/src/main/scala/implicits.scala @@ -14,27 +14,55 @@ * limitations under the License. 
*/ -package fetch.monix +package fetch.monixTask import fetch._ -import _root_.monix.eval.Task -import _root_.monix.execution.{Scheduler, Cancelable} +import cats.{Eval, Now, Later, Always, Traverse, Applicative} + +import monix.eval.Task +import monix.execution.{Scheduler, Cancelable} + +import scala.concurrent.duration._ object implicits { + def evalToTask[A](e: Eval[A]): Task[A] = e match { + case Now(x) => Task.now(x) + case l: Later[A] => Task.evalOnce({ l.value }) + case a: Always[A] => Task.evalAlways({ a.value }) + case other => Task.evalOnce({ other.value }) + } + + implicit val fetchTaskApplicative: Applicative[Task] = new Applicative[Task] { + override def pureEval[A](e: Eval[A]): Task[A] = evalToTask(e) + def pure[A](x: A): Task[A] = Task.now(x) + def ap[A, B](ff: Task[A => B])(fa: Task[A]): Task[B] = + Task.mapBoth(ff, fa)((f, x) => f(x)) + } + implicit val fetchTaskFetchMonadError: FetchMonadError[Task] = new FetchMonadError[Task] { + override def pureEval[A](e: Eval[A]): Task[A] = evalToTask(e) + def pure[A](x: A): Task[A] = + Task.now(x) + override def runQuery[A](j: Query[A]): Task[A] = j match { - case Sync(x) => pureEval(x) - case Async(ac, timeout) => - Task.create( - (scheduler, callback) => { + case Sync(x) => evalToTask(x) + case Async(ac, timeout) => { + val task: Task[A] = Task.create( + (scheduler, callback) => { + + scheduler.execute(new Runnable { + def run() = ac(callback.onSuccess, callback.onError) + }) - scheduler.execute(new Runnable { - def run() = ac(callback.onSuccess, callback.onError) + Cancelable.empty }) - Cancelable.empty - }) + timeout match { + case finite: FiniteDuration => task.timeout(finite) + case _ => task + } + } case Ap(qf, qx) => Task .zip2(runQuery(qf), runQuery(qx)) @@ -43,12 +71,18 @@ object implicits { }) } - def pure[A](x: A): Task[A] = Task.now(x) + override def map[A, B](fa: Task[A])(f: A => B): Task[B] = + fa.map(f) + + override def sequence[G[_], A](as: G[Task[A]])(implicit G: Traverse[G]): Task[G[A]] = + G.sequence(as)(fetchTaskApplicative) + def handleErrorWith[A](fa: Task[A])(f: Throwable => Task[A]): Task[A] = fa.onErrorHandleWith(f) - override def ap[A, B](f: Task[A => B])(x: Task[A]): Task[B] = - Task.mapBoth(f, x)((f, x) => f(x)) - def raiseError[A](e: Throwable): Task[A] = Task.raiseError(e) + + def raiseError[A](e: Throwable): Task[A] = + Task.raiseError(e) + def flatMap[A, B](fa: Task[A])(f: A => Task[B]): Task[B] = fa.flatMap(f) } diff --git a/monix/shared/src/test/scala/FetchTaskTests.scala b/monix/shared/src/test/scala/FetchTaskTests.scala index 0a7545b3..bc7109b9 100644 --- a/monix/shared/src/test/scala/FetchTaskTests.scala +++ b/monix/shared/src/test/scala/FetchTaskTests.scala @@ -23,7 +23,7 @@ import cats.data.NonEmptyList import cats.std.list._ import fetch._ -import fetch.monix.implicits._ +import fetch.monixTask.implicits._ import scala.concurrent.Future From 1b3d118a944976cc7ec7a7b458ca80a54888a533 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sun, 5 Jun 2016 14:26:41 +0200 Subject: [PATCH 26/40] Improve FetchMonadError[Task] for Monix --- docs/src/tut/docs.md | 8 +++-- monix/shared/src/main/scala/implicits.scala | 38 +++++++++------------ shared/src/main/scala/interpreters.scala | 1 + 3 files changed, 23 insertions(+), 24 deletions(-) diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md index 9231264f..50df412f 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -370,13 +370,13 @@ implicit object PostTopicSource extends DataSource[Post, PostTopic]{ override def 
fetchOne(id: Post): Query[Option[PostTopic]] = { Query.later({ val topic = if (id.id % 2 == 0) "monad" else "applicative" - latency(Option(topic), s"One Post Topic $id") + latency(Option(topic), s"One Post Topic $id") }) } override def fetchMany(ids: NonEmptyList[Post]): Query[Map[Post, PostTopic]] = { Query.later({ val result = ids.unwrap.map(id => (id, if (id.id % 2 == 0) "monad" else "applicative")).toMap - latency(result, s"Many Post Topics $ids") + latency(result, s"Many Post Topics $ids") }) } } @@ -905,6 +905,10 @@ val ioSched = Scheduler.io(name="io-scheduler") Await.result(task.runAsync(ioSched), Duration.Inf) ``` +## Custom types + +TODO + # Resources - [Code](https://github.com/47deg/fetch) on GitHub. diff --git a/monix/shared/src/main/scala/implicits.scala b/monix/shared/src/main/scala/implicits.scala index ab03f464..f6fe1f96 100644 --- a/monix/shared/src/main/scala/implicits.scala +++ b/monix/shared/src/main/scala/implicits.scala @@ -33,18 +33,27 @@ object implicits { case other => Task.evalOnce({ other.value }) } - implicit val fetchTaskApplicative: Applicative[Task] = new Applicative[Task] { - override def pureEval[A](e: Eval[A]): Task[A] = evalToTask(e) - def pure[A](x: A): Task[A] = Task.now(x) - def ap[A, B](ff: Task[A => B])(fa: Task[A]): Task[B] = - Task.mapBoth(ff, fa)((f, x) => f(x)) - } - implicit val fetchTaskFetchMonadError: FetchMonadError[Task] = new FetchMonadError[Task] { + override def map[A, B](fa: Task[A])(f: A => B): Task[B] = + fa.map(f) + + override def product[A, B](fa: Task[A], fb: Task[B]): Task[(A, B)] = + Task.zip2(Task.fork(fa), Task.fork(fb)) + override def pureEval[A](e: Eval[A]): Task[A] = evalToTask(e) + def pure[A](x: A): Task[A] = Task.now(x) + def handleErrorWith[A](fa: Task[A])(f: Throwable => Task[A]): Task[A] = + fa.onErrorHandleWith(f) + + def raiseError[A](e: Throwable): Task[A] = + Task.raiseError(e) + + def flatMap[A, B](fa: Task[A])(f: A => Task[B]): Task[B] = + fa.flatMap(f) + override def runQuery[A](j: Query[A]): Task[A] = j match { case Sync(x) => evalToTask(x) case Async(ac, timeout) => { @@ -70,20 +79,5 @@ object implicits { case (f, x) => f(x) }) } - - override def map[A, B](fa: Task[A])(f: A => B): Task[B] = - fa.map(f) - - override def sequence[G[_], A](as: G[Task[A]])(implicit G: Traverse[G]): Task[G[A]] = - G.sequence(as)(fetchTaskApplicative) - - def handleErrorWith[A](fa: Task[A])(f: Throwable => Task[A]): Task[A] = - fa.onErrorHandleWith(f) - - def raiseError[A](e: Throwable): Task[A] = - Task.raiseError(e) - - def flatMap[A, B](fa: Task[A])(f: A => Task[B]): Task[B] = - fa.flatMap(f) } } diff --git a/shared/src/main/scala/interpreters.scala b/shared/src/main/scala/interpreters.scala index 7bf3ea0c..50795a5f 100644 --- a/shared/src/main/scala/interpreters.scala +++ b/shared/src/main/scala/interpreters.scala @@ -78,6 +78,7 @@ trait FetchInterpreters { .asInstanceOf[DataSource[I, A]] .fetchMany(as.asInstanceOf[NonEmptyList[I]])) })) + M.flatMap(sentRequests)((results: List[Map[_, _]]) => { val endRound = System.nanoTime() val newCache = (requests zip results).foldLeft(cache)((accache, resultset) => { From d2f99f2cdbb2a586c514b5f2efdf176b309a9149 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Sun, 5 Jun 2016 15:25:25 +0200 Subject: [PATCH 27/40] Document how to bring your own concurrency monad --- docs/src/tut/docs.md | 98 ++++++++++++++++++++- monix/shared/src/main/scala/implicits.scala | 2 +- 2 files changed, 98 insertions(+), 2 deletions(-) diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md 
index 50df412f..de9f9a87 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -907,7 +907,103 @@ Await.result(task.runAsync(ioSched), Duration.Inf) ## Custom types -TODO +If you want to run a fetch to a custom type `M[_]`, you need to implement the `FetchMonadError[M]` typeclass. `FetchMonadError[M]` is simply a `MonadError[M, Throwable]` from cats augmented +with a method for running a `Query[A]` in the context of the monad `M[A]`. + +For ilustrating integration with an asynchronous concurrency monad we'll use the implementation of Monix Task. + +### Running queries + +First of all, we need to run queries in our target type. As we have learned, queries can be synchronous (simply wrapping an `Eval` from Cats) or asynchronous. Since we'll need to lift +`Eval[A]` values to `Task[A]`, let's write a function for doing so first. Note that Monix's `Task` supports the same evaluation strategies of `Eval` in Cats, so the conversion is very +direct: + +```tut:silent +import cats.{Eval, Now, Later, Always} +import monix.eval.Task + +def evalToTask[A](e: Eval[A]): Task[A] = e match { + case Now(x) => Task.now(x) + case l: Later[A] => Task.evalOnce({ l.value }) + case a: Always[A] => Task.evalAlways({ a.value }) + case other => Task.evalOnce({ other.value }) +} +``` + +Now that we can run synchronous queries to `Task`, we'll use `Task#create` for running asynchronous computations. Queries also have a third option: `Ap`, which delegates the applicative combination of independent queries to the target monad. + +```tut:silent +import monix.execution.Cancelable +import scala.concurrent.duration._ + +def queryToTask[A](q: Query[A]): Task[A] = q match { + case Sync(e) => evalToTask(e) + case Async(action, timeout) => { + val task: Task[A] = Task.create((scheduler, callback) => { + scheduler.execute(new Runnable { + def run() = action(callback.onSuccess, callback.onError) + }) + + Cancelable.empty + }) + + timeout match { + case finite: FiniteDuration => task.timeout(finite) + case _ => task + } + } + case Ap(qf, qx) => Task.zip2(queryToTask(qf), queryToTask(qx)).map({ case (f, x) => f(x) }) +} +``` + +The asynchronous action was built using `Task#create`; it receives the used scheduler and a callback, runs +the async action in the scheduler passing the success and error versions of the callback and returns an empty +cancelable (it can not be canceled); if we encounter a finite duration timeout, we set it on the task. + +The applicative action used `Task#zip2` to combine two tasks and apply the function contained in one of them +to the other. We used `Task#zip2` for expressing the independence between the two tasks, which can potentially +be evaluated in parallel. + +### Writing the FetchMonadError instance + +Now we're ready for implementing the FetchMonadError instance for `Task`, we need to define it as an implicit. +Note that Cats' typeclass hierarchy is expressed with inheritance and methods from weaker typeclasses like `Functor` or `Applicative` in more powerful typeclasses like `Monad` are implemented in terms of the operations of the latter. In practice, this means that if you just implement `pure` and `flatMap` the rest of the combinators like `map` are going to be implemented in terms of them. Because of this we'll override `map` for not using `flatMap` and `product` for expressing the independence of two computations. 
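To make the difference concrete, here is a small sketch (an editor's illustration, not part of the patch; the `sequentialProduct`/`concurrentProduct` names are made up for the example) contrasting the monad-derived `product`, which chains the second task after the first through `flatMap`, with the `Task.zip2`-based override used below:

```scala
import monix.eval.Task

// What a flatMap-derived product amounts to: fb cannot start until fa has finished.
def sequentialProduct[A, B](fa: Task[A], fb: Task[B]): Task[(A, B)] =
  fa.flatMap(a => fb.map(b => (a, b)))

// The override below instead zips two forked tasks, so two independent
// computations can be evaluated concurrently.
def concurrentProduct[A, B](fa: Task[A], fb: Task[B]): Task[(A, B)] =
  Task.zip2(Task.fork(fa), Task.fork(fb))
```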
+ + +```tut:silent +implicit val taskFetchMonadError: FetchMonadError[Task] = new FetchMonadError[Task] { + override def map[A, B](fa: Task[A])(f: A => B): Task[B] = + fa.map(f) + + override def product[A, B](fa: Task[A], fb: Task[B]): Task[(A, B)] = + Task.zip2(Task.fork(fa), Task.fork(fb)) // introduce parallelism with Task#fork + + override def pureEval[A](e: Eval[A]): Task[A] = evalToTask(e) + + def pure[A](x: A): Task[A] = + Task.now(x) + + def handleErrorWith[A](fa: Task[A])(f: Throwable => Task[A]): Task[A] = + fa.onErrorHandleWith(f) + + def raiseError[A](e: Throwable): Task[A] = + Task.raiseError(e) + + def flatMap[A, B](fa: Task[A])(f: A => Task[B]): Task[B] = + fa.flatMap(f) + + override def runQuery[A](q: Query[A]): Task[A] = queryToTask(q) +} +``` + +We can now import the above implicit and run a fetch to our custom type, let's give it a go: + +```tut:book +val task = Fetch.run(homePage)(taskFetchMonadError) + +Await.result(task.runAsync(scheduler), Duration.Inf) +``` + # Resources diff --git a/monix/shared/src/main/scala/implicits.scala b/monix/shared/src/main/scala/implicits.scala index f6fe1f96..39e56f19 100644 --- a/monix/shared/src/main/scala/implicits.scala +++ b/monix/shared/src/main/scala/implicits.scala @@ -54,7 +54,7 @@ object implicits { def flatMap[A, B](fa: Task[A])(f: A => Task[B]): Task[B] = fa.flatMap(f) - override def runQuery[A](j: Query[A]): Task[A] = j match { + override def runQuery[A](q: Query[A]): Task[A] = q match { case Sync(x) => evalToTask(x) case Async(ac, timeout) => { val task: Task[A] = Task.create( From 0d924689e0c3c3ebf444af8c16bfa82ebdfb8150 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Mon, 6 Jun 2016 11:48:46 +0200 Subject: [PATCH 28/40] Fix dependency specs --- docs/src/tut/docs.md | 6 +++--- docs/src/tut/index.md | 4 ++-- tut/README.md | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md index de9f9a87..fedbff23 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -29,13 +29,13 @@ we read) concerns. To begin, add the following dependency to your SBT build file: ```scala -"com.fortysevendeg" %% "fetch" %% "0.2.0" +"com.fortysevendeg" %% "fetch" % "0.2.0" ``` Or, if using Scala.js: ```scala -"com.fortysevendeg" %%% "fetch" %% "0.2.0" +"com.fortysevendeg" %%% "fetch" % "0.2.0" ``` Now you'll have Fetch available in both Scala and Scala.js. @@ -874,7 +874,7 @@ The [Monix](https://monix.io/) library provides an abstraction for lazy, asynchr For using `Task` as the target concurrency monad of a fetch, add the following dependency to your build file: ```scala -"com.fortysevendeg" %% "fetch-monix" %% "0.2.0" +"com.fortysevendeg" %% "fetch-monix" % "0.2.0" ``` And do some standard imports, we'll need an Scheduler for running our tasks as well as the instance of `FetchMonadError[Task]` that `fetch-monix` provids: diff --git a/docs/src/tut/index.md b/docs/src/tut/index.md index 974086b4..257e005a 100644 --- a/docs/src/tut/index.md +++ b/docs/src/tut/index.md @@ -13,13 +13,13 @@ Add the following dependency to your project's build file. 
For Scala 2.11.x: ```scala -"com.fortysevendeg" %% "fetch" %% "0.2.0" +"com.fortysevendeg" %% "fetch" % "0.2.0" ``` Or, if using Scala.js (0.6.x): ```scala -"com.fortysevendeg" %%% "fetch" %% "0.2.0" +"com.fortysevendeg" %%% "fetch" % "0.2.0" ``` ```tut:invisible diff --git a/tut/README.md b/tut/README.md index 9abe318a..a44e9cbc 100644 --- a/tut/README.md +++ b/tut/README.md @@ -15,13 +15,13 @@ Add the following dependency to your project's build file. For Scala 2.11.x: ```scala -"com.fortysevendeg" %% "fetch" %% "0.2.0" +"com.fortysevendeg" %% "fetch" % "0.2.0" ``` Or, if using Scala.js (0.6.x): ```scala -"com.fortysevendeg" %%% "fetch" %% "0.2.0" +"com.fortysevendeg" %%% "fetch" % "0.2.0" ``` ```tut:invisible From 158a12a99db11774c1d64bf9a246e77a0d9571c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Mon, 6 Jun 2016 11:48:59 +0200 Subject: [PATCH 29/40] Typo --- docs/src/tut/index.md | 2 +- tut/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/tut/index.md b/docs/src/tut/index.md index 257e005a..61f8c420 100644 --- a/docs/src/tut/index.md +++ b/docs/src/tut/index.md @@ -92,7 +92,7 @@ Now that we can convert `Int` values to `Fetch[String]`, let's try creating a fe val fetchOne: Fetch[String] = fetchString(1) ``` -We'll run our fetches to the ambiend `Id` monad in our examples, let's do some imports. +We'll run our fetches to the ambient `Id` monad in our examples, let's do some imports. ```tut:silent import cats.Id diff --git a/tut/README.md b/tut/README.md index a44e9cbc..453cdb0b 100644 --- a/tut/README.md +++ b/tut/README.md @@ -94,7 +94,7 @@ import fetch.syntax._ val fetchOne: Fetch[String] = fetchString(1) ``` -We'll run our fetches to the ambiend `Id` monad in our examples, let's do some imports. +We'll run our fetches to the ambien `Id` monad in our examples, let's do some imports. ```tut:silent import cats.Id From 2234f6871ba57aef05b257cf1cf3cc0972fc7023 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Mon, 6 Jun 2016 11:49:57 +0200 Subject: [PATCH 30/40] Concurrency monads --- tut/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tut/README.md b/tut/README.md index 453cdb0b..d48e904f 100644 --- a/tut/README.md +++ b/tut/README.md @@ -102,7 +102,7 @@ import fetch.unsafe.implicits._ import fetch.syntax._ ``` -Note that in real-life scenarios you'll want to run a fetch to a concurrency monad, synchronous execution of a fetch +Note that in real-life scenarios you'll want to run a fetch to a concurrency monad such as `Future` or `Task`, synchronous execution of a fetch is only supported in Scala and not Scala.js and is meant for experimentation purposes. Let's run it and wait for the fetch to complete: From a34ae16b155486db0f313da3866ca1d28fe67e0f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Mon, 6 Jun 2016 11:50:38 +0200 Subject: [PATCH 31/40] :fire: --- tut/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tut/README.md b/tut/README.md index d48e904f..cbcc19db 100644 --- a/tut/README.md +++ b/tut/README.md @@ -158,7 +158,7 @@ And now we can easily receive data from the two sources in a single fetch. val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled ``` -Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. 
+Note how the two independent data fetches run in parallel, minimizing the latency cost of querying the two data sources. ```tut:book fetchMulti.runA[Id] From ad842d69768dc1e9f38f4b9b9ac7d6d5c0a2cd3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Mon, 6 Jun 2016 11:51:44 +0200 Subject: [PATCH 32/40] :fire: are --- docs/src/tut/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/tut/index.md b/docs/src/tut/index.md index 61f8c420..d6c9bcf3 100644 --- a/docs/src/tut/index.md +++ b/docs/src/tut/index.md @@ -158,7 +158,7 @@ And now we can easily receive data from the two sources in a single fetch. val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled ``` -Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. +Note how the two independent data fetches run in parallel, minimizing the latency cost of querying the two data sources. ```tut:book fetchMulti.runA[Id] From d22eefefdba2e59a3e14510091060b07c4c395ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Mon, 6 Jun 2016 11:54:53 +0200 Subject: [PATCH 33/40] Update README --- README.md | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 4dc9d0c5..591568ec 100644 --- a/README.md +++ b/README.md @@ -15,13 +15,13 @@ Add the following dependency to your project's build file. For Scala 2.11.x: ```scala -"com.fortysevendeg" %% "fetch" %% "0.2.0" +"com.fortysevendeg" %% "fetch" % "0.2.0" ``` Or, if using Scala.js (0.6.x): ```scala -"com.fortysevendeg" %%% "fetch" %% "0.2.0" +"com.fortysevendeg" %%% "fetch" % "0.2.0" ``` Fetch is available for the following Scala and Scala.js versions: @@ -89,7 +89,7 @@ import fetch.syntax._ val fetchOne: Fetch[String] = fetchString(1) ``` -We'll run our fetches to the ambiend `Id` monad in our examples, let's do some imports. +We'll run our fetches to the ambien `Id` monad in our examples, let's do some imports. ```scala import cats.Id @@ -97,14 +97,14 @@ import fetch.unsafe.implicits._ import fetch.syntax._ ``` -Note that in real-life scenarios you'll want to run a fetch to a concurrency monad, synchronous execution of a fetch +Note that in real-life scenarios you'll want to run a fetch to a concurrency monad such as `Future` or `Task`, synchronous execution of a fetch is only supported in Scala and not Scala.js and is meant for experimentation purposes. Let's run it and wait for the fetch to complete: ```scala fetchOne.runA[Id] -// [111] One ToString 1 +// [42] One ToString 1 // res3: cats.Id[String] = 1 ``` @@ -122,7 +122,7 @@ When executing the above fetch, note how the three identities get batched and th ```scala fetchThree.runA[Id] -// [111] Many ToString OneAnd(1,List(2, 3)) +// [42] Many ToString OneAnd(1,List(2, 3)) // res5: cats.Id[(String, String, String)] = (1,2,3) ``` @@ -157,12 +157,12 @@ And now we can easily receive data from the two sources in a single fetch. val fetchMulti: Fetch[(String, Int)] = (fetchString(1) |@| fetchLength("one")).tupled ``` -Note how the two independent data fetches are run in parallel, minimizing the latency cost of querying the two data sources. +Note how the two independent data fetches run in parallel, minimizing the latency cost of querying the two data sources. 
```scala fetchMulti.runA[Id] -// [111] One ToString 1 -// [112] One Length one +// [42] One ToString 1 +// [43] One Length one // res7: cats.Id[(String, Int)] = (1,3) ``` @@ -181,6 +181,6 @@ While running it, notice that the data source is only queried once. The next tim ```scala fetchTwice.runA[Id] -// [111] One ToString 1 +// [42] One ToString 1 // res8: cats.Id[(String, String)] = (1,1) ``` From 8088021c5d3cbc992efe1ea24b41a0d80902bc2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Tue, 7 Jun 2016 10:40:38 +0200 Subject: [PATCH 34/40] Minor changes to README --- README.md | 14 ++++++-------- tut/README.md | 2 +- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 591568ec..0efcf8cb 100644 --- a/README.md +++ b/README.md @@ -24,10 +24,8 @@ Or, if using Scala.js (0.6.x): "com.fortysevendeg" %%% "fetch" % "0.2.0" ``` -Fetch is available for the following Scala and Scala.js versions: -- Scala 2.11.x -- Scala.js 0.6.x + ## Remote data @@ -104,7 +102,7 @@ Let's run it and wait for the fetch to complete: ```scala fetchOne.runA[Id] -// [42] One ToString 1 +// [46] One ToString 1 // res3: cats.Id[String] = 1 ``` @@ -122,7 +120,7 @@ When executing the above fetch, note how the three identities get batched and th ```scala fetchThree.runA[Id] -// [42] Many ToString OneAnd(1,List(2, 3)) +// [46] Many ToString OneAnd(1,List(2, 3)) // res5: cats.Id[(String, String, String)] = (1,2,3) ``` @@ -161,8 +159,8 @@ Note how the two independent data fetches run in parallel, minimizing the latenc ```scala fetchMulti.runA[Id] -// [42] One ToString 1 -// [43] One Length one +// [46] One ToString 1 +// [47] One Length one // res7: cats.Id[(String, Int)] = (1,3) ``` @@ -181,6 +179,6 @@ While running it, notice that the data source is only queried once. The next tim ```scala fetchTwice.runA[Id] -// [42] One ToString 1 +// [46] One ToString 1 // res8: cats.Id[(String, String)] = (1,1) ``` diff --git a/tut/README.md b/tut/README.md index cbcc19db..b100ab0a 100644 --- a/tut/README.md +++ b/tut/README.md @@ -41,7 +41,7 @@ has a latency cost, such as databases or web services. ## Define your data sources -To tell Fetch how to get the data you want, you must implement the `DataSource` typeclass. Data sources have `fetchOne` and `fetchMany` methods that define how to fetch such a piece of data. +To tell `Fetch` how to get the data you want, you must implement the `DataSource` typeclass. Data sources have `fetchOne` and `fetchMany` methods that define how to fetch such a piece of data. 
Data Sources take two type parameters: From b229d8f9fbc9db3a4479148030e559ca537ad555 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Tue, 7 Jun 2016 10:43:43 +0200 Subject: [PATCH 35/40] Add monix tests to CI --- .travis.yml | 4 ++-- monix/shared/src/test/scala/FetchTaskTests.scala | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 5c4bd616..6de24152 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,8 +4,8 @@ scala: jdk: - oraclejdk8 script: - - sbt coverage 'fetchJVM/test' 'fetchJVM/coverageReport' - - sbt 'fetchJS/test' + - sbt coverage 'fetchJVM/test' 'monixJVM/test' 'coverageReport' + - sbt 'fetchJS/test' 'monixJS/test' - sbt 'docs/tut' - sbt 'readme/tut' after_success: diff --git a/monix/shared/src/test/scala/FetchTaskTests.scala b/monix/shared/src/test/scala/FetchTaskTests.scala index bc7109b9..ee90c7ae 100644 --- a/monix/shared/src/test/scala/FetchTaskTests.scala +++ b/monix/shared/src/test/scala/FetchTaskTests.scala @@ -72,7 +72,7 @@ class FetchTaskTests extends AsyncFreeSpec with Matchers { fut.map(_ shouldEqual Article(1, "An article with id 1")) } - "We can combine several async data sources and interpret a fetch into a future" in { + "We can combine several async data sources and interpret a fetch into a task" in { val fetch: Fetch[(Article, Author)] = for { art <- article(1) author <- author(art) @@ -84,7 +84,7 @@ class FetchTaskTests extends AsyncFreeSpec with Matchers { fut.map(_ shouldEqual (Article(1, "An article with id 1"), Author(2, "@egg2"))) } - "We can use combinators in a for comprehension and interpret a fetch from async sources into a future" in { + "We can use combinators in a for comprehension and interpret a fetch from async sources into a task" in { val fetch: Fetch[List[Article]] = for { articles <- Fetch.traverse(List(1, 1, 2))(article) } yield articles @@ -101,7 +101,7 @@ class FetchTaskTests extends AsyncFreeSpec with Matchers { ) } - "We can use combinators and multiple sources in a for comprehension and interpret a fetch from async sources into a future" in { + "We can use combinators and multiple sources in a for comprehension and interpret a fetch from async sources into a task" in { val fetch = for { articles <- Fetch.traverse(List(1, 1, 2))(article) authors <- Fetch.traverse(articles)(author) From 8525f677d7db8fc3023e19a809895bb0f613f4ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Tue, 7 Jun 2016 11:05:00 +0200 Subject: [PATCH 36/40] Unbreak Travis config --- .travis.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 6de24152..51014722 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,8 +4,9 @@ scala: jdk: - oraclejdk8 script: - - sbt coverage 'fetchJVM/test' 'monixJVM/test' 'coverageReport' - - sbt 'fetchJS/test' 'monixJS/test' + - sbt coverage 'fetchJVM/test' 'coverageReport' + - sbt 'monixJVM/test' 'monixJS/test' + - sbt 'fetchJS/test' - sbt 'docs/tut' - sbt 'readme/tut' after_success: From 20172b039fae202f7acaf818aec8e432fb59eb3f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Tue, 7 Jun 2016 11:05:09 +0200 Subject: [PATCH 37/40] Improvements to README --- README.md | 19 ++++++++----------- tut/README.md | 9 +++------ 2 files changed, 11 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index 0efcf8cb..0da9806f 100644 --- a/README.md +++ b/README.md @@ -39,8 +39,8 @@ To tell `Fetch` how to get the data you want, you must implement the `DataSource 
Data Sources take two type parameters:
    -
-1. Identity is a type that has enough information to fetch the data
-2. Result is the type of data we want to fetch
+1. Identity is a type that has enough information to fetch the data. For a users data source, this would be a user's unique ID.
+2. Result is the type of data we want to fetch. For a users data source, this would be the `User` type.
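For instance, a users data source tying these two type parameters together might look like the following sketch (an editor's illustration, not part of the patch; the `UserSource`, `User` and `getUser` names and the hard-coded usernames are assumptions made for the example):

```scala
import cats.data.NonEmptyList
import fetch._

type UserId = Int
case class User(id: UserId, username: String)

// Identity = UserId, Result = User
implicit object UserSource extends DataSource[UserId, User] {
  override def fetchOne(id: UserId): Query[Option[User]] =
    Query.later(Option(User(id, s"@user$id")))
  override def fetchMany(ids: NonEmptyList[UserId]): Query[Map[UserId, User]] =
    Query.later(ids.unwrap.map(id => id -> User(id, s"@user$id")).toMap)
}

// With the data source in implicit scope, an identity is lifted into a fetch.
def getUser(id: UserId): Fetch[User] = Fetch(id)
```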
```scala @@ -87,7 +87,7 @@ import fetch.syntax._ val fetchOne: Fetch[String] = fetchString(1) ``` -We'll run our fetches to the ambien `Id` monad in our examples, let's do some imports. +We'll run our fetches to the ambien `Id` monad in our examples. Note that in real-life scenarios you'll want to run a fetch to a concurrency monad such as `Future` or `Task`, synchronous execution of a fetch is only supported in Scala and not Scala.js and is meant for experimentation purposes. ```scala import cats.Id @@ -95,14 +95,11 @@ import fetch.unsafe.implicits._ import fetch.syntax._ ``` -Note that in real-life scenarios you'll want to run a fetch to a concurrency monad such as `Future` or `Task`, synchronous execution of a fetch -is only supported in Scala and not Scala.js and is meant for experimentation purposes. - Let's run it and wait for the fetch to complete: ```scala fetchOne.runA[Id] -// [46] One ToString 1 +// [182] One ToString 1 // res3: cats.Id[String] = 1 ``` @@ -120,7 +117,7 @@ When executing the above fetch, note how the three identities get batched and th ```scala fetchThree.runA[Id] -// [46] Many ToString OneAnd(1,List(2, 3)) +// [182] Many ToString OneAnd(1,List(2, 3)) // res5: cats.Id[(String, String, String)] = (1,2,3) ``` @@ -159,8 +156,8 @@ Note how the two independent data fetches run in parallel, minimizing the latenc ```scala fetchMulti.runA[Id] -// [46] One ToString 1 -// [47] One Length one +// [182] One ToString 1 +// [183] One Length one // res7: cats.Id[(String, Int)] = (1,3) ``` @@ -179,6 +176,6 @@ While running it, notice that the data source is only queried once. The next tim ```scala fetchTwice.runA[Id] -// [46] One ToString 1 +// [182] One ToString 1 // res8: cats.Id[(String, String)] = (1,1) ``` diff --git a/tut/README.md b/tut/README.md index b100ab0a..4d8d37b6 100644 --- a/tut/README.md +++ b/tut/README.md @@ -46,8 +46,8 @@ To tell `Fetch` how to get the data you want, you must implement the `DataSource Data Sources take two type parameters:
    -
-1. Identity is a type that has enough information to fetch the data
-2. Result is the type of data we want to fetch
+1. Identity is a type that has enough information to fetch the data. For a users data source, this would be a user's unique ID.
+2. Result is the type of data we want to fetch. For a users data source, this would be the `User` type.
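Once a data source with these two type parameters is in implicit scope, building a fetch does no work by itself; running it is what queries the source. A minimal end-to-end sketch using the synchronous `Id` runner (again an editor's illustration, not part of the patch; `EchoSource` and `echo` are invented names):

```scala
import cats.Id
import cats.data.NonEmptyList
import fetch._
import fetch.syntax._
import fetch.unsafe.implicits._

// Identity = Int, Result = String
implicit object EchoSource extends DataSource[Int, String] {
  override def fetchOne(id: Int): Query[Option[String]] =
    Query.later(Option(id.toString))
  override def fetchMany(ids: NonEmptyList[Int]): Query[Map[Int, String]] =
    Query.later(ids.unwrap.map(i => i -> i.toString).toMap)
}

def echo(n: Int): Fetch[String] = Fetch(n)

// Both identities go to the same data source, so they are batched into one fetchMany call.
val result: Id[(String, String)] = Fetch.join(echo(1), echo(2)).runA[Id]
```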
```scala @@ -94,7 +94,7 @@ import fetch.syntax._ val fetchOne: Fetch[String] = fetchString(1) ``` -We'll run our fetches to the ambien `Id` monad in our examples, let's do some imports. +We'll run our fetches to the ambien `Id` monad in our examples. Note that in real-life scenarios you'll want to run a fetch to a concurrency monad such as `Future` or `Task`, synchronous execution of a fetch is only supported in Scala and not Scala.js and is meant for experimentation purposes. ```tut:silent import cats.Id @@ -102,9 +102,6 @@ import fetch.unsafe.implicits._ import fetch.syntax._ ``` -Note that in real-life scenarios you'll want to run a fetch to a concurrency monad such as `Future` or `Task`, synchronous execution of a fetch -is only supported in Scala and not Scala.js and is meant for experimentation purposes. - Let's run it and wait for the fetch to complete: ```tut:book From 00e0a7dff9e50df9b6d05b247dba9bc1e7b78655 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Fri, 10 Jun 2016 12:14:12 +0200 Subject: [PATCH 38/40] Minor changes to DataSourceCache trait --- docs/src/tut/docs.md | 4 ++-- shared/src/main/scala/cache.scala | 6 +++--- shared/src/main/scala/fetch.scala | 12 +++++------- shared/src/main/scala/interpreters.scala | 4 ++-- shared/src/test/scala/FetchTests.scala | 4 ++-- 5 files changed, 14 insertions(+), 16 deletions(-) diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md index fedbff23..301413aa 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -549,7 +549,7 @@ There is no need for the cache to be mutable since fetch executions run in an in ```scala trait DataSourceCache { def update[A](k: DataSourceIdentity, v: A): DataSourceCache - def get(k: DataSourceIdentity): Option[Any] + def get[A](k: DataSourceIdentity): Option[A] } ``` @@ -557,7 +557,7 @@ Let's implement a cache that forgets everything we store in it. ```tut:silent final case class ForgetfulCache() extends DataSourceCache { - override def get(k: DataSourceIdentity): Option[Any] = None + override def get[A](k: DataSourceIdentity): Option[A] = None override def update[A](k: DataSourceIdentity, v: A): ForgetfulCache = this } ``` diff --git a/shared/src/main/scala/cache.scala b/shared/src/main/scala/cache.scala index 1cbaf1e3..9f927e6b 100644 --- a/shared/src/main/scala/cache.scala +++ b/shared/src/main/scala/cache.scala @@ -22,7 +22,7 @@ package fetch trait DataSourceCache { def update[A](k: DataSourceIdentity, v: A): DataSourceCache - def get(k: DataSourceIdentity): Option[Any] + def get[A](k: DataSourceIdentity): Option[A] def cacheResults[I, A](results: Map[I, A], ds: DataSource[I, A]): DataSourceCache = { results.foldLeft(this)({ @@ -35,8 +35,8 @@ trait DataSourceCache { * A cache that stores its elements in memory. 
*/ case class InMemoryCache(state: Map[DataSourceIdentity, Any]) extends DataSourceCache { - override def get(k: DataSourceIdentity): Option[Any] = - state.get(k) + override def get[A](k: DataSourceIdentity): Option[A] = + state.get(k).asInstanceOf[Option[A]] override def update[A](k: DataSourceIdentity, v: A): InMemoryCache = copy(state = state.updated(k, v)) diff --git a/shared/src/main/scala/fetch.scala b/shared/src/main/scala/fetch.scala index 7f50ca23..3c0fb889 100644 --- a/shared/src/main/scala/fetch.scala +++ b/shared/src/main/scala/fetch.scala @@ -76,10 +76,10 @@ final case class FetchOne[I, A](a: I, ds: DataSource[I, A]) extends FetchOp[A] with FetchRequest[I, A] { override def fullfilledBy(cache: DataSourceCache): Boolean = { - cache.get(ds.identity(a)).isDefined + cache.get[A](ds.identity(a)).isDefined } override def missingIdentities(cache: DataSourceCache): List[I] = { - cache.get(ds.identity(a)).fold(List(a))((res: Any) => Nil) + cache.get[A](ds.identity(a)).fold(List(a))((res: A) => Nil) } override def dataSource: DataSource[I, A] = ds override def identities: NonEmptyList[I] = NonEmptyList(a, Nil) @@ -88,10 +88,10 @@ final case class FetchMany[I, A](as: NonEmptyList[I], ds: DataSource[I, A]) extends FetchOp[List[A]] with FetchRequest[I, A] { override def fullfilledBy(cache: DataSourceCache): Boolean = { - as.forall((i: I) => cache.get(ds.identity(i)).isDefined) + as.forall((i: I) => cache.get[A](ds.identity(i)).isDefined) } override def missingIdentities(cache: DataSourceCache): List[I] = { - as.unwrap.distinct.filterNot(i => cache.get(ds.identity(i)).isDefined) + as.unwrap.distinct.filterNot(i => cache.get[A](ds.identity(i)).isDefined) } override def dataSource: DataSource[I, A] = ds override def identities: NonEmptyList[I] = as @@ -230,9 +230,7 @@ object `package` { new (FetchOp ~> FetchOp) { def apply[B](f: FetchOp[B]): FetchOp[B] = f match { case one @ FetchOne(id, ds) => { - results - .get(ds.identity(id)) - .fold(one: FetchOp[B])(b => Cached(b).asInstanceOf[FetchOp[B]]) + results.get[B](ds.identity(id)).fold(one: FetchOp[B])(b => Cached(b)) } case many @ FetchMany(ids, ds) => { val fetched = ids.map(id => results.get(ds.identity(id))).unwrap.sequence diff --git a/shared/src/main/scala/interpreters.scala b/shared/src/main/scala/interpreters.scala index 50795a5f..364aebc2 100644 --- a/shared/src/main/scala/interpreters.scala +++ b/shared/src/main/scala/interpreters.scala @@ -136,7 +136,7 @@ trait FetchInterpreters { val startRound = System.nanoTime() val cache = env.cache cache - .get(ds.identity(id)) + .get[A](ds.identity(id)) .fold[M[(FetchEnv, A)]]( M.flatMap(M.runQuery(ds.fetchOne(id)))((res: Option[A]) => { val endRound = System.nanoTime() @@ -184,7 +184,7 @@ trait FetchInterpreters { true), List(id) ), - cached.asInstanceOf[A]) + cached) ) }) } diff --git a/shared/src/test/scala/FetchTests.scala b/shared/src/test/scala/FetchTests.scala index 05d88491..c4b7e4b6 100644 --- a/shared/src/test/scala/FetchTests.scala +++ b/shared/src/test/scala/FetchTests.scala @@ -690,7 +690,7 @@ class FetchTests extends AsyncFreeSpec with Matchers { } case class MyCache(state: Map[Any, Any] = Map.empty[Any, Any]) extends DataSourceCache { - override def get(k: DataSourceIdentity): Option[Any] = state.get(k) + override def get[A](k: DataSourceIdentity): Option[A] = state.get(k).asInstanceOf[Option[A]] override def update[A](k: DataSourceIdentity, v: A): MyCache = copy(state = state.updated(k, v)) } @@ -736,7 +736,7 @@ class FetchTests extends AsyncFreeSpec with Matchers { } case class 
ForgetfulCache() extends DataSourceCache { - override def get(k: DataSourceIdentity): Option[Any] = None + override def get[A](k: DataSourceIdentity): Option[A] = None override def update[A](k: DataSourceIdentity, v: A): ForgetfulCache = this } From daa9d5921c6483ecb95b17b82587d66da09f1517 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Fri, 10 Jun 2016 13:42:55 +0200 Subject: [PATCH 39/40] Simplify Query constructors --- README.md | 16 ++++++++-------- docs/src/tut/docs.md | 24 ++++++++++++------------ docs/src/tut/index.md | 8 ++++---- shared/src/main/scala/fetch.scala | 9 +++++---- shared/src/test/scala/FetchTests.scala | 16 ++++++++-------- tut/README.md | 6 +++--- 6 files changed, 40 insertions(+), 39 deletions(-) diff --git a/README.md b/README.md index 0da9806f..8c82e118 100644 --- a/README.md +++ b/README.md @@ -61,13 +61,13 @@ import fetch._ implicit object ToStringSource extends DataSource[Int, String]{ override def fetchOne(id: Int): Query[Option[String]] = { - Query.later({ + Query.sync({ println(s"[${Thread.currentThread.getId}] One ToString $id") Option(id.toString) }) } override def fetchMany(ids: NonEmptyList[Int]): Query[Map[Int, String]] = { - Query.later({ + Query.sync({ println(s"[${Thread.currentThread.getId}] Many ToString $ids") ids.unwrap.map(i => (i, i.toString)).toMap }) @@ -99,7 +99,7 @@ Let's run it and wait for the fetch to complete: ```scala fetchOne.runA[Id] -// [182] One ToString 1 +// [169] One ToString 1 // res3: cats.Id[String] = 1 ``` @@ -117,7 +117,7 @@ When executing the above fetch, note how the three identities get batched and th ```scala fetchThree.runA[Id] -// [182] Many ToString OneAnd(1,List(2, 3)) +// [169] Many ToString OneAnd(1,List(2, 3)) // res5: cats.Id[(String, String, String)] = (1,2,3) ``` @@ -125,7 +125,7 @@ fetchThree.runA[Id] If we combine two independent fetches from different data sources, the fetches can be run in parallel. First, let's add a data source that fetches a string's size. -This time, instead of creating the results with `Query#later` we are going to do it with `Query#async` for emulating an asynchronous data source. +This time, instead of creating the results with `Query#sync` we are going to do it with `Query#async` for emulating an asynchronous data source. ```scala implicit object LengthSource extends DataSource[String, Int]{ @@ -156,8 +156,8 @@ Note how the two independent data fetches run in parallel, minimizing the latenc ```scala fetchMulti.runA[Id] -// [182] One ToString 1 -// [183] One Length one +// [169] One ToString 1 +// [170] One Length one // res7: cats.Id[(String, Int)] = (1,3) ``` @@ -176,6 +176,6 @@ While running it, notice that the data source is only queried once. The next tim ```scala fetchTwice.runA[Id] -// [182] One ToString 1 +// [169] One ToString 1 // res8: cats.Id[(String, String)] = (1,1) ``` diff --git a/docs/src/tut/docs.md b/docs/src/tut/docs.md index 301413aa..f9582d43 100644 --- a/docs/src/tut/docs.md +++ b/docs/src/tut/docs.md @@ -127,12 +127,12 @@ val userDatabase: Map[UserId, User] = Map( implicit object UserSource extends DataSource[UserId, User]{ override def fetchOne(id: UserId): Query[Option[User]] = { - Query.later({ + Query.sync({ latency(userDatabase.get(id), s"One User $id") }) } override def fetchMany(ids: NonEmptyList[UserId]): Query[Map[UserId, User]] = { - Query.later({ + Query.sync({ latency(userDatabase.filterKeys(ids.unwrap.contains), s"Many Users $ids") }) } @@ -154,7 +154,7 @@ of `fetchMany`. 
Note that it will use the `fetchOne` implementation for requesti ```tut:silent implicit object UnbatchedSource extends DataSource[Int, Int]{ override def fetchOne(id: Int): Query[Option[Int]] = { - Query.now(Option(id)) + Query.sync(Option(id)) } override def fetchMany(ids: NonEmptyList[Int]): Query[Map[Int, Int]] = { batchingNotSupported(ids) @@ -279,16 +279,16 @@ which is run. Let's look at the various ways we have of constructing queries. ### Synchronous A query can be synchronous, and we may want to evaluate it when `fetchOne` and `fetchMany` -are called. We can do so with `Query#now`: +are called. We can do so with `Query#sync`: ```tut:book -Query.now(42) +Query.sync(42) ``` -You can also construct lazy queries that can evaluate synchronously with `Query#later`: +You can also construct lazy queries that can evaluate synchronously passing a thunk to `Query#sync`: ```tut:book -Query.later({ println("Computing 42"); 42 }) +Query.sync({ println("Computing 42"); 42 }) ``` Synchronous queries simply wrap a Cats' `Eval` instance, which captures the notion of a lazy synchronous @@ -297,7 +297,7 @@ computation. You can lift an `Eval[A]` into a `Query[A]` too: ```tut:book import cats.Eval -Query.sync(Eval.always({ println("Computing 42"); 42 })) +Query.eval(Eval.always({ println("Computing 42"); 42 })) ``` ### Asynchronous @@ -337,12 +337,12 @@ val postDatabase: Map[PostId, Post] = Map( implicit object PostSource extends DataSource[PostId, Post]{ override def fetchOne(id: PostId): Query[Option[Post]] = { - Query.later({ + Query.sync({ latency(postDatabase.get(id), s"One Post $id") }) } override def fetchMany(ids: NonEmptyList[PostId]): Query[Map[PostId, Post]] = { - Query.later({ + Query.sync({ latency(postDatabase.filterKeys(ids.unwrap.contains), s"Many Posts $ids") }) } @@ -368,13 +368,13 @@ We'll implement a data source for retrieving a post topic given a post id. ```tut:silent implicit object PostTopicSource extends DataSource[Post, PostTopic]{ override def fetchOne(id: Post): Query[Option[PostTopic]] = { - Query.later({ + Query.sync({ val topic = if (id.id % 2 == 0) "monad" else "applicative" latency(Option(topic), s"One Post Topic $id") }) } override def fetchMany(ids: NonEmptyList[Post]): Query[Map[Post, PostTopic]] = { - Query.later({ + Query.sync({ val result = ids.unwrap.map(id => (id, if (id.id % 2 == 0) "monad" else "applicative")).toMap latency(result, s"Many Post Topics $ids") }) diff --git a/docs/src/tut/index.md b/docs/src/tut/index.md index d6c9bcf3..7f3eb0e7 100644 --- a/docs/src/tut/index.md +++ b/docs/src/tut/index.md @@ -57,7 +57,7 @@ trait DataSource[Identity, Result]{ } ``` -Note that when we create a query we can choose to compute its result right away (`Query#now`), defer its evaluation (`Query#later`) or make it asynchronous (`Query#async`). Returning `Query` instances from the fetch methods allows us to abstract from the target result type and to run it synchronously or asynchronously. +Note that when we create a query we can compute its result right away, defer its evaluation or make it asynchronous. Returning `Query` instances from the fetch methods allows us to abstract from the target result type and to run it synchronously or asynchronously. We'll implement a dummy data source that can convert integers to strings. For convenience, we define a `fetchString` function that lifts identities (`Int` in our dummy data source) to a `Fetch`. 
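As a quick reference for the constructor rename in this patch, the sketch below shows the three ways a `Query` can now be built. The constructors themselves (`Query.sync`, `Query.eval`, `Query.async`) are the ones introduced here; the values and `println` messages are made up for illustration.

```scala
import cats.Eval
import fetch._

// Deferred synchronous query: the by-name argument runs when the query is evaluated.
val deferred: Query[Int] = Query.sync({ println("Computing 42"); 42 })

// Lift an existing cats Eval into a Query.
val fromEval: Query[Int] = Query.eval(Eval.always({ println("Computing 42"); 42 }))

// Asynchronous query: we are handed a callback for success and an errback for failure.
val asyncQuery: Query[Int] = Query.async[Int]((ok, fail) => {
  try ok(42)
  catch { case e: Throwable => fail(e) }
})
```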
@@ -68,13 +68,13 @@ import fetch._ implicit object ToStringSource extends DataSource[Int, String]{ override def fetchOne(id: Int): Query[Option[String]] = { - Query.later({ + Query.sync({ println(s"[${Thread.currentThread.getId}] One ToString $id") Option(id.toString) }) } override def fetchMany(ids: NonEmptyList[Int]): Query[Map[Int, String]] = { - Query.later({ + Query.sync({ println(s"[${Thread.currentThread.getId}] Many ToString $ids") ids.unwrap.map(i => (i, i.toString)).toMap }) @@ -131,7 +131,7 @@ fetchThree.runA[Id] If we combine two independent fetches from different data sources, the fetches can be run in parallel. First, let's add a data source that fetches a string's size. -This time, instead of creating the results with `Query#later` we are going to do it with `Query#async` for emulating an asynchronous data source. +This time, instead of creating the results with `Query#sync` we are going to do it with `Query#async` for emulating an asynchronous data source. ```tut:silent implicit object LengthSource extends DataSource[String, Int]{ diff --git a/shared/src/main/scala/fetch.scala b/shared/src/main/scala/fetch.scala index 3c0fb889..f44ecdaa 100644 --- a/shared/src/main/scala/fetch.scala +++ b/shared/src/main/scala/fetch.scala @@ -41,17 +41,18 @@ object Query { type Callback[A] = A => Unit type Errback = Throwable => Unit - def now[A](x: A): Query[A] = Sync(Eval.now(x)) - def later[A](th: => A): Query[A] = Sync(Eval.later(th)) + def eval[A](e: Eval[A]): Query[A] = Sync(e) + + def sync[A](th: => A): Query[A] = Sync(Eval.later(th)) - def sync[A](e: Eval[A]) = Sync(e) def async[A]( action: (Callback[A], Errback) => Unit, timeout: Duration = Duration.Inf ): Query[A] = Async(action, timeout) implicit val fetchQueryApplicative: Applicative[Query] = new Applicative[Query] { - def pure[A](x: A): Query[A] = Sync(Eval.now(x)) + override def pureEval[A](e: Eval[A]): Query[A] = Sync(e) + def pure[A](x: A): Query[A] = Sync(Eval.now(x)) def ap[A, B](ff: Query[A => B])(fa: Query[A]): Query[B] = Ap(ff, fa) } diff --git a/shared/src/test/scala/FetchTests.scala b/shared/src/test/scala/FetchTests.scala index c4b7e4b6..51212a70 100644 --- a/shared/src/test/scala/FetchTests.scala +++ b/shared/src/test/scala/FetchTests.scala @@ -35,10 +35,10 @@ object TestHelper { implicit object OneSource extends DataSource[One, Int] { override def name = "OneSource" override def fetchOne(id: One): Query[Option[Int]] = { - Query.now(Option(id.id)) + Query.sync(Option(id.id)) } override def fetchMany(ids: NonEmptyList[One]): Query[Map[One, Int]] = - Query.now(ids.unwrap.map(one => (one, one.id)).toMap) + Query.sync(ids.unwrap.map(one => (one, one.id)).toMap) } def one(id: Int): Fetch[Int] = Fetch(One(id)) @@ -46,9 +46,9 @@ object TestHelper { implicit object AnotheroneSource extends DataSource[AnotherOne, Int] { override def name = "AnotherOneSource" override def fetchOne(id: AnotherOne): Query[Option[Int]] = - Query.now(Option(id.id)) + Query.sync(Option(id.id)) override def fetchMany(ids: NonEmptyList[AnotherOne]): Query[Map[AnotherOne, Int]] = - Query.now(ids.unwrap.map(anotherone => (anotherone, anotherone.id)).toMap) + Query.sync(ids.unwrap.map(anotherone => (anotherone, anotherone.id)).toMap) } def anotherOne(id: Int): Fetch[Int] = Fetch(AnotherOne(id)) @@ -56,18 +56,18 @@ object TestHelper { implicit object ManySource extends DataSource[Many, List[Int]] { override def name = "ManySource" override def fetchOne(id: Many): Query[Option[List[Int]]] = - Query.now(Option(0 until id.n toList)) + Query.sync(Option(0 
until id.n toList)) override def fetchMany(ids: NonEmptyList[Many]): Query[Map[Many, List[Int]]] = - Query.now(ids.unwrap.map(m => (m, 0 until m.n toList)).toMap) + Query.sync(ids.unwrap.map(m => (m, 0 until m.n toList)).toMap) } case class Never() implicit object NeverSource extends DataSource[Never, Int] { override def name = "NeverSource" override def fetchOne(id: Never): Query[Option[Int]] = - Query.now(None) + Query.sync(None) override def fetchMany(ids: NonEmptyList[Never]): Query[Map[Never, Int]] = - Query.now(Map.empty[Never, Int]) + Query.sync(Map.empty[Never, Int]) } def many(id: Int): Fetch[List[Int]] = Fetch(Many(id)) diff --git a/tut/README.md b/tut/README.md index 4d8d37b6..ebde2bc1 100644 --- a/tut/README.md +++ b/tut/README.md @@ -68,13 +68,13 @@ import fetch._ implicit object ToStringSource extends DataSource[Int, String]{ override def fetchOne(id: Int): Query[Option[String]] = { - Query.later({ + Query.sync({ println(s"[${Thread.currentThread.getId}] One ToString $id") Option(id.toString) }) } override def fetchMany(ids: NonEmptyList[Int]): Query[Map[Int, String]] = { - Query.later({ + Query.sync({ println(s"[${Thread.currentThread.getId}] Many ToString $ids") ids.unwrap.map(i => (i, i.toString)).toMap }) @@ -128,7 +128,7 @@ fetchThree.runA[Id] If we combine two independent fetches from different data sources, the fetches can be run in parallel. First, let's add a data source that fetches a string's size. -This time, instead of creating the results with `Query#later` we are going to do it with `Query#async` for emulating an asynchronous data source. +This time, instead of creating the results with `Query#sync` we are going to do it with `Query#async` for emulating an asynchronous data source. ```tut:silent implicit object LengthSource extends DataSource[String, Int]{ From d80da1f3e4d05854788dad62f029784f883c107d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alejandro=20G=C3=B3mez?= Date: Fri, 10 Jun 2016 13:44:25 +0200 Subject: [PATCH 40/40] Report coverage after running JVM tests --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 51014722..ce067407 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,7 +4,7 @@ scala: jdk: - oraclejdk8 script: - - sbt coverage 'fetchJVM/test' 'coverageReport' + - sbt coverage 'fetchJVM/test' 'fetchJVM/coverageReport' - sbt 'monixJVM/test' 'monixJS/test' - sbt 'fetchJS/test' - sbt 'docs/tut'
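Stepping back from the CI tweak above, here is a hedged recap (none of it appears in the patches themselves) of how the generic cache access from patch 38 reads from user code: `get[A]` returns `Option[A]`, so cached values come back typed instead of as `Any`. `ToStringSource` mirrors the README's dummy data source; the cache keys are built with the data source's `identity` method.

```scala
import cats.data.NonEmptyList
import fetch._

implicit object ToStringSource extends DataSource[Int, String] {
  override def fetchOne(id: Int): Query[Option[String]] =
    Query.sync(Option(id.toString))
  override def fetchMany(ids: NonEmptyList[Int]): Query[Map[Int, String]] =
    Query.sync(ids.unwrap.map(i => (i, i.toString)).toMap)
}

// Pre-populate an in-memory cache keyed by the data source identity.
val cache = InMemoryCache(ToStringSource.identity(1) -> "1")

// With the new signature, get[A] returns Option[A]; no asInstanceOf at the call site.
val hit: Option[String] = cache.get[String](ToStringSource.identity(1))
val miss: Option[String] = cache.get[String](ToStringSource.identity(2))
```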