diff --git a/.gitignore b/.gitignore
index 95e1782..954faa9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,4 +14,7 @@
*~
\#*\#
.\#*
-.projectile
\ No newline at end of file
+.projectile
+
+# Ignore Kafka and ZooKeeper logs
+resources/kafka/data
diff --git a/.scalafmt.conf b/.scalafmt.conf
new file mode 100644
index 0000000..c4eb709
--- /dev/null
+++ b/.scalafmt.conf
@@ -0,0 +1,11 @@
+version=2.0.0-RC5
+
+project.git = true
+maxColumn = 120
+docstrings = ScalaDoc
+align = more
+lineEndings = unix
+rewrite.rules = [SortModifiers]
+spaces.beforeContextBoundColon = false
+optIn.blankLineBeforeDocstring = true
+includeCurlyBraceInSelectChains = false
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 7a2dbb3..70caa09 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1 +1,2 @@
-addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.2.4")
\ No newline at end of file
+addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.2.4")
+addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.0.0")
\ No newline at end of file
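
For reference, a sketch of what the new formatting setup does. `align = more` lines up tokens such as case arrows across neighbouring lines, the style visible throughout this diff (illustration only, using types from this repository; exact output depends on the scalafmt 2.0.0-RC5 engine):

    // With `align = more`, scalafmt pads patterns so the arrows align:
    def describe(o: PiObject): String = o match {
      case Chan(s)   => s"channel $s"
      case PiItem(i) => s"item $i"
      case other     => other.toString
    }

With the sbt-scalafmt plugin on the build classpath, the whole project can then be reformatted from the sbt shell (assuming the plugin's standard 2.x task names, e.g. scalafmtAll).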
diff --git a/resources/kafka/clean-topics.sh b/resources/kafka/clean-topics.sh
old mode 100644
new mode 100755
diff --git a/resources/kafka/start-servers.sh b/resources/kafka/start-servers.sh
old mode 100644
new mode 100755
diff --git a/resources/kafka/stop-servers.sh b/resources/kafka/stop-servers.sh
old mode 100644
new mode 100755
diff --git a/src/com/workflowfm/pew/PiEventHandler.scala b/src/com/workflowfm/pew/PiEventHandler.scala
index 051fd19..aa97aed 100644
--- a/src/com/workflowfm/pew/PiEventHandler.scala
+++ b/src/com/workflowfm/pew/PiEventHandler.scala
@@ -7,7 +7,6 @@ import com.workflowfm.pew.PiMetadata.{PiMetadataMap, SimulatedTime, SystemTime}
import scala.collection.immutable.Queue
import scala.concurrent.{ExecutionContext, Future, Promise}
-
///////////////////////
// Abstract PiEvents //
///////////////////////
@@ -32,12 +31,12 @@ sealed trait PiEvent[KeyT] {
/** @return The system time (in milliseconds) when this PiEvent
    * actually occurred during computation.
*/
- def rawTime: Long = SystemTime( metadata )
+ def rawTime: Long = SystemTime(metadata)
/** @return The simulated time (in its own units) when the real-life
    * event represented by this PiEvent occurred.
*/
- def simTime: Long = SimulatedTime( metadata )
+ def simTime: Long = SimulatedTime(metadata)
def asString: String
}
@@ -52,40 +51,38 @@ object PiEvent {
* @tparam KeyT The type used to identify PiInstances.
   * @return A function which returns events with modified metadata.
*/
- def liftMetaFn[KeyT]( fn: PiMetadataMap => PiMetadataMap )
- : PiEvent[KeyT] => PiEvent[KeyT] = {
+ def liftMetaFn[KeyT](fn: PiMetadataMap => PiMetadataMap): PiEvent[KeyT] => PiEvent[KeyT] = {
// PiInstance Level PiEvents
- case e: PiEventStart[KeyT] => e.copy( metadata = fn( e.metadata ) )
- case e: PiEventResult[KeyT] => e.copy( metadata = fn( e.metadata ) )
+ case e: PiEventStart[KeyT] => e.copy(metadata = fn(e.metadata))
+ case e: PiEventResult[KeyT] => e.copy(metadata = fn(e.metadata))
// PiInstance Level PiFailures
- case e: PiFailureNoResult[KeyT] => e.copy( metadata = fn( e.metadata ) )
- case e: PiFailureUnknownProcess[KeyT] => e.copy( metadata = fn( e.metadata ) )
- case e: PiFailureAtomicProcessIsComposite[KeyT] => e.copy( metadata = fn( e.metadata ) )
- case e: PiFailureNoSuchInstance[KeyT] => e.copy( metadata = fn( e.metadata ) )
- case e: PiFailureExceptions[KeyT] => e.copy( metadata = fn( e.metadata ) )
+ case e: PiFailureNoResult[KeyT] => e.copy(metadata = fn(e.metadata))
+ case e: PiFailureUnknownProcess[KeyT] => e.copy(metadata = fn(e.metadata))
+ case e: PiFailureAtomicProcessIsComposite[KeyT] => e.copy(metadata = fn(e.metadata))
+ case e: PiFailureNoSuchInstance[KeyT] => e.copy(metadata = fn(e.metadata))
+ case e: PiFailureExceptions[KeyT] => e.copy(metadata = fn(e.metadata))
// PiInstance-Call Level PiEvents
- case e: PiEventCall[KeyT] => e.copy( metadata = fn( e.metadata ) )
- case e: PiEventReturn[KeyT] => e.copy( metadata = fn( e.metadata ) )
+ case e: PiEventCall[KeyT] => e.copy(metadata = fn(e.metadata))
+ case e: PiEventReturn[KeyT] => e.copy(metadata = fn(e.metadata))
// PiInstance-Call Level PiFailures
- case e: PiFailureAtomicProcessException[KeyT] => e.copy( metadata = fn( e.metadata ) )
+ case e: PiFailureAtomicProcessException[KeyT] => e.copy(metadata = fn(e.metadata))
}
}
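
liftMetaFn enumerates every concrete case class because copy is only available on the case classes, not on the sealed trait. A minimal usage sketch (hypothetical values, assuming PiMetadataMap is the String-keyed map built by PiMetadata.apply):

    // Stamp a fixed simulated time onto any event's metadata.
    val addSimTime: PiEvent[Int] => PiEvent[Int] =
      PiEvent.liftMetaFn[Int](_ + (PiMetadata.SimulatedTime.id -> 42L))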
-
///////////////////////////////
// PiInstance Level PiEvents //
///////////////////////////////
case class PiEventStart[KeyT](
- i: PiInstance[KeyT],
- override val metadata: PiMetadataMap = PiMetadata()
- ) extends PiEvent[KeyT] {
+ i: PiInstance[KeyT],
+ override val metadata: PiMetadataMap = PiMetadata()
+) extends PiEvent[KeyT] {
- override def id: KeyT = i.id
+ override def id: KeyT = i.id
override def asString: String = s" === [$id] INITIAL STATE === \n${i.state}\n === === === === === === === ==="
}
@@ -97,21 +94,20 @@ case class PiEventResult[KeyT](
i: PiInstance[KeyT],
res: Any,
override val metadata: PiMetadataMap = PiMetadata()
-
- ) extends PiEvent[KeyT] with PiEventFinish[KeyT] {
+) extends PiEvent[KeyT]
+ with PiEventFinish[KeyT] {
override def id: KeyT = i.id
- override def asString: String = s" === [$id] FINAL STATE === \n${i.state}\n === === === === === === === ===\n" +
- s" === [$id] RESULT: $res"
+ override def asString: String =
+ s" === [$id] FINAL STATE === \n${i.state}\n === === === === === === === ===\n" +
+ s" === [$id] RESULT: $res"
}
-
////////////////////////////////////////////
// PiInstance Level Exceptions & Failures //
////////////////////////////////////////////
-sealed trait PiFailure[KeyT]
- extends PiEvent[KeyT] {
+sealed trait PiFailure[KeyT] extends PiEvent[KeyT] {
def exception: PiException[KeyT]
@@ -121,14 +117,14 @@ sealed trait PiFailure[KeyT]
val ex: PiException[KeyT] = exception
val typeName: String = ex.getClass.getSimpleName
- val message: String = ex.getMessage
+ val message: String = ex.getMessage
s"PiEe($typeName):$id($message)"
}
}
object PiFailure {
- def condense[KeyT]( errors: Seq[PiFailure[KeyT]] ): PiFailure[KeyT] = {
+ def condense[KeyT](errors: Seq[PiFailure[KeyT]]): PiFailure[KeyT] = {
errors.head match {
case PiFailureAtomicProcessException(id, ref, message, trace, metadata) =>
PiFailureExceptions(id, message, trace, metadata)
@@ -138,54 +134,57 @@ object PiFailure {
}
case class PiFailureNoResult[KeyT](
- i:PiInstance[KeyT],
+ i: PiInstance[KeyT],
override val metadata: PiMetadataMap = PiMetadata()
-
- ) extends PiFailure[KeyT] with PiEventFinish[KeyT] {
+) extends PiFailure[KeyT]
+ with PiEventFinish[KeyT] {
override def id: KeyT = i.id
- override def asString: String = s" === [$id] FINAL STATE ===\n${i.state}\n === === === === === === === ===\n === [$id] NO RESULT! ==="
+ override def asString: String =
+ s" === [$id] FINAL STATE ===\n${i.state}\n === === === === === === === ===\n === [$id] NO RESULT! ==="
- override def exception: PiException[KeyT] = NoResultException[KeyT]( i )
+ override def exception: PiException[KeyT] = NoResultException[KeyT](i)
}
case class PiFailureUnknownProcess[KeyT](
i: PiInstance[KeyT],
process: String,
override val metadata: PiMetadataMap = PiMetadata()
-
- ) extends PiFailure[KeyT] with PiEventFinish[KeyT] {
+) extends PiFailure[KeyT]
+ with PiEventFinish[KeyT] {
override def id: KeyT = i.id
- override def asString: String = s" === [$id] FINAL STATE === \n${i.state}\n === === === === === === === ===\n" +
- s" === [$id] FAILED - Unknown process: $process"
+ override def asString: String =
+ s" === [$id] FINAL STATE === \n${i.state}\n === === === === === === === ===\n" +
+ s" === [$id] FAILED - Unknown process: $process"
- override def exception: PiException[KeyT] = UnknownProcessException[KeyT]( i, process )
+ override def exception: PiException[KeyT] = UnknownProcessException[KeyT](i, process)
}
case class PiFailureAtomicProcessIsComposite[KeyT](
i: PiInstance[KeyT],
process: String,
override val metadata: PiMetadataMap = PiMetadata()
-
- ) extends PiFailure[KeyT] with PiEventFinish[KeyT] {
+) extends PiFailure[KeyT]
+ with PiEventFinish[KeyT] {
override def id: KeyT = i.id
- override def asString: String = s" === [$id] FINAL STATE === \n${i.state}\n === === === === === === === ===\n" +
- s" === [$id] FAILED - Executor encountered composite process thread: $process"
+ override def asString: String =
+ s" === [$id] FINAL STATE === \n${i.state}\n === === === === === === === ===\n" +
+ s" === [$id] FAILED - Executor encountered composite process thread: $process"
- override def exception: PiException[KeyT] = AtomicProcessIsCompositeException[KeyT]( i, process )
+ override def exception: PiException[KeyT] = AtomicProcessIsCompositeException[KeyT](i, process)
}
case class PiFailureNoSuchInstance[KeyT](
override val id: KeyT,
override val metadata: PiMetadataMap = PiMetadata()
-
- ) extends PiFailure[KeyT] with PiEventFinish[KeyT] {
+) extends PiFailure[KeyT]
+ with PiEventFinish[KeyT] {
override def asString: String = s" === [$id] FAILED - Failed to find instance!"
- override def exception: PiException[KeyT] = NoSuchInstanceException[KeyT]( id )
+ override def exception: PiException[KeyT] = NoSuchInstanceException[KeyT](id)
}
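
Every PiFailure pairs with a PiException (see PiException.scala below), and the two round-trip. A quick sketch:

    // exception.event reconstructs an equivalent failure event.
    val failure: PiFailure[Int] = PiFailureNoSuchInstance(7)
    assert(failure.exception.event.id == 7)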
/** At least one AtomicProcess called by this PiInstance encountered an exception.
@@ -195,25 +194,24 @@ case class PiFailureExceptions[KeyT](
message: String,
trace: Array[StackTraceElement],
override val metadata: PiMetadataMap
-
- ) extends PiFailure[KeyT] with PiEventFinish[KeyT] {
+) extends PiFailure[KeyT]
+ with PiEventFinish[KeyT] {
override def asString: String = s" === [$id] FAILED - Exception: $message\n === [$id] Trace: $trace"
- override def exception: PiException[KeyT] = RemoteException[KeyT]( id, message, trace, rawTime )
+ override def exception: PiException[KeyT] = RemoteException[KeyT](id, message, trace, rawTime)
- override def equals( other: Any ): Boolean = other match {
+ override def equals(other: Any): Boolean = other match {
case that: PiFailureExceptions[KeyT] =>
      id == that.id && message == that.message
    case _ => false // avoid MatchError on non-matching types
  }
}
object PiFailureExceptions {
- def apply[KeyT]( id: KeyT, ex: Throwable, metadata: PiMetadataMap = PiMetadata() ): PiFailureExceptions[KeyT]
- = PiFailureExceptions[KeyT]( id, ex.getLocalizedMessage, ex.getStackTrace, metadata )
+ def apply[KeyT](id: KeyT, ex: Throwable, metadata: PiMetadataMap = PiMetadata()): PiFailureExceptions[KeyT] =
+ PiFailureExceptions[KeyT](id, ex.getLocalizedMessage, ex.getStackTrace, metadata)
}
-
///////////////////////////////////////
// AtomicProcess Call Level PiEvents //
///////////////////////////////////////
@@ -230,9 +228,9 @@ case class PiEventCall[KeyT](
p: MetadataAtomicProcess,
args: Seq[PiObject],
override val metadata: PiMetadataMap = PiMetadata()
- ) extends PiAtomicProcessEvent[KeyT] {
+) extends PiAtomicProcessEvent[KeyT] {
- override def asString: String = s" === [$id] PROCESS CALL: ${p.name} ($ref) args: ${args.mkString(",")}"
+ override def asString: String = s" === [$id] PROCESS CALL: ${p.name} ($ref) args: ${args.mkString(",")}"
}
/** Abstract superclass for all PiAtomicProcessEvent which denote the termination of an AtomicProcess call.
@@ -244,13 +242,12 @@ case class PiEventReturn[KeyT](
ref: Int,
result: Any,
metadata: PiMetadataMap = PiMetadata()
+) extends PiAtomicProcessEvent[KeyT]
+ with PiEventCallEnd[KeyT] {
- ) extends PiAtomicProcessEvent[KeyT] with PiEventCallEnd[KeyT] {
-
- override def asString: String = s" === [$id] PROCESS RETURN: ($ref) returned: $result"
+ override def asString: String = s" === [$id] PROCESS RETURN: ($ref) returned: $result"
}
-
/////////////////////////////////////////
// AtomicProcess Call Level PiFailures //
/////////////////////////////////////////
@@ -261,130 +258,137 @@ case class PiFailureAtomicProcessException[KeyT](
message: String,
trace: Array[StackTraceElement],
override val metadata: PiMetadataMap
+) extends PiAtomicProcessEvent[KeyT]
+ with PiFailure[KeyT]
+ with PiEventCallEnd[KeyT] {
- ) extends PiAtomicProcessEvent[KeyT] with PiFailure[KeyT] with PiEventCallEnd[KeyT] {
-
- override def asString: String = s" === [$id] PROCESS [$ref] FAILED - Exception: $message\n === [$id] Trace: $trace"
+ override def asString: String = s" === [$id] PROCESS [$ref] FAILED - Exception: $message\n === [$id] Trace: $trace"
- override def exception: PiException[KeyT] = RemoteProcessException[KeyT]( id, ref, message, trace, rawTime )
+ override def exception: PiException[KeyT] = RemoteProcessException[KeyT](id, ref, message, trace, rawTime)
- override def equals( other: Any ): Boolean = other match {
+ override def equals(other: Any): Boolean = other match {
case that: PiFailureAtomicProcessException[KeyT] =>
      id == that.id && ref == that.ref && message == that.message
    case _ => false // avoid MatchError on non-matching types
  }
}
object PiFailureAtomicProcessException {
- def apply[KeyT]( id: KeyT, ref: Int, ex: Throwable, metadata: PiMetadataMap = PiMetadata() ): PiFailureAtomicProcessException[KeyT]
- = PiFailureAtomicProcessException[KeyT]( id, ref, ex.getLocalizedMessage, ex.getStackTrace, metadata )
+ def apply[KeyT](
+ id: KeyT,
+ ref: Int,
+ ex: Throwable,
+ metadata: PiMetadataMap = PiMetadata()
+ ): PiFailureAtomicProcessException[KeyT] =
+ PiFailureAtomicProcessException[KeyT](id, ref, ex.getLocalizedMessage, ex.getStackTrace, metadata)
}
-
/////////////////////
// PiEventHandlers //
/////////////////////
// Return true if the handler is done and needs to be unsubscribed.
-trait PiEventHandler[KeyT] extends (PiEvent[KeyT]=>Boolean) {
- def name:String
- def and(h:PiEventHandler[KeyT]) = MultiPiEventHandler(this,h)
+trait PiEventHandler[KeyT] extends (PiEvent[KeyT] => Boolean) {
+ def name: String
+ def and(h: PiEventHandler[KeyT]) = MultiPiEventHandler(this, h)
}
-trait PiEventHandlerFactory[T,H <: PiEventHandler[T]] {
- def build(id:T):H
+trait PiEventHandlerFactory[T, H <: PiEventHandler[T]] {
+ def build(id: T): H
}
-class PrintEventHandler[T](override val name:String) extends PiEventHandler[T] {
+class PrintEventHandler[T](override val name: String) extends PiEventHandler[T] {
  val formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS") // yyyy: calendar year, not week-year (YYYY)
- override def apply(e:PiEvent[T]) = {
+ override def apply(e: PiEvent[T]) = {
val time = formatter.format(e.rawTime)
- System.err.println("["+time+"]" + e.asString)
+ System.err.println("[" + time + "]" + e.asString)
false
}
}
-
-class PromiseHandler[T](override val name:String, val id:T) extends PiEventHandler[T] {
+class PromiseHandler[T](override val name: String, val id: T) extends PiEventHandler[T] {
val promise = Promise[Any]()
- def future = promise.future
+ def future = promise.future
// class PromiseException(message:String) extends Exception(message)
- override def apply(e:PiEvent[T]) = if (e.id == this.id) e match {
- case PiEventResult(i,res,_) => promise.success(res); true
- case ex: PiFailure[T] => promise.failure( ex.exception ); true
- case _ => false
- } else false
+ override def apply(e: PiEvent[T]) =
+ if (e.id == this.id) e match {
+ case PiEventResult(i, res, _) => promise.success(res); true
+ case ex: PiFailure[T] => promise.failure(ex.exception); true
+ case _ => false
+ } else false
}
-class PromiseHandlerFactory[T](name:T=>String) extends PiEventHandlerFactory[T,PromiseHandler[T]] {
- def this(name:String) = this { _:T => name }
- override def build(id:T) = new PromiseHandler[T](name(id),id)
+class PromiseHandlerFactory[T](name: T => String) extends PiEventHandlerFactory[T, PromiseHandler[T]] {
+ def this(name: String) = this { _: T =>
+ name
+ }
+ override def build(id: T) = new PromiseHandler[T](name(id), id)
}
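
A PromiseHandler completes its promise on the first matching result or failure and then reports itself done. A minimal sketch:

    // A matching failure fails the promise; `true` means "unsubscribe me".
    val h = new PromiseHandler[Int]("await-1", 1)
    assert(h(PiFailureNoSuchInstance(1)))
    // h.future has now failed with NoSuchInstanceException(1).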
-
-
-class CounterHandler[T](override val name:String, val id:T) extends PiEventHandler[T] {
- private var counter:Int = 0
- def count = counter
- val promise = Promise[Int]()
- def future = promise.future
-
- override def apply(e:PiEvent[T]) = if (e.id == this.id) e match {
- case PiEventResult(i,res,_) => counter += 1 ; promise.success(counter) ; true
- case ex: PiFailure[T] => counter += 1; promise.success(counter) ; true
- case _ => counter += 1 ; false
- } else false
+class CounterHandler[T](override val name: String, val id: T) extends PiEventHandler[T] {
+ private var counter: Int = 0
+ def count = counter
+ val promise = Promise[Int]()
+ def future = promise.future
+
+ override def apply(e: PiEvent[T]) =
+ if (e.id == this.id) e match {
+ case PiEventResult(i, res, _) => counter += 1; promise.success(counter); true
+ case ex: PiFailure[T] => counter += 1; promise.success(counter); true
+ case _ => counter += 1; false
+ } else false
}
-class CounterHandlerFactory[T](name:T=>String) extends PiEventHandlerFactory[T,CounterHandler[T]] {
- def this(name:String) = this { _:T => name }
- override def build(id:T) = new CounterHandler[T](name(id),id)
+class CounterHandlerFactory[T](name: T => String) extends PiEventHandlerFactory[T, CounterHandler[T]] {
+ def this(name: String) = this { _: T =>
+ name
+ }
+ override def build(id: T) = new CounterHandler[T](name(id), id)
}
-
-case class MultiPiEventHandler[T](handlers:Queue[PiEventHandler[T]]) extends PiEventHandler[T] {
- override def name = handlers map (_.name) mkString(",")
- override def apply(e:PiEvent[T]) = handlers map (_(e)) forall (_ == true)
- override def and(h:PiEventHandler[T]) = MultiPiEventHandler(handlers :+ h)
+case class MultiPiEventHandler[T](handlers: Queue[PiEventHandler[T]]) extends PiEventHandler[T] {
+ override def name = handlers map (_.name) mkString (",")
+ override def apply(e: PiEvent[T]) = handlers map (_(e)) forall (_ == true)
+ override def and(h: PiEventHandler[T]) = MultiPiEventHandler(handlers :+ h)
}
-
+
object MultiPiEventHandler {
- def apply[T](handlers:PiEventHandler[T]*):MultiPiEventHandler[T] = MultiPiEventHandler[T](Queue[PiEventHandler[T]]() ++ handlers)
+ def apply[T](handlers: PiEventHandler[T]*): MultiPiEventHandler[T] =
+ MultiPiEventHandler[T](Queue[PiEventHandler[T]]() ++ handlers)
}
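
A MultiPiEventHandler reports itself done only when every inner handler does, because apply folds the handlers' results with forall. A sketch:

    // PrintEventHandler never finishes, so the combined handler stays subscribed.
    val multi = new PrintEventHandler[Int]("a") and new PrintEventHandler[Int]("b")
    assert(!multi(PiFailureNoSuchInstance(3)))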
-
trait PiObservable[T] {
- def subscribe(handler:PiEventHandler[T]):Future[Boolean]
- def unsubscribe(handlerName:String):Future[Boolean]
+ def subscribe(handler: PiEventHandler[T]): Future[Boolean]
+ def unsubscribe(handlerName: String): Future[Boolean]
}
trait SimplePiObservable[T] extends PiObservable[T] {
import collection.mutable.Map
- implicit val executionContext:ExecutionContext
+ implicit val executionContext: ExecutionContext
- val handlers:Map[String,PiEventHandler[T]] = Map[String,PiEventHandler[T]]()
-
- override def subscribe(handler:PiEventHandler[T]):Future[Boolean] = Future {
+ val handlers: Map[String, PiEventHandler[T]] = Map[String, PiEventHandler[T]]()
+
+ override def subscribe(handler: PiEventHandler[T]): Future[Boolean] = Future {
//System.err.println("Subscribed: " + handler.name)
handlers += (handler.name -> handler)
true
}
-
- override def unsubscribe(handlerName:String):Future[Boolean] = Future {
+
+ override def unsubscribe(handlerName: String): Future[Boolean] = Future {
handlers.remove(handlerName).isDefined
}
-
- def publish(evt:PiEvent[T]) = {
- handlers.retain((k,v) => !v(evt))
+
+ def publish(evt: PiEvent[T]) = {
+ handlers.retain((k, v) => !v(evt))
}
}
trait DelegatedPiObservable[T] extends PiObservable[T] {
val worker: PiObservable[T]
- override def subscribe( handler: PiEventHandler[T] ): Future[Boolean] = worker.subscribe( handler )
- override def unsubscribe( handlerName: String ): Future[Boolean] = worker.unsubscribe( handlerName )
+ override def subscribe(handler: PiEventHandler[T]): Future[Boolean] = worker.subscribe(handler)
+ override def unsubscribe(handlerName: String): Future[Boolean] = worker.unsubscribe(handlerName)
}
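
Wiring these pieces together, a minimal in-memory observable (a sketch; note that subscribe registers asynchronously, so callers should wait on the returned Future before publishing):

    import scala.concurrent.{Await, ExecutionContext}
    import scala.concurrent.duration._

    object EventBusDemo extends App {
      object EventBus extends SimplePiObservable[Int] {
        override implicit val executionContext: ExecutionContext = ExecutionContext.global
      }
      Await.result(EventBus.subscribe(new PrintEventHandler[Int]("log")), 1.second)
      EventBus.publish(PiFailureNoSuchInstance(0)) // prints; returns false, so the handler is retained
    }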
diff --git a/src/com/workflowfm/pew/PiException.scala b/src/com/workflowfm/pew/PiException.scala
index 89c70ff..30e37de 100644
--- a/src/com/workflowfm/pew/PiException.scala
+++ b/src/com/workflowfm/pew/PiException.scala
@@ -2,72 +2,64 @@ package com.workflowfm.pew
import com.workflowfm.pew.PiMetadata.SystemTime
-sealed abstract class PiException[KeyT]( message: String )
- extends Exception( message ) {
+sealed abstract class PiException[KeyT](message: String) extends Exception(message) {
def id: KeyT
def event: PiFailure[KeyT]
}
-sealed trait HasPiInstance[KeyT]
- extends PiException[KeyT] {
+sealed trait HasPiInstance[KeyT] extends PiException[KeyT] {
val pii: PiInstance[KeyT]
override def id: KeyT = pii.id
}
-case class NoResultException[KeyT]( override val pii: PiInstance[KeyT] )
- extends PiException[KeyT]( s"[${pii.id}] NO RESULT!" )
+case class NoResultException[KeyT](override val pii: PiInstance[KeyT])
+ extends PiException[KeyT](s"[${pii.id}] NO RESULT!")
with HasPiInstance[KeyT] {
- override def event: PiFailure[KeyT]
- = PiFailureNoResult( pii )
+ override def event: PiFailure[KeyT] = PiFailureNoResult(pii)
}
-case class UnknownProcessException[KeyT]( override val pii: PiInstance[KeyT], process: String )
- extends PiException[KeyT]( s"[${pii.id}] FAILED - Unknown process: $process" )
+case class UnknownProcessException[KeyT](override val pii: PiInstance[KeyT], process: String)
+ extends PiException[KeyT](s"[${pii.id}] FAILED - Unknown process: $process")
with HasPiInstance[KeyT] {
- override def event: PiFailure[KeyT]
- = PiFailureUnknownProcess( pii, process )
+ override def event: PiFailure[KeyT] = PiFailureUnknownProcess(pii, process)
}
-case class AtomicProcessIsCompositeException[KeyT]( override val pii: PiInstance[KeyT], process: String )
- extends PiException[KeyT]( s"[${pii.id}] FAILED - Executor encountered composite process thread: $process" )
+case class AtomicProcessIsCompositeException[KeyT](override val pii: PiInstance[KeyT], process: String)
+ extends PiException[KeyT](s"[${pii.id}] FAILED - Executor encountered composite process thread: $process")
with HasPiInstance[KeyT] {
- override def event: PiFailure[KeyT]
- = PiFailureAtomicProcessIsComposite( pii, process )
+ override def event: PiFailure[KeyT] = PiFailureAtomicProcessIsComposite(pii, process)
}
-case class NoSuchInstanceException[KeyT]( override val id: KeyT )
- extends PiException[KeyT]( s"[$id] FAILED - Failed to find instance!" ) {
+case class NoSuchInstanceException[KeyT](override val id: KeyT)
+ extends PiException[KeyT](s"[$id] FAILED - Failed to find instance!") {
- override def event: PiFailure[KeyT]
- = PiFailureNoSuchInstance( id )
+ override def event: PiFailure[KeyT] = PiFailureNoSuchInstance(id)
}
class RemoteException[KeyT](
override val id: KeyT,
val message: String,
val time: Long
+) extends PiException[KeyT](message) {
- ) extends PiException[KeyT]( message ) {
-
- override def event: PiFailure[KeyT]
- = PiFailureExceptions( id, message, getStackTrace, PiMetadata( SystemTime -> time ) )
+ override def event: PiFailure[KeyT] = PiFailureExceptions(id, message, getStackTrace, PiMetadata(SystemTime -> time))
}
object RemoteException {
- def apply[KeyT]( id: KeyT, message: String, trace: Array[StackTraceElement], time: Long ): RemoteException[KeyT] = {
- val exception = new RemoteException( id, message, time )
- exception.setStackTrace( trace )
+ def apply[KeyT](id: KeyT, message: String, trace: Array[StackTraceElement], time: Long): RemoteException[KeyT] = {
+ val exception = new RemoteException(id, message, time)
+ exception.setStackTrace(trace)
exception
}
- def apply[KeyT]( id: KeyT, t: Throwable ): RemoteException[KeyT]
- = apply[KeyT]( id, t.getMessage, t.getStackTrace, System.currentTimeMillis() )
+ def apply[KeyT](id: KeyT, t: Throwable): RemoteException[KeyT] =
+ apply[KeyT](id, t.getMessage, t.getStackTrace, System.currentTimeMillis())
}
class RemoteProcessException[KeyT](
@@ -75,20 +67,25 @@ class RemoteProcessException[KeyT](
val ref: Int,
val message: String,
val time: Long
+) extends PiException[KeyT](message) {
- ) extends PiException[KeyT]( message ) {
-
- override def event: PiFailure[KeyT]
- = PiFailureAtomicProcessException( id, ref, message, getStackTrace, PiMetadata( SystemTime -> time ) )
+ override def event: PiFailure[KeyT] =
+ PiFailureAtomicProcessException(id, ref, message, getStackTrace, PiMetadata(SystemTime -> time))
}
object RemoteProcessException {
- def apply[KeyT]( id: KeyT, ref: Int, message: String, trace: Array[StackTraceElement], time: Long ): RemoteProcessException[KeyT] = {
- val exception = new RemoteProcessException( id, ref, message, time )
- exception.setStackTrace( trace )
+ def apply[KeyT](
+ id: KeyT,
+ ref: Int,
+ message: String,
+ trace: Array[StackTraceElement],
+ time: Long
+ ): RemoteProcessException[KeyT] = {
+ val exception = new RemoteProcessException(id, ref, message, time)
+ exception.setStackTrace(trace)
exception
}
- def apply[KeyT]( id: KeyT, ref: Int, t: Throwable ): RemoteProcessException[KeyT]
- = apply[KeyT]( id, ref, t.getMessage, t.getStackTrace, System.currentTimeMillis() )
-}
\ No newline at end of file
+ def apply[KeyT](id: KeyT, ref: Int, t: Throwable): RemoteProcessException[KeyT] =
+ apply[KeyT](id, ref, t.getMessage, t.getStackTrace, System.currentTimeMillis())
+}
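
The RemoteException companions keep the remote stack trace intact while recording when the failure happened. A sketch:

    // Wrap a locally caught Throwable for transport as a PiEvent.
    val remote  = RemoteException[Int](42, new RuntimeException("boom"))
    val failure = remote.event // a PiFailureExceptions carrying a SystemTime metadata entry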
diff --git a/src/com/workflowfm/pew/PiInstance.scala b/src/com/workflowfm/pew/PiInstance.scala
index bb42561..ac46fc4 100644
--- a/src/com/workflowfm/pew/PiInstance.scala
+++ b/src/com/workflowfm/pew/PiInstance.scala
@@ -1,117 +1,170 @@
package com.workflowfm.pew
-case class PiInstance[T](final val id:T, called:Seq[Int], process:PiProcess, state:PiState) {
- def result:Option[Any] = {
+case class PiInstance[T](final val id: T, called: Seq[Int], process: PiProcess, state: PiState) {
+ def result: Option[Any] = {
val res = state.resources.sub(process.output._1)
- if (res.isGround) Some(PiObject.get(res))
- else None
+ if (res.isGround) {
+ Some(PiObject.get(res))
+ } else {
+ None
+ }
}
-
+
/**
- * Assuming a fullReduce, the workflow is completed if:
- * (1) there are no new threads to be called
- * (2) there are no called threads to be returned
- */
- def completed:Boolean = state.threads.isEmpty && called.isEmpty
-
- def postResult(thread:Int, res:PiObject):PiInstance[T] = copy(
- called = called filterNot (_ == thread),
- state = state.result(thread,res).map(_.fullReduce()).getOrElse(state)
- )
-
- /** @return Post multiple thread results to this PiInstance simultaneously and fully reduce the result.
+ * Assuming a fullReduce, the workflow is completed iff:
+ * (1) there are no new threads to be called
+ * (2) there are no called threads to be returned
*/
- def postResult( allRes: Seq[(Int, PiObject)] ): PiInstance[T] = copy(
- called = called filterNot (allRes.map(_._1) contains _),
- state = allRes.foldLeft( state )( (s,e) => s.result(e._1, e._2).getOrElse(s) ).fullReduce()
- )
-
- def reduce:PiInstance[T] = copy(state = state.fullReduce())
-
- def handleThreads(handler:(Int,PiFuture)=>Boolean):(Seq[Int],PiInstance[T]) = {
- val newstate = state.handleThreads(THandler(handler))
- val newcalls = newstate.threads.keys.toSeq
- (newcalls diff called,copy(called=newcalls,state=newstate))
- }
+ def completed: Boolean = state.threads.isEmpty && called.isEmpty
+
+ /**
+   * Post the result of a call, removing the completed call from the running calls, replacing its output future
+   * with the value, and fully reducing.
+ *
+ * @param thread Completed call thread
+ * @param res Call result
+ * @return Updated instance
+ */
+ def postResult(thread: Int, res: PiObject): PiInstance[T] = copy(
+ called = called filterNot (_ == thread),
+ state = state
+ .result(thread, res)
+ .map(_.fullReduce())
+ .getOrElse(state)
+ )
/**
- * Wrapper for thread handlers that ignores threads that have been already called.
- */
- private case class THandler(handler:(Int,PiFuture)=>Boolean) extends (((Int,PiFuture)) => Boolean) {
- def apply(t:(Int,PiFuture)):Boolean =
- if (called contains t._1) true
- else handler(t._1,t._2)
+ * Post the results of multiple calls, removing the completed calls from the running calls, replacing their output
+   * futures with the values, and fully reducing.
+ *
+ * @param allRes Sequence of thread-result pairs
+ * @return Updated instance
+ */
+ def postResult(allRes: Seq[(Int, PiObject)]): PiInstance[T] = copy(
+ called = called filterNot (allRes.map(_._1) contains _),
+ state = allRes
+ .foldLeft(state)((s, e) => s.result(e._1, e._2).getOrElse(s))
+ .fullReduce()
+ )
+
+ def reduce: PiInstance[T] = copy(state = state.fullReduce())
+
+ def handleThreads(handler: (Int, PiFuture) => Boolean): (Seq[Int], PiInstance[T]) = {
+ // Apply the thread handler to the state
+ val newState = state.handleThreads(THandler(handler))
+
+ // Retrieve the new set of threads
+ val newCalls = newState.threads.keys.toSeq
+
+ // Return the set of handled threads that were not yet called, as well as the updated instance
+ (newCalls diff called, copy(called = newCalls, state = newState))
}
-
- def piFutureOf(ref:Int):Option[PiFuture] = state.threads.get(ref)
-
- def getProc(p:String):Option[PiProcess] = state.processes.get(p)
-
- def getAtomicProc( name: String ): MetadataAtomicProcess = {
- getProc( name ) match {
- case None => throw UnknownProcessException( this, name )
- case Some( proc ) => proc match {
- case atomic: MetadataAtomicProcess => atomic
- case _: PiProcess => throw AtomicProcessIsCompositeException( this, name )
+
+ /**
+ * Wrapper for thread handlers that ignores threads that have been already called.
+ */
+ private case class THandler(handler: (Int, PiFuture) => Boolean) extends (((Int, PiFuture)) => Boolean) {
+
+ def apply(t: (Int, PiFuture)): Boolean =
+ if (called contains t._1) {
+ true
+ } else {
+ handler(t._1, t._2)
}
+ }
+
+ def piFutureOf(ref: Int): Option[PiFuture] = state.threads.get(ref)
+
+ def getProc(p: String): Option[PiProcess] = state.processes.get(p)
+
+ def getAtomicProc(name: String): MetadataAtomicProcess = {
+ getProc(name) match {
+ case None => throw UnknownProcessException(this, name)
+ case Some(proc) =>
+ proc match {
+ case atomic: MetadataAtomicProcess => atomic
+ case _: PiProcess => throw AtomicProcessIsCompositeException(this, name)
+ }
}
}
-
+
/**
- * Should the simulator wait for the workflow?
- */
- def simulationReady:Boolean =
- if (completed) true // workflow is done
- else {
- val procs = state.threads flatMap { f => getProc(f._2.fun) }
- if (procs.isEmpty) false // workflow is not completed, so we either couldn't find a process with getProc or
- // calls have not been converted to threads yet (so no fullreduce) for whatever reason
- else procs.forall(_.isSimulatedProcess) // are all open threads simulated processes?
+ * Is the workflow ready for simulation?
+ */
+ def simulationReady: Boolean =
+ if (completed) {
+ // Workflow is done
+ true
+ } else {
+ // Workflow not done --> getProc failed or some call not converted to thread, or only simulated processes left
+
+ // Get processes for all running threads
+ val procs = state.threads flatMap { f =>
+ getProc(f._2.fun)
+ }
+
+ if (procs.isEmpty) {
+ // getProc failed or some call not converted to thread
+ false
+ } else {
+ // Are all open threads simulated processes?
+ procs.forall(_.isSimulatedProcess)
+ }
}
}
+
object PiInstance {
- def apply[T](id:T,p:PiProcess,args:PiObject*):PiInstance[T] = PiInstance(id, Seq(), p, p.execState(args))
- /** Construct a PiInstance as if we are making a call to ProcessExecutor.execute
+ def apply[T](id: T, p: PiProcess, args: PiObject*): PiInstance[T] = PiInstance(id, Seq(), p, p.execState(args))
+
+ /**
+ * Construct a PiInstance as if we are making a call to ProcessExecutor.execute
*/
- def forCall[T]( id: T, p: PiProcess, args: Any* ): PiInstance[T]
- = PiInstance( id, Seq(), p, p execState ( args map PiObject.apply ) )
+ def forCall[T](id: T, p: PiProcess, args: Any*): PiInstance[T] =
+ PiInstance(id, Seq(), p, p execState (args map PiObject.apply))
}
+trait PiInstanceStore[T] {
+ def get(id: T): Option[PiInstance[T]]
+ def put(i: PiInstance[T]): PiInstanceStore[T]
+ def del(id: T): PiInstanceStore[T]
-trait PiInstanceStore[T] {
- def get(id:T):Option[PiInstance[T]]
- def put(i:PiInstance[T]):PiInstanceStore[T]
- def del(id:T):PiInstanceStore[T]
- def simulationReady:Boolean
+ def simulationReady: Boolean
}
trait PiInstanceMutableStore[T] {
- def get(id:T):Option[PiInstance[T]]
- def put(i:PiInstance[T]):Unit
- def del(id:T):Unit
- def simulationReady:Boolean
+ def get(id: T): Option[PiInstance[T]]
+
+ def put(i: PiInstance[T]): Unit
+
+ def del(id: T): Unit
+
+ def simulationReady: Boolean
}
-case class SimpleInstanceStore[T](m: Map[T,PiInstance[T]]) extends PiInstanceStore[T] {
+case class SimpleInstanceStore[T](m: Map[T, PiInstance[T]]) extends PiInstanceStore[T] {
override def get(id: T): Option[PiInstance[T]] = m.get(id)
- override def put(i: PiInstance[T]): SimpleInstanceStore[T] = copy(m = m + (i.id->i))
+
+ override def put(i: PiInstance[T]): SimpleInstanceStore[T] = copy(m = m + (i.id -> i))
+
override def del(id: T): SimpleInstanceStore[T] = copy(m = m - id)
-
+
override def simulationReady: Boolean = m.values.forall(_.simulationReady)
+
/*{
m.values.foreach { i => {
val procs = i.state.threads.values.map(_.fun).mkString(", ")
println(s"**> [${i.id}] is waiting for procs: $procs")
- } }
-
+ } }
+
}*/
}
object SimpleInstanceStore {
- def apply[T](l: PiInstance[T]*): SimpleInstanceStore[T]
- = (SimpleInstanceStore( Map[T, PiInstance[T]]() ) /: l) (_.put(_))
+
+ def apply[T](l: PiInstance[T]*): SimpleInstanceStore[T] =
+ (SimpleInstanceStore(Map[T, PiInstance[T]]()) /: l)(_.put(_))
}
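
SimpleInstanceStore is persistent: put and del return new stores rather than mutating in place. A sketch (DummyComposition, from PiProcess.scala, stands in for a real process):

    val proc  = DummyComposition("Id", "in", "out", "n")
    val store = SimpleInstanceStore(PiInstance.forCall(1, proc, "x"))
    assert(store.get(1).isDefined)
    assert(store.del(1).get(1).isEmpty)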
diff --git a/src/com/workflowfm/pew/PiMetadata.scala b/src/com/workflowfm/pew/PiMetadata.scala
index 9b13bad..0bbc759 100644
--- a/src/com/workflowfm/pew/PiMetadata.scala
+++ b/src/com/workflowfm/pew/PiMetadata.scala
@@ -22,30 +22,28 @@ object PiMetadata {
* @param id The String key of the type of meta-data.
* @tparam T The Scala-type of the type of meta-data.
*/
- case class Key[T]( val id: String ) extends (PiMetadataMap => T) {
- override def apply( meta: PiMetadataMap ): T
- = meta(id).asInstanceOf[T]
+ case class Key[T](val id: String) extends (PiMetadataMap => T) {
+ override def apply(meta: PiMetadataMap): T = meta(id).asInstanceOf[T]
- def get( meta: PiMetadataMap ): Option[T]
- = meta.get(id).map( _.asInstanceOf[T] )
+ def get(meta: PiMetadataMap): Option[T] = meta.get(id).map(_.asInstanceOf[T])
}
/** The system time (in milliseconds) when a PiEvent
    * actually occurred during computation.
*/
- object SystemTime extends Key[Long]( "SysTime" )
+ object SystemTime extends Key[Long]("SysTime")
/** The simulated time (in its own units) when the real-life
    * event represented by this PiEvent occurred.
*/
- object SimulatedTime extends Key[Long]( "SimTime" )
+ object SimulatedTime extends Key[Long]("SimTime")
/** Construct a PiMetadataMap with the given meta-data.
*/
- def apply( data: PiMetadataElem[_]* ): PiMetadataMap = {
+ def apply(data: PiMetadataElem[_]*): PiMetadataMap = {
// Jev, ensure there's at least a SystemPiTime.
lazy val current = SystemTime -> System.currentTimeMillis()
- (current +: data).map( elem => (elem._1.id, elem._2) ).toMap
+ (current +: data).map(elem => (elem._1.id, elem._2)).toMap
}
}
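
The Key objects give typed access over the untyped metadata map. A sketch (assuming PiMetadataElem pairs a Key with a value of its type, as apply above implies):

    val meta = PiMetadata(PiMetadata.SimulatedTime -> 99L)
    val sys  = PiMetadata.SystemTime(meta)        // defaulted to System.currentTimeMillis()
    val sim  = PiMetadata.SimulatedTime.get(meta) // Some(99L)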
diff --git a/src/com/workflowfm/pew/PiObject.scala b/src/com/workflowfm/pew/PiObject.scala
index be71054..44a50a4 100644
--- a/src/com/workflowfm/pew/PiObject.scala
+++ b/src/com/workflowfm/pew/PiObject.scala
@@ -1,149 +1,147 @@
package com.workflowfm.pew
/**
- * A PiObject is a custom structure representing an object being passed around a pi-calculus workflow.
- * It mirrors the propositional CLL types, but is untyped at the item level.
- *
- * A PiObject can be:
- * 1) A pi-calculus channel: These are used in multiple ways. They may be used for communication,
- * they may be communicated themselves, and they are also used as variables (see below).
- * 2) An item: This represents a resource (actually a container) that is untyped, in practice,
- * but its type corresponds to a CLL proposition. It is untyped so we can freely send it through
- * pi-calculus channels without the compiler complaining. Its correspondence with CLL ensures we
- * can type cast it without error when we need to obtain the actual resource.
- * 3) A pair of PiObjects corresponding to the Pair Scala type and the CLL tensor/times operator.
- * 4) An option between 2 PiObjects corresponding to the Either Scala type and the CLL plus operator.
- * 5) A Left or a Right PiObject corresponding to the instances of the Either Scala type (used as
- * runtime optional outputs).
- *
- * Channels as variables:
- * We use Chan PiObjects as variables to create patterns of expected inputs and outputs.
- * For example, a process may expect a pair PiPair(Chan(X),Chan(Y)) as input.
- * Each of the channels X and Y will (should) eventually be instantiated to a resource,
- * e.g. receiving input a(X).0 from a matching output 'a<"Hello">.0 creates the instatiation
- * X --> "Hello"
- * We apply these instantiations to the pattern PiPair(Chan(X),Chan(Y)) until the
- * pattern contains no channels/variables. We then know that the input is available and
- * the process can be executed (pending all other inputs as well).
- * A process would never receive and/or do anything useful with a pi-calculus channel.
- *
- */
+ * A PiObject is a custom structure representing an object being passed around a pi-calculus workflow.
+ * It mirrors the propositional CLL types, but is untyped at the item level.
+ *
+ * A PiObject can be:
+ * 1) A pi-calculus channel: These are used in multiple ways. They may be used for communication,
+ * they may be communicated themselves, and they are also used as variables (see below).
+ * 2) An item: This represents a resource (actually a container) that is untyped, in practice,
+ * but its type corresponds to a CLL proposition. It is untyped so we can freely send it through
+ * pi-calculus channels without the compiler complaining. Its correspondence with CLL ensures we
+ * can type cast it without error when we need to obtain the actual resource.
+ * 3) A pair of PiObjects corresponding to the Pair Scala type and the CLL tensor/times operator.
+ * 4) An option between 2 PiObjects corresponding to the Either Scala type and the CLL plus operator.
+ * 5) A Left or a Right PiObject corresponding to the instances of the Either Scala type (used as
+ * runtime optional outputs).
+ *
+ * Channels as variables:
+ * We use Chan PiObjects as variables to create patterns of expected inputs and outputs.
+ * For example, a process may expect a pair PiPair(Chan(X),Chan(Y)) as input.
+ * Each of the channels X and Y will (should) eventually be instantiated to a resource,
+ * e.g. receiving input a(X).0 from a matching output 'a<"Hello">.0 creates the instantiation
+ * X --> "Hello"
+ * We apply these instantiations to the pattern PiPair(Chan(X),Chan(Y)) until the
+ * pattern contains no channels/variables. We then know that the input is available and
+ * the process can be executed (pending all other inputs as well).
+ * A process would never receive and/or do anything useful with a pi-calculus channel.
+ *
+ */
sealed trait PiObject {
- def isGround:Boolean = frees.isEmpty
- def frees:Seq[Chan] = Seq()
- def fresh(i:Int): PiObject
+ def isGround: Boolean = frees.isEmpty
+ def frees: Seq[Chan] = Seq()
+ def fresh(i: Int): PiObject
}
object PiObject {
- def apply(a:Any):PiObject = a match {
- case (l,r) => PiPair(PiObject(l),PiObject(r))
- case Left(l) => PiLeft(PiObject(l))
+ def apply(a: Any): PiObject = a match {
+ case (l, r) => PiPair(PiObject(l), PiObject(r))
+ case Left(l) => PiLeft(PiObject(l))
case Right(r) => PiRight(PiObject(r))
- case x => PiItem(x)
+ case x => PiItem(x)
}
-
- def get(o:PiObject):Any = o match {
- case Chan(s) => s
- case PiItem(i) => i
- case PiPair(l,r) => (get(l),get(r))
- case PiLeft(l) => Left(get(l))
- case PiRight(r) => Right(get(r))
- case PiOpt(l,r) => if (l.isGround) Left(get(l)) else Right(get(r))
+
+ def get(o: PiObject): Any = o match {
+ case Chan(s) => s
+ case PiItem(i) => i
+ case PiPair(l, r) => (get(l), get(r))
+ case PiLeft(l) => Left(get(l))
+ case PiRight(r) => Right(get(r))
+ case PiOpt(l, r) => if (l.isGround) Left(get(l)) else Right(get(r))
}
-
- def getAs[A](o:PiObject) = get(o).asInstanceOf[A]
+
+ def getAs[A](o: PiObject): A = get(o).asInstanceOf[A]
}
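
A round-trip sketch of the encoding described above:

    val po = PiObject(("a", Left(1))) // PiPair(PiItem("a"), PiLeft(PiItem(1)))
    assert(po.isGround)               // no free channels
    val back = PiObject.getAs[(String, Either[Int, Int])](po) // ("a", Left(1))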
-case class Chan(s:String) extends PiObject {
- override def isGround:Boolean = false
- override def frees:Seq[Chan] = Seq(this)
- override def fresh(i:Int) = Chan(s + "#" + i)
+case class Chan(s: String) extends PiObject {
+ override def isGround: Boolean = false
+ override def frees: Seq[Chan] = Seq(this)
+ override def fresh(i: Int) = Chan(s + "#" + i)
}
-case class PiItem[+A](i:A) extends PiObject {
- override def isGround:Boolean = true
- override val frees:Seq[Chan] = Seq()
- override def fresh(i:Int) = this
+case class PiItem[+A](i: A) extends PiObject {
+ override def isGround: Boolean = true
+ override val frees: Seq[Chan] = Seq()
+ override def fresh(i: Int): PiItem[A] = this
}
-case class PiPair(l:PiObject,r:PiObject) extends PiObject {
- override def isGround:Boolean = l.isGround && r.isGround
- override def frees:Seq[Chan] = l.frees ++ r.frees
- override def fresh(i:Int) = PiPair(l.fresh(i),r.fresh(i))
+case class PiPair(l: PiObject, r: PiObject) extends PiObject {
+ override def isGround: Boolean = l.isGround && r.isGround
+ override def frees: Seq[Chan] = l.frees ++ r.frees
+ override def fresh(i: Int) = PiPair(l.fresh(i), r.fresh(i))
}
-case class PiOpt(l:PiObject,r:PiObject) extends PiObject {
- override def isGround:Boolean = l.isGround || r.isGround
- override def frees:Seq[Chan] = l.frees ++ r.frees
- override def fresh(i:Int) = PiOpt(l.fresh(i),r.fresh(i))
+case class PiOpt(l: PiObject, r: PiObject) extends PiObject {
+ override def isGround: Boolean = l.isGround || r.isGround
+ override def frees: Seq[Chan] = l.frees ++ r.frees
+ override def fresh(i: Int) = PiOpt(l.fresh(i), r.fresh(i))
}
-case class PiLeft(l:PiObject) extends PiObject {
- override def isGround:Boolean = l.isGround
- override def frees:Seq[Chan] = l.frees
- override def fresh(i:Int) = PiLeft(l.fresh(i))
+case class PiLeft(l: PiObject) extends PiObject {
+ override def isGround: Boolean = l.isGround
+ override def frees: Seq[Chan] = l.frees
+ override def fresh(i: Int) = PiLeft(l.fresh(i))
}
-case class PiRight(r:PiObject) extends PiObject {
- override def isGround:Boolean = r.isGround
- override def frees:Seq[Chan] = r.frees
- override def fresh(i:Int) = PiRight(r.fresh(i))
+case class PiRight(r: PiObject) extends PiObject {
+ override def isGround: Boolean = r.isGround
+ override def frees: Seq[Chan] = r.frees
+ override def fresh(i: Int) = PiRight(r.fresh(i))
}
-
/**
- * ChanMaps represent instantiations of pi-calculus channels (Chan) to other PiObjects.
- * They are used for substitution (especially in continuations) and to instantiate patterns.
- *
- */
-case class ChanMap(map:Map[Chan,PiObject] = Map()) {
-
+ * ChanMaps represent instantiations of pi-calculus channels (Chan) to other PiObjects.
+ * They are used for substitution (especially in continuations) and to instantiate patterns.
+ *
+ */
+case class ChanMap(map: Map[Chan, PiObject] = Map()) {
+
/**
- * Sometimes chains of channel-to-channel mappings occur through pi-calculus communication.
- * e.g. X --> Y and Y --> Z
- * This returns the last channel (Z) in such a chain. It ignores any non-channel PiObject that may
- * come after that.
- */
- def resolve(c:Chan):Chan =
- map.get(c) match {
- case None => c
- case Some(Chan(x)) if c.s != x => resolve(Chan(x))
- case _ => c
- }
-
- def resolve(s:String):String = resolve(Chan(s)).s
-
+ * Sometimes chains of channel-to-channel mappings occur through pi-calculus communication.
+ * e.g. X --> Y and Y --> Z
+ * This returns the last channel (Z) in such a chain. It ignores any non-channel PiObject that may
+ * come after that.
+ */
+ def resolve(c: Chan): Chan =
+ map.get(c) match {
+ case None => c
+ case Some(Chan(x)) if c.s != x => resolve(Chan(x))
+ case _ => c
+ }
+
+ def resolve(s: String): String = resolve(Chan(s)).s
+
/**
- * Returns the PiObject at the end of a chain of channel mappings.
- */
- def obtain(c:Chan):PiObject = {
+ * Returns the PiObject at the end of a chain of channel mappings.
+ */
+ def obtain(c: Chan): PiObject = {
val cr = resolve(c)
- map.getOrElse(cr,cr)
+ map.getOrElse(cr, cr)
}
/**
- * Applies the channel mapping as a substitution on a PiObject.
- */
- def sub(o:PiObject):PiObject = {
+ * Applies the channel mapping as a substitution on a PiObject.
+ */
+ def sub(o: PiObject): PiObject = {
o match {
- case Chan(c) => obtain(Chan(c))
- case PiItem(i) => PiItem(i)
- case PiPair(l,r) => PiPair(sub(l),sub(r))
- case PiOpt(l,r) => PiOpt(sub(l),sub(r))
- case PiLeft(l) => PiLeft(sub(l))
- case PiRight(r) => PiRight(sub(r))
+ case Chan(c) => obtain(Chan(c))
+ case PiItem(i) => PiItem(i)
+ case PiPair(l, r) => PiPair(sub(l), sub(r))
+ case PiOpt(l, r) => PiOpt(sub(l), sub(r))
+ case PiLeft(l) => PiLeft(sub(l))
+ case PiRight(r) => PiRight(sub(r))
}
}
- def +(c:Chan,v:PiObject):ChanMap = ChanMap(map + (c->v))
- def -(c:Chan):ChanMap = ChanMap(map - c)
- def --(l:Chan*):ChanMap = ChanMap(map -- l)
- def ++(m:ChanMap):ChanMap = ChanMap(map ++ m.map)
- def ++(m:Map[Chan,PiObject]):ChanMap = ChanMap(map ++ m)
- def ++(l:Seq[(Chan,PiObject)]):ChanMap = ++(Map(l:_*))
+ def +(c: Chan, v: PiObject): ChanMap = ChanMap(map + (c -> v))
+ def -(c: Chan): ChanMap = ChanMap(map - c)
+ def --(l: Chan*): ChanMap = ChanMap(map -- l)
+ def ++(m: ChanMap): ChanMap = ChanMap(map ++ m.map)
+ def ++(m: Map[Chan, PiObject]): ChanMap = ChanMap(map ++ m)
+ def ++(l: Seq[(Chan, PiObject)]): ChanMap = ++(Map(l: _*))
}
object ChanMap {
- def apply(l:(Chan,PiObject)*):ChanMap = ChanMap(Map(l:_*))
+ def apply(l: (Chan, PiObject)*): ChanMap = ChanMap(Map(l: _*))
}
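
A sketch of the chain resolution described in the docstring (X --> Y --> "Hello"):

    val m = ChanMap(Chan("X") -> Chan("Y"), Chan("Y") -> PiItem("Hello"))
    assert(m.resolve(Chan("X")) == Chan("Y"))      // last channel in the chain
    assert(m.obtain(Chan("X")) == PiItem("Hello")) // value at the end of the chain
    assert(m.sub(PiPair(Chan("X"), Chan("Z"))) == PiPair(PiItem("Hello"), Chan("Z")))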
-
-case class PiResource(obj:PiObject,c:Chan)
+case class PiResource(obj: PiObject, c: Chan)
object PiResource {
- def of(obj:PiObject,c:String,m:ChanMap=ChanMap()):PiResource = PiResource(obj,m.resolve(Chan(c)))
-}
\ No newline at end of file
+ def of(obj: PiObject, c: String, m: ChanMap = ChanMap()): PiResource = PiResource(obj, m.resolve(Chan(c)))
+}
diff --git a/src/com/workflowfm/pew/PiProcess.scala b/src/com/workflowfm/pew/PiProcess.scala
index d99681d..34df7d9 100644
--- a/src/com/workflowfm/pew/PiProcess.scala
+++ b/src/com/workflowfm/pew/PiProcess.scala
@@ -7,107 +7,98 @@ import scala.concurrent.Future
import scala.concurrent.ExecutionContext
/**
- * A trait corresponding to any process that can be executed within our framework.
- * The specification generally follows the proofs-as-processes format, with PiObject patterns
- * representing CLL types, paired with strings representing the channel names. *
- */
+ * Corresponds to any process that can be executed within our framework.
+ * The specification generally follows the proofs-as-processes format, with PiObject patterns
+ * representing CLL types, paired with strings representing the channel names.
+ */
sealed trait PiProcess {
- def name:String // Name of the process
- def iname:String = name // This can be used to identify different instances of this process that run
- // on different workflows in the same executor.
- // Note that each workflow can only support one instance of a process.
- def output:(PiObject,String) // Type (as a PiObject pattern) and channel of the output.
- def inputs:Seq[(PiObject,String)] // List of (type,channel) for each input.
- def channels:Seq[String] = output._2 +: (inputs map (_._2)) // order of channels is important for correct process calls!
-
- def dependencies:Seq[PiProcess] // dependencies of composite processes
-
- def allDependencies:Seq[PiProcess] = PiProcess.allDependenciesOf(this) // all ancestors (i.e. including dependencies of dependencies
-
+ /** Process name */
+ def name: String
+ /** Instance name (only one instance supported per workflow) */
+ def iname: String = name
+ /** Output type and channel */
+ def output: (PiObject, String)
+ /** Sequence of input type-channel pairs */
+ def inputs: Seq[(PiObject, String)]
+ /** Ordered sequence of process channels */
+ // Note: order is important for correct process calls
+ def channels: Seq[String] = output._2 +: (inputs map (_._2))
+ /** Composite process (direct) dependencies */
+ def dependencies: Seq[PiProcess]
+ /** All composite process dependencies (including indirect) */
+ def allDependencies: Seq[PiProcess] = PiProcess.allDependenciesOf(this)
+
/**
- * Initializes a PiState that executes this process with a given list of PiObject arguments.
- * Generates an Output term from each PiObject in args so that they can be fed to the process.
- * Also generates an Input that consumes the output of the process when it is done.
- * Adds all dependencies to the state and then runs a PiCall directly.
- */
- def execState(args:Seq[PiObject]):PiState = {
- val iTerms = args zip (inputs map (_._2)) map { case (o,c) => Output.of(o,c) }
- val oTerm = Input.of(output._1,output._2)
- PiState() withProc this withProcs (allDependencies :_*) withTerms iTerms withTerm oTerm withTerm PiCall(name,channels map Chan)
- }
-
- /**
- * Used when a process is called to map argument channels to the given values.
- */
- def mapArgs(args:Chan*) = ChanMap(channels map Chan zip args:_*)
- def mapFreshArgs(i:Int,args:Chan*) = ChanMap(channels map (_ + "#" + i) map Chan zip args:_*)
-
- def inputFrees() = inputs map (_._1.frees)
-
- override def toString = "[|" + name + "|]"
-
+ * Initializes a PiState that executes this process with a given list of PiObject arguments.
+ * Generates an Output term from each PiObject in args so that they can be fed to the process.
+ * Also generates an Input that consumes the output of the process when it is done.
+ * Adds all dependencies to the state and then runs a PiCall directly.
+ */
+ def execState(args: Seq[PiObject]): PiState = {
+ val iTerms = args zip (inputs map (_._2)) map { case (o, c) => Output.of(o, c) }
+ val oTerm = Input.of(output._1, output._2)
+ PiState() withProc this withProcs (allDependencies: _*) withTerms
+ iTerms withTerm oTerm withTerm PiCall(name, channels map Chan)
+ }
+
+ /** Used when a process is called to map argument channels to the given values. */
+ def mapArgs(args: Chan*) = ChanMap(channels map Chan zip args: _*)
+ def mapFreshArgs(i: Int, args: Chan*) = ChanMap(channels map (_ + "#" + i) map Chan zip args: _*)
+
+ def inputFrees(): Seq[Seq[Chan]] = inputs map (_._1.frees)
+
+ override def toString: String = "[|" + name + "|]"
+
+ /** Shortcut to create entries in name->PiProcess maps */
+ def toEntry: (String, PiProcess) = name -> this
+
+ /** Shortcut to create entries in name->PiProcess maps using the instance name */
+ def toIEntry: (String, PiProcess) = iname -> this
+
/**
- * Shortcut to create entries in name->PiProcess maps
- */
- def toEntry:(String,PiProcess) = name->this
-
- /**
- * * Shortcut to create entries in name->PiProcess maps using the instance name
- */
- def toIEntry:(String,PiProcess) = iname->this
-
- /**
- * This is used to identify simulation processes that need (virtual) time to complete.
- * If a process is not a simulation process, then the simulator needs to wait for it to complete before
- * the next virtual tick.
- */
+ * This is used to identify simulation processes that need (virtual) time to complete.
+ * If a process is not a simulation process, then the simulator needs to wait for it to complete before
+ * the next virtual tick.
+ */
def isSimulatedProcess = false
}
object PiProcess {
- def allDependenciesOf(p:PiProcess):Seq[PiProcess] =
- if (p.dependencies.isEmpty) Seq():Seq[PiProcess]
- else p.dependencies ++ (p.dependencies flatMap allDependenciesOf)
+ def allDependenciesOf(p: PiProcess): Seq[PiProcess] =
+ if (p.dependencies.isEmpty) {
+ Seq(): Seq[PiProcess]
+ } else {
+ p.dependencies ++ (p.dependencies flatMap allDependenciesOf)
+ }
}
-
trait MetadataAtomicProcess extends PiProcess {
- /**
- * This function runs the process with the given PiObject arguments in an implicit execution context.
+ /**
+ * Runs the process with the given PiObject arguments in an implicit execution context.
* Allows the child processes to return arbitrary meta-data
* which can be incorporated into the PiEvent history using supported ProcessExecutors.
*
* @return A future containing the result of the result as a PiObject and optional meta-data.
*/
- def runMeta( args: Seq[PiObject] )( implicit ec: ExecutionContext ): Future[MetadataAtomicResult]
- /**
- * This constructs a PiFuture to be added to the state when the process is called.
- */
- def getFuture(i:Int,m:ChanMap):PiFuture = PiFuture(name,m.resolve(Chan(output._2).fresh(i)),inputs map { case (o,c) => PiResource.of(o.fresh(i),c + "#" + i,m) })
- def getFuture(i:Int,args:Chan*):PiFuture = getFuture(i,mapFreshArgs(i,args:_*))
-
- /**
- * This constructs the Input terms needed to appopriately receive the process inputs.
- */
- def getInputs(i:Int,m:ChanMap):Seq[Input] = inputs map { case (o,c) => Input.of(m.sub(o.fresh(i)),m.resolve(c+"#"+i)) }
- def getInputs(i:Int,args:Chan*):Seq[Input] = getInputs(i,mapFreshArgs(i,args:_*))
-
- override val dependencies:Seq[PiProcess] = Seq()
-
-}
-
-trait AtomicProcess extends MetadataAtomicProcess {
-
- /** Implements the standard AtomicProcess interface for unsupporting ProcessExecutors.
- */
- final override def runMeta( args: Seq[PiObject] )( implicit ec: ExecutionContext ): Future[MetadataAtomicResult]
- = run( args ).map(MetadataAtomicProcess.result(_))
-
- def run(args:Seq[PiObject])(implicit ec:ExecutionContext):Future[PiObject]
-
+ def runMeta(args: Seq[PiObject])(implicit ec: ExecutionContext): Future[MetadataAtomicResult]
+
+ /** Constructs a PiFuture to be added to the state when the process is called. */
+ def getFuture(i: Int, m: ChanMap): PiFuture =
+ PiFuture(name, m.resolve(Chan(output._2).fresh(i)), inputs map {
+ case (o, c) => PiResource.of(o.fresh(i), c + "#" + i, m)
+ })
+ def getFuture(i: Int, args: Chan*): PiFuture =
+ getFuture(i, mapFreshArgs(i, args: _*))
+
+ /** Constructs the Input terms needed to appropriately receive the process inputs. */
+ def getInputs(i: Int, m: ChanMap): Seq[Input] =
+ inputs map { case (o, c) => Input.of(m.sub(o.fresh(i)), m.resolve(c + "#" + i)) }
+ def getInputs(i: Int, args: Chan*): Seq[Input] =
+ getInputs(i, mapFreshArgs(i, args: _*))
+
+ override val dependencies: Seq[PiProcess] = Seq()
}
object MetadataAtomicProcess {
-
type MetadataAtomicResult = (PiObject, PiMetadataMap)
/** Syntactic sugar for returning results from MetadataAtomicProcesses.
@@ -116,88 +107,97 @@ object MetadataAtomicProcess {
* @param data Metadata elements with their keys.
* @return Tuple of Result and Meta-data map.
*/
- def result( result: PiObject, data: PiMetadataElem[_]* ): MetadataAtomicResult
- = (result, PiMetadata( data: _* ) )
+ def result(result: PiObject, data: PiMetadataElem[_]*): MetadataAtomicResult =
+ (result, PiMetadata(data: _*))
+ /*
/** Upgrade AtomicProcess to MetadataAtomicProcesses so ProcessExecutors
- * can simply integrate existing process.
- *
- * @return A MetadataAtomicProcess wrapper around the input AtomicProcess
- * *IFF* it's not already a MetadataAtomicProcess.
- */
-/*
- implicit def from: AtomicProcess => MetadataAtomicProcess = {
- case existing: MetadataAtomicProcess => existing
-
- case original: AtomicProcess =>
- new MetadataAtomicProcess {
- override def name: String = original.name
- override def output: (PiObject, String) = original.output
- override def inputs: Seq[(PiObject, String)] = original.inputs
-
- override def runMeta(args: Seq[PiObject])(implicit ec: ExecutionContext): Future[(PiObject, PiMetadataMap)]
- = original.run(args).map((_, PiMetadata()))
- }
- }
+ * can simply integrate existing process.
+ *
+ * @return A MetadataAtomicProcess wrapper around the input AtomicProcess
+ * *IFF* it's not already a MetadataAtomicProcess.
+ */
+ implicit def from: AtomicProcess => MetadataAtomicProcess = {
+ case existing: MetadataAtomicProcess => existing
+
+ case original: AtomicProcess =>
+ new MetadataAtomicProcess {
+ override def name: String = original.name
+ override def output: (PiObject, String) = original.output
+ override def inputs: Seq[(PiObject, String)] = original.inputs
+
+ override def runMeta(args: Seq[PiObject])(implicit ec: ExecutionContext): Future[(PiObject, PiMetadataMap)]
+ = original.run(args).map((_, PiMetadata()))
+ }
+ }
*/
}
-case class DummyProcess(override val name:String, override val channels:Seq[String], outChan:String, override val inputs:Seq[(PiObject,String)]) extends AtomicProcess {
- override val output = (PiItem(None),outChan)
- override def run(args:Seq[PiObject])(implicit ec:ExecutionContext):Future[PiObject] = Future.successful(output._1)
+trait AtomicProcess extends MetadataAtomicProcess {
+  /** Implements the standard AtomicProcess interface for ProcessExecutors without metadata support. */
+ final override def runMeta(args: Seq[PiObject])(implicit ec: ExecutionContext): Future[MetadataAtomicResult] =
+ run(args).map(MetadataAtomicProcess.result(_))
+
+ def run(args: Seq[PiObject])(implicit ec: ExecutionContext): Future[PiObject]
}
+case class DummyProcess(
+ override val name: String,
+ override val channels: Seq[String],
+ outChan: String,
+ override val inputs: Seq[(PiObject, String)]
+) extends AtomicProcess {
-trait CompositeProcess extends PiProcess {
- /**
- * The body of the composition as a pi-calculus term constructed via proof.
- */
- def body:Term
-
- /**
- * Calling function that instantiates the body with a given set of channel arguments.
- */
- def call(m:ChanMap):Term = body.sub(m)
- def call(args:Chan*):Term = call(mapArgs(args:_*))
+ override val output: (PiObject, String) = (PiItem(None), outChan)
+ override def run(args: Seq[PiObject])(implicit ec: ExecutionContext): Future[PiObject] = Future.successful(output._1)
}
-case class DummyComposition(override val name:String, i:String, o:String, n:String) extends CompositeProcess {
- override val output = (Chan(n),o)
- override val inputs = Seq((Chan(n),i))
- override val channels = Seq(i,o)
- override val body = PiId(i,o,n)
- override val dependencies:Seq[PiProcess] = Seq()
+trait CompositeProcess extends PiProcess {
+ /** The body of the composition as a pi-calculus term constructed via proof. */
+ def body: Term
+
+ /** Calling function that instantiates the body with a given set of channel arguments. */
+ def call(m: ChanMap): Term = body.sub(m)
+ def call(args: Chan*): Term = call(mapArgs(args: _*))
}
+case class DummyComposition(override val name: String, i: String, o: String, n: String) extends CompositeProcess {
+
+ override val output: (PiObject, String) = (Chan(n), o)
+ override val inputs: Seq[(PiObject, String)] = Seq((Chan(n), i))
+ override val channels: Seq[String] = Seq(i, o)
+ override val body = PiId(i, o, n)
+ override val dependencies: Seq[PiProcess] = Seq()
+}
trait PiProcessStore {
- /**
- * Get a PiProcess by its *instance* name.
- */
- def get(name:String):Option[PiProcess]
- def getAll:Seq[PiProcess]
-
- def getOrElse[B >: PiProcess](name:String,default: => B):B = get(name) match {
- case None => default
- case Some(r) => r
- }
- def entryOf(name:String):Option[(String,PiProcess)] = get(name) map (_.toEntry)
- def toMap:Map[String,PiProcess] = Map(getAll map (_.toEntry) :_*)
- def toIMap:Map[String,PiProcess] = Map(getAll map (_.toIEntry) :_*)
+ /** Get a PiProcess by its *instance* name. */
+ def get(name: String): Option[PiProcess]
+ def getAll: Seq[PiProcess]
+
+ def getOrElse[B >: PiProcess](name: String, default: => B): B =
+ get(name) match {
+ case None => default
+ case Some(r) => r
+ }
+ def entryOf(name: String): Option[(String, PiProcess)] = get(name) map (_.toEntry)
+ def toMap: Map[String, PiProcess] = Map(getAll map (_.toEntry): _*)
+ def toIMap: Map[String, PiProcess] = Map(getAll map (_.toIEntry): _*)
}
object PiProcessStore {
- /**
- * Shortcut to create name->PiProcess maps from a seq of processes.
- * These are used within PiState.
- */
- def mapOf(l:PiProcess*):Map[String,PiProcess] = (Map[String,PiProcess]() /: l)(_ + _.toEntry)
+ /**
+ * Shortcut to create name->PiProcess maps from a seq of processes.
+ * These are used within PiState.
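+ * e.g. (sketch) `mapOf(p, q)` yields `Map(p.name -> p, q.name -> q)`.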
+ */
+ def mapOf(l: PiProcess*): Map[String, PiProcess] = (Map[String, PiProcess]() /: l)(_ + _.toEntry)
}
-case class SimpleProcessStore(m:Map[String,PiProcess]) extends PiProcessStore {
- override def get(name:String):Option[PiProcess] = m.get(name)
- override def getAll:Seq[PiProcess] = m.values.toSeq
+case class SimpleProcessStore(m: Map[String, PiProcess]) extends PiProcessStore {
+
+ override def get(name: String): Option[PiProcess] = m.get(name)
+ override def getAll: Seq[PiProcess] = m.values.toSeq
}
object SimpleProcessStore {
- def apply(l:PiProcess*):SimpleProcessStore = SimpleProcessStore((Map[String,PiProcess]() /: l) (_ + _.toIEntry))
- //def procs(l:PiProcess*):SimpleProcessStore = SimpleProcessStore((Map[String,PiProcess]() /: l) (_ + _.toEntry))
+ def apply(l: PiProcess*): SimpleProcessStore = SimpleProcessStore((Map[String, PiProcess]() /: l)(_ + _.toIEntry))
+ //def procs(l: PiProcess*): SimpleProcessStore = SimpleProcessStore((Map[String, PiProcess]() /: l) (_ + _.toEntry))
}
diff --git a/src/com/workflowfm/pew/PiState.scala b/src/com/workflowfm/pew/PiState.scala
index 6af9960..67e86c4 100644
--- a/src/com/workflowfm/pew/PiState.scala
+++ b/src/com/workflowfm/pew/PiState.scala
@@ -3,209 +3,239 @@ package com.workflowfm.pew
import scala.annotation.tailrec
/**
- * This is the pi-calculus state of a running workflow.
- *
- * We separate inputs from outputs to make searching for matching channels more efficient.
- * We could have everything in one list like PiLib does.
- *
- * @param inputs a map of Input type continuations based on their primary channels
- * @param outputs a map of Output type continuations based on their primary channels
- * @param calls a list of processes that have been called but the inputs haven't arrived yet
- * @param threads a map of processes that have been called and whose results we want. The
- * calls to the actual processes should happen outside the PiState by a
- * process executor, so that things are kept clean and independent here. Each
- * call has a unique reference number so that results don't clash.
- * @param threadCtr an integer to help create unique reference numbers for threads
- * @param freshCtr an integer to help create unique (fresh) channel names
- * @param processes a map of available processes, both atomic and composite, so that we know
- * how to handle calls when they occur
- * @param resources a ChanMap of unhandled resources. We try to keep this as clean as possible,
- * so that it only contains resources that need to be received by a pending call
- * (though this is not guaranteed, we might get some leftover garbage in here as well).
- */
-case class PiState(inputs:Map[Chan,Input], outputs:Map[Chan,Output], calls:List[PiFuture], threads:Map[Int,PiFuture], threadCtr:Int, freshCtr:Int, processes:Map[String,PiProcess], resources:ChanMap) {
-
- /**
- * Introducing a term can have many different effects on the state.
- * e.g. an Input term is simply added to the inputs, but a PiCut term adds new terms to both
- * inputs and outputs and it affects freshCtr.
- * We also want to leave enough room for any future types of terms we might want to add.
- * We therefore defer the state effect to be determined by the Term itself.
- */
- def withTerm(t:Term):PiState = {
- //System.err.println("*** Handling term: " + t)
- t.addTo(this)
- }
- def withTerms(t:Seq[Term]):PiState = (this /: t)(_ withTerm _)
-
- def withSub(c:Chan,a:PiObject):PiState = copy(resources=resources + (c,a))
- def withSub(c:String,o:PiObject):PiState = withSub(Chan(c),o)
- def withSubs(m:Map[Chan,PiObject]) = copy(resources=resources ++ m)
- def withSubs(l:Seq[(Chan,PiObject)]) = copy(resources=resources ++ l)
- def withSubs(m:ChanMap) = copy(resources=resources ++ m)
-
- def withProc(p:PiProcess) = copy(processes = processes + (p.name->p))
- def withProcs(l:PiProcess*) = (this /: l)(_ withProc _)
-
- def withCalls(l:PiFuture*) = copy(calls = l.toList ++ calls)
-
- def withThread(ref:Int, name:String, chan:String, args:Seq[PiResource]) = withThreads((ref,PiFuture(name,Chan(chan),args)))
- def withThreads(t:(Int,PiFuture)*) = copy(threads = threads ++ (t map { x => x._1 -> x._2 }))
- def removeThread(ref:Int) = copy(threads=threads - ref)
- def removeThreads(refs:Iterable[Int]) = copy(threads=threads -- refs)
-
- def withTCtr(i:Int) = copy(threadCtr = i)
- def incTCtr() = copy(threadCtr = threadCtr + 1)
-
- def withFCtr(i:Int) = copy(freshCtr = i)
- def incFCtr() = copy(freshCtr = freshCtr + 1)
-
- private def removeIO(c:Chan):PiState = copy(inputs=inputs - c,outputs=outputs - c)
-
- /**
- * Performs either a single pi-calculus reduction or else handles the first available call with completed inputs.
- * The intersection of key sets detects common channels between inputs and outputs, which can then communicate.
- */
- def reduce():Option[PiState] = communicateFirst(inputs.keySet intersect outputs.keySet toList) match {
+ * This is the pi-calculus state of a running workflow.
+ *
+ * We separate inputs from outputs to make searching for matching channels more efficient.
+ * We could have everything in one list like PiLib does.
+ *
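+ * For example (a sketch, assuming a hypothetical process `myProc`):
+ * {{{
+ * val s = PiState() withProc myProc withTerm (PiCall < ("myProc", "in", "out"))
+ * val done = s.fullReduce()
+ * }}}
+ *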
+ * @param inputs a map of Input type continuations based on their primary channels
+ * @param outputs a map of Output type continuations based on their primary channels
+ * @param calls a list of processes that have been called but the inputs haven't arrived yet
+ * @param threads a map of processes that have been called and whose results we want. The
+ * calls to the actual processes should happen outside the PiState by a
+ * process executor, so that things are kept clean and independent here. Each
+ * call has a unique reference number so that results don't clash.
+ * @param threadCtr an integer to help create unique reference numbers for threads
+ * @param freshCtr an integer to help create unique (fresh) channel names
+ * @param processes a map of available processes, both atomic and composite, so that we know
+ * how to handle calls when they occur
+ * @param resources a ChanMap of unhandled resources. We try to keep this as clean as possible,
+ * so that it only contains resources that need to be received by a pending call
+ * (though this is not guaranteed, we might get some leftover garbage in here as well).
+ */
+case class PiState(
+ inputs: Map[Chan, Input],
+ outputs: Map[Chan, Output],
+ calls: List[PiFuture],
+ threads: Map[Int, PiFuture],
+ threadCtr: Int,
+ freshCtr: Int,
+ processes: Map[String, PiProcess],
+ resources: ChanMap
+) {
+
+ /**
+ * Introducing a term can have many different effects on the state.
+ * e.g. an Input term is simply added to the inputs, but a PiCut term adds new terms to both
+ * inputs and outputs and it affects freshCtr.
+ * We also want to leave enough room for any future types of terms we might want to add.
+ * We therefore defer the state effect to be determined by the Term itself.
+ */
+ def withTerm(t: Term): PiState = {
+ //System.err.println("*** Handling term: " + t)
+ t.addTo(this)
+ }
+ def withTerms(t: Seq[Term]): PiState = (this /: t)(_ withTerm _)
+
+ def withSub(c: Chan, a: PiObject): PiState = copy(resources = resources + (c, a))
+ def withSub(c: String, o: PiObject): PiState = withSub(Chan(c), o)
+ def withSubs(m: Map[Chan, PiObject]): PiState = copy(resources = resources ++ m)
+ def withSubs(l: Seq[(Chan, PiObject)]): PiState = copy(resources = resources ++ l)
+ def withSubs(m: ChanMap): PiState = copy(resources = resources ++ m)
+
+ def withProc(p: PiProcess): PiState = copy(processes = processes + (p.name -> p))
+ def withProcs(l: PiProcess*): PiState = (this /: l)(_ withProc _)
+
+ def withCalls(l: PiFuture*): PiState = copy(calls = l.toList ++ calls)
+
+ def withThread(ref: Int, name: String, chan: String, args: Seq[PiResource]): PiState =
+ withThreads((ref, PiFuture(name, Chan(chan), args)))
+ def withThreads(t: (Int, PiFuture)*): PiState =
+ copy(threads = threads ++ (t map { x =>
+ x._1 -> x._2
+ }))
+ def removeThread(ref: Int): PiState = copy(threads = threads - ref)
+ def removeThreads(refs: Iterable[Int]): PiState = copy(threads = threads -- refs)
+
+ def withTCtr(i: Int): PiState = copy(threadCtr = i)
+ def incTCtr(): PiState = copy(threadCtr = threadCtr + 1)
+
+ def withFCtr(i: Int): PiState = copy(freshCtr = i)
+ def incFCtr(): PiState = copy(freshCtr = freshCtr + 1)
+
+ private def removeIO(c: Chan): PiState = copy(inputs = inputs - c, outputs = outputs - c)
+
+ /**
+ * Performs either a single pi-calculus reduction or else handles the first available call with completed inputs.
+ * The intersection of key sets detects common channels between inputs and outputs, which can then communicate.
+ */
+ def reduce(): Option[PiState] =
+ communicateFirst(inputs.keySet intersect outputs.keySet toList) match {
case Some(s) => Some(s)
- case None => executeFirst()
+ case None => executeFirst()
}
-
- /**
- * Perform all possible reductions to the state.
- */
- @tailrec
- final def fullReduce():PiState = reduce() match {
- case None => this
- case Some(s) => {
+
+ /** Perform all possible reductions to the state. */
+ @tailrec
+ final def fullReduce(): PiState =
+ reduce() match {
+ case None => this
+ case Some(s) =>
//System.err.println(s)
s.fullReduce()
- }
}
-
- /**
- * Performs the first possible communication from a list of common channels.
- */
- @tailrec
- private def communicateFirst(l:List[Chan]):Option[PiState] = l match {
+
+ /** Performs the first possible communication from a list of common channels. */
+ @tailrec
+ private def communicateFirst(l: List[Chan]): Option[PiState] =
+ l match {
case Nil => None
- case h :: t => communicate(h) match {
+ case h :: t =>
+ communicate(h) match {
case None => communicateFirst(t)
- case s => s
- }
+ case s => s
+ }
}
-
- /**
- * Attempts to perform a communication through channel k.
- * First uses the send method of the output to obtain the outputValue.
- * Then checks if the corresponding input can receive the outputValue.
- * It then uses the receive method of the input to pass the outputValue.
- * Finally it updates the state with continuations from both the input and output,
- * adds unhandled channel mappings to resources, and removes the input and output
- * from their respective maps.
- */
- private def communicate(k:Chan) = {
- val (oterms,outputValue,newstate) = outputs.get(k).get.send(this)
- inputs.get(k) flatMap { i =>
- if (!i.admits(outputValue)) None
- else {
- //System.err.println("*** Communicating [" + outputValue + "] through channel: " + k)
- val (iterms,newres) = inputs.get(k).get.receive(outputValue)
- Some(newstate withTerms oterms withTerms iterms withSubs newres removeIO k)
- }
- }
+
+ /**
+ * Attempts to perform a communication through channel k.
+ * First uses the send method of the output to obtain the outputValue.
+ * Then checks if the corresponding input can receive the outputValue.
+ * It then uses the receive method of the input to pass the outputValue.
+ * Finally it updates the state with continuations from both the input and output,
+ * adds unhandled channel mappings to resources, and removes the input and output
+ * from their respective maps.
+ */
+ private def communicate(k: Chan): Option[PiState] = {
+ val (oTerms, outputValue, newState) = outputs(k).send(this)
+ inputs.get(k) flatMap { i =>
+ if (!i.admits(outputValue)) {
+ None
+ } else {
+ //System.err.println("*** Communicating [" + outputValue + "] through channel: " + k)
+ val (iTerms, newRes) = inputs(k).receive(outputValue)
+ Some(newState withTerms oTerms withTerms iTerms withSubs newRes removeIO k)
+ }
}
-
- /**
- * Handles process calls.
- * For atomic process calls, we create an instantiated PiFuture and add it to the calls.
- * We also generate and add the process input terms.
- * For composite process calls we instantiate the body and add it as a term.
- */
- def handleCall(c:PiCall):PiState = processes get c.name match {
- case None => {
+ }
+
+ /**
+ * Handles process calls.
+ * For atomic process calls, we create an instantiated PiFuture and add it to the calls.
+ * We also generate and add the process input terms.
+ * For composite process calls we instantiate the body and add it as a term.
+ */
+ def handleCall(c: PiCall): PiState =
+ processes get c.name match {
+ case None =>
System.err.println("Failed to find process: " + c.name)
this
- }
- case Some(p:MetadataAtomicProcess) => {
+ case Some(p: MetadataAtomicProcess) =>
//System.err.println("*** Handling atomic call: " + c.name)
- val m = p.mapFreshArgs(freshCtr,c.args:_*)
- copy(calls = p.getFuture(freshCtr,m) +: calls) withTerms p.getInputs(freshCtr,m) incFCtr()
- }
- case Some(p:CompositeProcess) => {
+ val m = p.mapFreshArgs(freshCtr, c.args: _*)
+ copy(calls = p.getFuture(freshCtr, m) +: calls) withTerms p.getInputs(freshCtr, m) incFCtr ()
+ case Some(p: CompositeProcess) =>
//System.err.println("*** Handling composite call: " + c.name)
- val m = p.mapFreshArgs(freshCtr,c.args:_*)
- this withTerm p.body.fresh(freshCtr).sub(m) incFCtr()
- }
+ val m = p.mapFreshArgs(freshCtr, c.args: _*)
+ this withTerm p.body.fresh(freshCtr).sub(m) incFCtr ()
}
-
- /**
- * By 'execute' we mean converting a call to a thread.
- * This means checking a process in the calls list about whether its inputs have arrived.
- * This executes the first process (if any) in calls whose inputs have arrived.
- */
- def executeFirst():Option[PiState] = {
- def executeFirstRec(l:List[PiFuture]):Option[(PiState,List[PiFuture])] = l match {
- case Nil => None
- case h :: t => execute(h) match {
- case None => executeFirstRec(t) match {
- case None => None
- case Some((s,r)) => Some((s, h::r))
- }
- case Some(s) => Some((s,t))
+
+ /**
+ * By 'execute' we mean converting a call to a thread.
+ * This means checking a process in the calls list about whether its inputs have arrived.
+ * This executes the first process (if any) in calls whose inputs have arrived.
+ */
+ def executeFirst(): Option[PiState] = {
+ def executeFirstRec(l: List[PiFuture]): Option[(PiState, List[PiFuture])] =
+ l match {
+ case Nil => None
+ case h :: t =>
+ execute(h) match {
+ case None =>
+ executeFirstRec(t) match {
+ case None => None
+ case Some((s, r)) => Some((s, h :: r))
+ }
+ case Some(s) => Some((s, t))
}
- }
- executeFirstRec(calls) match {
- case None => None
- case Some((s,l)) => Some(s.copy(calls = l))
- }
}
-
- /**
- * This converts a PiFuture in the calls list to an instatiated thread to be executed externally.
- * A unique thread id is generated using threadCtr.
- * We also remove the mappings of all free channels in the process inputs from the resources list.
- * These get used up in the process execution and are no longer needed. This helps keep the resources list clean.
- */
- def execute(p:PiFuture):Option[PiState] = {
- //val frees = (processes get p.fun toSeq) flatMap (_.inputFrees())
- val frees = p.args map (_.obj.frees)
- p.execute(resources) map { fut => copy(threads = threads + (threadCtr->fut),threadCtr = threadCtr + 1, resources = resources -- (frees.flatten :_*)) }
+
+ executeFirstRec(calls) match {
+ case None => None
+ case Some((s, l)) => Some(s.copy(calls = l))
}
-
- /**
- * This method should be used by the external executor to report the result of an executed thread and obtain the appropriate new state.
- * The PiFuture is removed from the threads list and its output Term is added, with the given object as the output resource.
- */
- def result(ref:Int,res:PiObject):Option[PiState] =
- threads get ref map { fut =>
- copy(threads = threads - ref) withTerm fut.toOutput(res)
- }
-
- /**
- * Executes a thread handling function over all threads.
- * Handler is expected to have side-effects (typically some Future trigger).
- * Handler should return true if a thread was handled successfully, false if it failed.
- * Returns the updated state containing only threads that were handled successfully.
- */
- def handleThreads(handler:((Int,PiFuture))=>Boolean) = copy(threads = threads filter handler)
+ }
+
+ /**
+ * This converts a PiFuture in the calls list to an instantiated thread to be executed externally.
+ * A unique thread id is generated using threadCtr.
+ * We also remove the mappings of all free channels in the process inputs from the resources list.
+ * These get used up in the process execution and are no longer needed. This helps keep the resources list clean.
+ */
+ def execute(p: PiFuture): Option[PiState] = {
+ //val frees = (processes get p.fun toSeq) flatMap (_.inputFrees())
+ val frees = p.args map (_.obj.frees)
+ p.execute(resources) map { fut =>
+ copy(
+ threads = threads + (threadCtr -> fut),
+ threadCtr = threadCtr + 1,
+ resources = resources -- (frees.flatten: _*)
+ )
+ }
+ }
+
+ /**
+ * This method should be used by the external executor to report the result of an executed thread and obtain the appropriate new state.
+ * The PiFuture is removed from the threads list and its output Term is added, with the given object as the output resource.
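+ * For example (sketch): `result(0, PiItem("done"))` completes thread 0 and adds
+ * `Output.of(PiItem("done"), outChan.s)` for that thread's output channel.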
+ */
+ def result(ref: Int, res: PiObject): Option[PiState] =
+ threads get ref map { fut =>
+ copy(threads = threads - ref) withTerm fut.toOutput(res)
+ }
+
+ /**
+ * Executes a thread handling function over all threads.
+ * Handler is expected to have side-effects (typically some Future trigger).
+ * Handler should return true if a thread was handled successfully, false if it failed.
+ * Returns the updated state containing only threads that were handled successfully.
+ */
+ def handleThreads(handler: ((Int, PiFuture)) => Boolean): PiState = copy(threads = threads filter handler)
}
object PiState {
- def apply(t:Term*):PiState = PiState(Map():Map[Chan,Input],Map():Map[Chan,Output],List(),Map():Map[Int,PiFuture],0,0,Map():Map[String,PiProcess],ChanMap()) withTerms t
- def apply(t:Seq[Term],l:Seq[(Chan,PiObject)]):PiState = PiState(t:_*) withSubs l
+ def apply(t: Term*): PiState =
+ PiState(
+ Map(): Map[Chan, Input],
+ Map(): Map[Chan, Output],
+ List(),
+ Map(): Map[Int, PiFuture],
+ 0,
+ 0,
+ Map(): Map[String, PiProcess],
+ ChanMap()
+ ) withTerms t
+ def apply(t: Seq[Term], l: Seq[(Chan, PiObject)]): PiState = PiState(t: _*) withSubs l
}
trait PiStateTester {
-
- def reduceOnce(t:ChannelTerm*) = PiState(t:_*).reduce()
+ def reduceOnce(t: ChannelTerm*): Option[PiState] = PiState(t: _*).reduce()
+
+ def reduce(t: ChannelTerm*): PiState = reduceState(PiState(t: _*))
+
+ def reduceState(s: PiState): PiState = s.fullReduce()
- def reduce(t:ChannelTerm*):PiState = reduceState(PiState(t:_*))
-
- def reduceState(s:PiState):PiState = s.fullReduce()
+ def reduceGet(r: String, t: ChannelTerm*): PiObject =
+ reduceState(PiState(t: _*)).resources.obtain(Chan(r))
- def reduceGet(r:String,t:ChannelTerm*):PiObject =
- reduceState(PiState(t:_*)).resources.obtain(Chan(r))
-
- def fState(l:(Chan,PiObject)*) = PiState(List(),l)
+ def fState(l: (Chan, PiObject)*) = PiState(List(), l)
}
diff --git a/src/com/workflowfm/pew/PiTerms.scala b/src/com/workflowfm/pew/PiTerms.scala
index 3f824f3..8f8c422 100644
--- a/src/com/workflowfm/pew/PiTerms.scala
+++ b/src/com/workflowfm/pew/PiTerms.scala
@@ -1,317 +1,332 @@
package com.workflowfm.pew
/**
- * A Term is a pi-calculus pattern that can be encountered in a proofs-as-processes composition.
- */
+ * A Term is a pi-calculus pattern that can be encountered in a proofs-as-processes composition.
+ */
sealed trait Term {
- def sub(s:ChanMap):Term
- def fresh(i:Int):Term
- def addTo(s:PiState):PiState
+ def sub(s: ChanMap): Term
+ def fresh(i: Int): Term
+ def addTo(s: PiState): PiState
}
/**
- * A ChannelTerm is a Term that depends on a particular channel to operate.
- * Essentially the equivalent of a pi-calculus continuation.
- */
+ * A ChannelTerm is a Term that depends on a particular channel to operate.
+ * Essentially the equivalent of a pi-calculus continuation.
+ */
sealed trait ChannelTerm extends Term {
- def channel:Chan
+ def channel: Chan
}
/**
- * Input Terms for proofs-as-processes compositions.
- */
+ * Input Terms for proofs-as-processes compositions.
+ */
sealed trait Input extends ChannelTerm {
/**
- * Returns True if the Input is able to receive a given PiObject.
- * For example, a PiLeft can only be received by an optional input.
- */
- def admits(a:PiObject):Boolean
-
+ * Returns True if the Input is able to receive a given PiObject.
+ * For example, a PiLeft can only be received by an optional input.
+ */
+ def admits(a: PiObject): Boolean
+
/**
- * This passes a PiObject to be received through this Input's channel.
- * Returns the list of Terms (continuations) that follow and a (potential)
- * channel mapping of any received resources.
- */
- def receive(a:PiObject):(List[Term],ChanMap)
-
- override def sub(s:ChanMap):ChannelTerm
- override def addTo(s:PiState):PiState = s.copy(inputs=s.inputs + (this.channel->this))
+ * This passes a PiObject to be received through this Input's channel.
+ * Returns the list of Terms (continuations) that follow and a (potential)
+ * channel mapping of any received resources.
+ */
+ def receive(a: PiObject): (List[Term], ChanMap)
+
+ override def sub(s: ChanMap): ChannelTerm
+ override def addTo(s: PiState): PiState = s.copy(inputs = s.inputs + (this.channel -> this))
}
object Input {
/**
- * Creates an Input that can receive the pattern described by the PiObject through the given channel.
- */
- def of(res:PiObject,channel:String):Input = res match {
- case PiPair(l,r) => ParIn(channel,channel + "#L",channel + "#R",of(l,channel + "#L"),of(r,channel + "#R")) // TODO must be unique channels! Can we get these from the prover?
- case PiOpt(l,r) => WithIn(channel,channel + "#L",channel + "#R",of(l,channel + "#L"),of(r,channel + "#R"))
- case Chan(s) => Devour(channel,s)
- case _ => Devour(channel,channel+"##")
- }
+ * Creates an Input that can receive the pattern described by the PiObject through the given channel.
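+ * For example (sketch): `Input.of(PiPair(Chan("l"), Chan("r")), "x")` yields a
+ * `ParIn` on channel `x` with `Devour` continuations on `x#L` and `x#R`.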
+ */
+ def of(res: PiObject, channel: String): Input =
+ res match {
+ case PiPair(l, r) =>
+ // TODO must be unique channels! Can we get these from the prover?
+ ParIn(channel, channel + "#L", channel + "#R", of(l, channel + "#L"), of(r, channel + "#R"))
+ case PiOpt(l, r) =>
+ WithIn(channel, channel + "#L", channel + "#R", of(l, channel + "#L"), of(r, channel + "#R"))
+ case Chan(s) => Devour(channel, s)
+ case _ => Devour(channel, channel + "##")
+ }
}
/**
- * c(v).0
- * Receives any PiObject through channel c and maps it to channel v.
- */
-case class Devour(c:String,v:String) extends Input {
- override val channel:Chan = Chan(c)
- override def admits(a:PiObject):Boolean = true
- override def sub(s:ChanMap) = Devour(s.resolve(c),v)
- override def fresh(i:Int) = Devour(c + "#" + i,v)
-
- override def receive(a:PiObject):(List[Term],ChanMap) = (List(),ChanMap((Chan(v),a)))
-}
+ * c(v).0
+ * Receives any PiObject through channel c and maps it to channel v.
+ */
+case class Devour(c: String, v: String) extends Input {
+ override val channel: Chan = Chan(c)
+ override def admits(a: PiObject): Boolean = true
+ override def sub(s: ChanMap) = Devour(s.resolve(c), v)
+ override def fresh(i: Int) = Devour(c + "#" + i, v)
-/**
- * c(v).cont
- * Receives an atomic PiObject (channel or item) through channel c, maps it to channel v,
- * applies the resulting substitution to the continuation and returns the result.
- */
-case class In(c:String,v:String,cont:Term) extends Input {
- override val channel:Chan = Chan(c)
- override def admits(a:PiObject):Boolean = a match {
- case Chan(_) => true
- case PiItem(_) => true
- case _ => false
- }
- override def sub(s:ChanMap) = In(s.resolve(c),s.resolve(v),cont.sub(s)) // TODO this may need improvement to enforce name binding.
- override def fresh(i:Int) = In(c + "#" + i,v,cont.fresh(i))
-
- override def receive(a:PiObject):(List[Term],ChanMap) = (List(cont.sub(ChanMap((Chan(v),a)))),ChanMap())
+ override def receive(a: PiObject): (List[Term], ChanMap) = (List(), ChanMap((Chan(v), a)))
}
/**
- * c(lv,rv).(left | right)
- * Parallel input. Receives a PiPair, instantiates each continuation appropriately and returns them.
- */
-case class ParIn(c:String,lv:String,rv:String,left:Term,right:Term) extends Input {
- override val channel:Chan = Chan(c)
- override def admits(a:PiObject):Boolean = a match {
- case PiPair(_,_) => true
- case _ => false
- }
- override def sub(s:ChanMap) = ParIn(s.resolve(c),s.resolve(lv),s.resolve(rv),left.sub(s),right.sub(s)) // TODO this may need improvement to enforce name binding.
- override def fresh(i:Int) = ParIn(c + "#" + i,lv + "#" + i,rv + "#" + i,left.fresh(i),right.fresh(i))
-
- override def receive(a:PiObject):(List[Term],ChanMap) = a match {
- case PiPair(l,r) => (List(left.sub(ChanMap((Chan(lv),l))),right.sub(ChanMap((Chan(rv),r)))),ChanMap())
- case _ => (List(this),ChanMap())
+ * c(v).cont
+ * Receives an atomic PiObject (channel or item) through channel c, maps it to channel v,
+ * applies the resulting substitution to the continuation and returns the result.
+ */
+case class In(c: String, v: String, cont: Term) extends Input {
+ override val channel: Chan = Chan(c)
+ override def admits(a: PiObject): Boolean = a match {
+ case Chan(_) => true
+ case PiItem(_) => true
+ case _ => false
}
+ override def sub(s: ChanMap) =
+ // TODO this may need improvement to enforce name binding.
+ In(s.resolve(c), s.resolve(v), cont.sub(s))
+ override def fresh(i: Int) = In(c + "#" + i, v, cont.fresh(i))
+
+ override def receive(a: PiObject): (List[Term], ChanMap) = (List(cont.sub(ChanMap((Chan(v), a)))), ChanMap())
}
/**
- * c(lv,rv).cont
- * Parallel input with single continuation. This occurs from the application of the Par CLL rule.
- */
-case class ParInI(c:String,lv:String,rv:String,cont:Term) extends Input {
- override val channel:Chan = Chan(c)
- override def admits(a:PiObject):Boolean = a match {
- case PiPair(_,_) => true
- case _ => false
- }
- override def sub(s:ChanMap) = ParInI(s.resolve(c),s.resolve(lv),s.resolve(rv),cont.sub(s)) // TODO this may need improvement to enforce name binding.
- override def fresh(i:Int) = ParInI(c + "#" + i,lv + "#" + i,rv + "#" + i,cont.fresh(i))
-
- override def receive(a:PiObject):(List[Term],ChanMap) = a match {
- case PiPair(l,r) => (List(cont.sub(ChanMap((Chan(lv),l),(Chan(rv),r)))),ChanMap())
- case _ => (List(this),ChanMap())
+ * c(lv,rv).(left | right)
+ * Parallel input. Receives a PiPair, instantiates each continuation appropriately and returns them.
+ */
+case class ParIn(c: String, lv: String, rv: String, left: Term, right: Term) extends Input {
+ override val channel: Chan = Chan(c)
+ override def admits(a: PiObject): Boolean = a match {
+ case PiPair(_, _) => true
+ case _ => false
+ }
+ override def sub(s: ChanMap) =
+ // TODO this may need improvement to enforce name binding.
+ ParIn(s.resolve(c), s.resolve(lv), s.resolve(rv), left.sub(s), right.sub(s))
+ override def fresh(i: Int) = ParIn(c + "#" + i, lv + "#" + i, rv + "#" + i, left.fresh(i), right.fresh(i))
+
+ override def receive(a: PiObject): (List[Term], ChanMap) = a match {
+ case PiPair(l, r) => (List(left.sub(ChanMap((Chan(lv), l))), right.sub(ChanMap((Chan(rv), r)))), ChanMap())
+ case _ => (List(this), ChanMap())
}
}
/**
- * c(lv,rv).(left + right)
- * Optional input. Can only receive a PiLeft or PiRight. Instantiates and returns the corresponding continuation.
- */
-case class WithIn(c:String,lv:String,rv:String,left:Term,right:Term) extends Input {
- override val channel:Chan = Chan(c)
- override def admits(a:PiObject):Boolean = a match {
- case PiLeft(_) => true
- case PiRight(_) => true
- case _ => false
- }
- override def sub(s:ChanMap) = WithIn(s.resolve(c),s.resolve(lv),s.resolve(rv),left.sub(s),right.sub(s)) // TODO this may need improvement to enforce name binding.
- override def fresh(i:Int) = WithIn(c + "#" + i,lv + "#" + i,rv + "#" + i,left.fresh(i),right.fresh(i))
-
- override def receive(a:PiObject):(List[Term],ChanMap) = a match {
- case PiLeft(l) => (List(left.sub(ChanMap((Chan(lv),l)))),ChanMap())
- case PiRight(r) => (List(right.sub(ChanMap((Chan(rv),r)))),ChanMap())
- case _ => (List(this),ChanMap())
+ * c(lv,rv).cont
+ * Parallel input with single continuation. This occurs from the application of the Par CLL rule.
+ */
+case class ParInI(c: String, lv: String, rv: String, cont: Term) extends Input {
+ override val channel: Chan = Chan(c)
+ override def admits(a: PiObject): Boolean = a match {
+ case PiPair(_, _) => true
+ case _ => false
}
-}
+ override def sub(s: ChanMap) =
+ // TODO this may need improvement to enforce name binding.
+ ParInI(s.resolve(c), s.resolve(lv), s.resolve(rv), cont.sub(s))
+ override def fresh(i: Int) = ParInI(c + "#" + i, lv + "#" + i, rv + "#" + i, cont.fresh(i))
+ override def receive(a: PiObject): (List[Term], ChanMap) = a match {
+ case PiPair(l, r) => (List(cont.sub(ChanMap((Chan(lv), l), (Chan(rv), r)))), ChanMap())
+ case _ => (List(this), ChanMap())
+ }
+}
+/**
+ * c(lv,rv).(left + right)
+ * Optional input. Can only receive a PiLeft or PiRight. Instantiates and returns the corresponding continuation.
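+ * For example (sketch): receiving `PiLeft(Chan("a"))` returns the `left` continuation
+ * instantiated with the substitution `lv -> Chan("a")`.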
+ */
+case class WithIn(c: String, lv: String, rv: String, left: Term, right: Term) extends Input {
+ override val channel: Chan = Chan(c)
+ override def admits(a: PiObject): Boolean = a match {
+ case PiLeft(_) => true
+ case PiRight(_) => true
+ case _ => false
+ }
+ override def sub(s: ChanMap) =
+ // TODO this may need improvement to enforce name binding.
+ WithIn(s.resolve(c), s.resolve(lv), s.resolve(rv), left.sub(s), right.sub(s))
+ override def fresh(i: Int) = WithIn(c + "#" + i, lv + "#" + i, rv + "#" + i, left.fresh(i), right.fresh(i))
+ override def receive(a: PiObject): (List[Term], ChanMap) = a match {
+ case PiLeft(l) => (List(left.sub(ChanMap((Chan(lv), l)))), ChanMap())
+ case PiRight(r) => (List(right.sub(ChanMap((Chan(rv), r)))), ChanMap())
+ case _ => (List(this), ChanMap())
+ }
+}
/**
- * Output Terms for proofs-as-processes compositions.
- */
+ * Output Terms for proofs-as-processes compositions.
+ */
sealed trait Output extends ChannelTerm {
/**
- * Sends its output through the given PiState
- * Returns a list of continuations, the PiObject it wants to send, and an updated PiState.
- * We often need to update the freshness counter of the state, hence the 3rd output.
- */
- def send(s:PiState):(List[Term],PiObject,PiState)
- override def addTo(s:PiState):PiState = s.copy(outputs=s.outputs + (this.channel->this))
+ * Sends its output through the given PiState
+ * Returns a list of continuations, the PiObject it wants to send, and an updated PiState.
+ * We often need to update the freshness counter of the state, hence the 3rd output.
+ */
+ def send(s: PiState): (List[Term], PiObject, PiState)
+ override def addTo(s: PiState): PiState = s.copy(outputs = s.outputs + (this.channel -> this))
}
object Output {
/**
- * Creates an Output that can send the given PiObject through the given channel.
- */
- def of(res:PiObject,channel:String):Output = res match {
- case PiPair(l,r) => ParOut(channel,channel + "#L",channel + "#R",of(l,channel + "#L"),of(r,channel + "#R")) // TODO must be unique channels! Can we get these from the prover?
- case PiLeft(l) => LeftOut(channel,channel + "#L",of(l,channel + "#L"))
- case PiRight(r) => RightOut(channel,channel + "#R",of(r,channel + "#R"))
- case _ => Out(channel,res)
- }
+ * Creates an Output that can send the given PiObject through the given channel.
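+ * For example (sketch): `Output.of(PiItem(42), "x")` is simply `Out("x", PiItem(42))`,
+ * whereas a `PiPair` becomes a `ParOut` over sub-channels `x#L` and `x#R`.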
+ */
+ def of(res: PiObject, channel: String): Output = res match {
+ case PiPair(l, r) =>
+ // TODO must be unique channels! Can we get these from the prover?
+ ParOut(channel, channel + "#L", channel + "#R", of(l, channel + "#L"), of(r, channel + "#R"))
+ case PiLeft(l) => LeftOut(channel, channel + "#L", of(l, channel + "#L"))
+ case PiRight(r) => RightOut(channel, channel + "#R", of(r, channel + "#R"))
+ case _ => Out(channel, res)
+ }
}
/**
- * 'c.0
- * Sends a PiObject through a channel.
- */
-case class Out(c:String,a:PiObject) extends Output {
- override val channel:Chan = Chan(c)
- override def sub(s:ChanMap) = Out(s.resolve(c),s.sub(a))
- override def fresh(i:Int) = Out(c + "#" + i,a)
- override def send(s:PiState):(List[Term],PiObject,PiState) = (List(),a,s)
+ * 'c.0
+ * Sends a PiObject through a channel.
+ */
+case class Out(c: String, a: PiObject) extends Output {
+ override val channel: Chan = Chan(c)
+ override def sub(s: ChanMap) = Out(s.resolve(c), s.sub(a))
+ override def fresh(i: Int) = Out(c + "#" + i, a)
+ override def send(s: PiState): (List[Term], PiObject, PiState) = (List(), a, s)
}
/**
- * 'c.(left | right)
- * Parallel output.
- * In proofs-as-processes, parallel communication (i.e. of a pair of objects) happens in 2 stages:
- * 1) Use a common channel to send 2 new channels, one for the left side of the pair and one for the right.
- * 2) Communicate each part of the pair through its respective channel.
- * This performs the output for the first stage.
- * Creates fresh versions of lc and rc and then sends them out as a pair.
- */
-case class ParOut(c:String,lc:String,rc:String,left:Term,right:Term) extends Output {
- override val channel:Chan = Chan(c)
- override def sub(s:ChanMap) = ParOut(s.resolve(c),s.resolve(lc),s.resolve(rc),left.sub(s),right.sub(s))
- override def fresh(i:Int) = ParOut(c + "#" + i,lc + "#" + i,rc + "#" + i,left.fresh(i),right.fresh(i))
- override def send(s:PiState):(List[Term],PiObject,PiState) = {
+ * 'c.(left | right)
+ * Parallel output.
+ * In proofs-as-processes, parallel communication (i.e. of a pair of objects) happens in 2 stages:
+ * 1) Use a common channel to send 2 new channels, one for the left side of the pair and one for the right.
+ * 2) Communicate each part of the pair through its respective channel.
+ * This performs the output for the first stage.
+ * Creates fresh versions of lc and rc and then sends them out as a pair.
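+ *
+ * For example (sketch): with `freshCtr = 3`, `ParOut("x", "l", "r", left, right)` sends
+ * `PiPair(Chan("l#3"), Chan("r#3"))` and substitutes the fresh channels into `left` and `right`.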
+ */
+case class ParOut(c: String, lc: String, rc: String, left: Term, right: Term) extends Output {
+ override val channel: Chan = Chan(c)
+ override def sub(s: ChanMap) = ParOut(s.resolve(c), s.resolve(lc), s.resolve(rc), left.sub(s), right.sub(s))
+ override def fresh(i: Int) = ParOut(c + "#" + i, lc + "#" + i, rc + "#" + i, left.fresh(i), right.fresh(i))
+ override def send(s: PiState): (List[Term], PiObject, PiState) = {
val freshlc = lc + "#" + s.freshCtr
val freshrc = rc + "#" + s.freshCtr
- val m = ChanMap((Chan(lc),Chan(freshlc)),(Chan(rc),Chan(freshrc)))
- (List(left.sub(m),right.sub(m)),PiPair(Chan(freshlc),Chan(freshrc)),s.incFCtr())
+ val m = ChanMap((Chan(lc), Chan(freshlc)), (Chan(rc), Chan(freshrc)))
+ (List(left.sub(m), right.sub(m)), PiPair(Chan(freshlc), Chan(freshrc)), s.incFCtr())
}
}
/**
- * 'c.cont
- * Optional left output.
- * We do not have sums here. Optional communication happens through pairs of channels as the parallel one.
- * However, WithIn only anticipates a PiLeft or a PiRight.
- * This returns a PiLeft with a fresh channel to receive the actual resource from.
- *
- * Note that this is different from the proofs-as-processes way of handling optional communication.
- * In that, channel polarity gets flipped. i.e. the output channel *receives* the 2 channels for each of
- * the options. We avoid this here.
- */
-case class LeftOut(c:String,lc:String,cont:Term) extends Output {
- override val channel:Chan = Chan(c)
- override def sub(s:ChanMap) = LeftOut(s.resolve(c),s.resolve(lc),cont.sub(s))
- override def fresh(i:Int) = LeftOut(c + "#" + i,lc + "#" + i,cont.fresh(i))
- override def send(s:PiState):(List[Term],PiObject,PiState) = {
+ * 'c.cont
+ * Optional left output.
+ * We do not have sums here. Optional communication happens through pairs of channels, as in the parallel case.
+ * However, WithIn only anticipates a PiLeft or a PiRight.
+ * This returns a PiLeft with a fresh channel to receive the actual resource from.
+ *
+ * Note that this is different from the proofs-as-processes way of handling optional communication.
+ * There, the channel polarity gets flipped, i.e. the output channel *receives* the 2 channels for each of
+ * the options. We avoid this here.
+ */
+case class LeftOut(c: String, lc: String, cont: Term) extends Output {
+ override val channel: Chan = Chan(c)
+ override def sub(s: ChanMap) = LeftOut(s.resolve(c), s.resolve(lc), cont.sub(s))
+ override def fresh(i: Int) = LeftOut(c + "#" + i, lc + "#" + i, cont.fresh(i))
+ override def send(s: PiState): (List[Term], PiObject, PiState) = {
val freshlc = lc + "#" + s.freshCtr
- val m = ChanMap((Chan(lc),Chan(freshlc)))
- (List(cont.sub(m)),PiLeft(Chan(freshlc)),s.incFCtr())
+ val m = ChanMap((Chan(lc), Chan(freshlc)))
+ (List(cont.sub(m)), PiLeft(Chan(freshlc)), s.incFCtr())
}
}
/**
- * 'c.cont
- * Optional right output.
- * We do not have sums here. Optional communication happens through pairs of channels as the parallel one.
- * However, WithIn only anticipates a PiLeft or a PiRight.
- * This returns a PiRight with a fresh channel to receive the actual resource from.
- *
- * Note that this is different from the proofs-as-processes way of handling optional communication.
- * In that, channel polarity gets flipped. i.e. the output channel *receives* the 2 channels for each of
- * the options. We avoid this here.
- */
-case class RightOut(c:String,rc:String,cont:Term) extends Output {
- override val channel:Chan = Chan(c)
- override def sub(s:ChanMap) = RightOut(s.resolve(c),s.resolve(rc),cont.sub(s))
- override def fresh(i:Int) = RightOut(c + "#" + i,rc + "#" + i,cont.fresh(i))
- override def send(s:PiState):(List[Term],PiObject,PiState) = {
+ * 'c.cont
+ * Optional right output.
+ * We do not have sums here. Optional communication happens through pairs of channels, as in the parallel case.
+ * However, WithIn only anticipates a PiLeft or a PiRight.
+ * This returns a PiRight with a fresh channel to receive the actual resource from.
+ *
+ * Note that this is different from the proofs-as-processes way of handling optional communication.
+ * There, the channel polarity gets flipped, i.e. the output channel *receives* the 2 channels for each of
+ * the options. We avoid this here.
+ */
+case class RightOut(c: String, rc: String, cont: Term) extends Output {
+ override val channel: Chan = Chan(c)
+ override def sub(s: ChanMap) = RightOut(s.resolve(c), s.resolve(rc), cont.sub(s))
+ override def fresh(i: Int) = RightOut(c + "#" + i, rc + "#" + i, cont.fresh(i))
+ override def send(s: PiState): (List[Term], PiObject, PiState) = {
val freshrc = rc + "#" + s.freshCtr
- val m = ChanMap((Chan(rc),Chan(freshrc)))
- (List(cont.sub(m)),PiRight(Chan(freshrc)),s.incFCtr())
+ val m = ChanMap((Chan(rc), Chan(freshrc)))
+ (List(cont.sub(m)), PiRight(Chan(freshrc)), s.incFCtr())
}
}
/**
- * i(n).'o.0
- * Axiom buffer
- */
+ * i(n).'o.0
+ * Axiom buffer
+ */
object PiId {
- def apply(i:String,o:String,n:String):Input = In(i,n,Out(o,Chan(n)))
+ def apply(i: String, o: String, n: String): Input = In(i, n, Out(o, Chan(n)))
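+
+ // e.g. (sketch) PiId("i", "o", "n") forwards whatever arrives on channel i out through channel o.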
}
/**
- * Proofs-as-processes cut pattern.
- * Substitutes channels lc in left and rc in right with a fresh version of z, then adds them to the state.
- * In a proofs-as-processes generated composition, one (currently the left) will be an input and the other
- * will be an output, so this will force them to communicate with each other.
- * It is nice, however, that polymorphism on withTerm allows us to not specify which is which, so we are closer to
- * the pure pi-calculus.
- */
-case class PiCut(z:String,lc:String,rc:String,left:Term,right:Term) extends Term {
- override def sub(s:ChanMap):PiCut = {
+ * Proofs-as-processes cut pattern.
+ * Substitutes channels lc in left and rc in right with a fresh version of z, then adds them to the state.
+ * In a proofs-as-processes generated composition, one (currently the left) will be an input and the other
+ * will be an output, so this will force them to communicate with each other.
+ * It is nice, however, that polymorphism on withTerm allows us to not specify which is which, so we are closer to
+ * the pure pi-calculus.
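+ *
+ * For example (sketch): with `freshCtr = 0`, adding `PiCut("z", "lc", "rc", left, right)`
+ * substitutes both `lc` and `rc` with the fresh channel `z#0`, forcing `left` and `right`
+ * to communicate through it.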
+ */
+case class PiCut(z: String, lc: String, rc: String, left: Term, right: Term) extends Term {
+ override def sub(s: ChanMap): PiCut = {
val sl = s - Chan(lc)
val sr = s - Chan(rc)
- PiCut(z,lc,rc,left.sub(sl),right.sub(sr))
+ PiCut(z, lc, rc, left.sub(sl), right.sub(sr))
}
- override def fresh(i:Int) = PiCut(z + "#" + i,lc + "#" + i,rc + "#" + i,left.fresh(i),right.fresh(i))
-
- override def addTo(s:PiState):PiState = {
+ override def fresh(i: Int) = PiCut(z + "#" + i, lc + "#" + i, rc + "#" + i, left.fresh(i), right.fresh(i))
+
+ override def addTo(s: PiState): PiState = {
val freshz = z + "#" + s.freshCtr
- s incFCtr() withTerm left.sub(ChanMap((Chan(lc),Chan(freshz)))) withTerm right.sub(ChanMap((Chan(rc),Chan(freshz))))
+ s incFCtr () withTerm left.sub(ChanMap((Chan(lc), Chan(freshz)))) withTerm right.sub(
+ ChanMap((Chan(rc), Chan(freshz)))
+ )
}
}
-
/**
- * Represents a pending call to an atomic process in 2 situations:
- * 1) A call has been encountered in the workflow, but we are waiting for the inputs to arrive.
- * 2) The inputs have arrived, we have made the call to the actual process, and we are waiting for it to return.
- * @param fun the name of the process
- * @param outChan the channel through which we should send the output when it arrives
- * @param args the process inputs, each including a pattern of the input and the corresponding channel it will arrive through
- */
-case class PiFuture(fun:String, outChan:Chan, args:Seq[PiResource]) {
- /**
- * We call this once the process output has arrived, to create the appropriate Output pi-calculus term.
- */
- def toOutput(res:PiObject):Output = Output.of(res,outChan.s)
-
- def sub(s:ChanMap):PiFuture = copy(args = args map { case PiResource(o,c) => PiResource(s.sub(o),c) })
-
+ * Represents a pending call to an atomic process in 2 situations:
+ * 1) A call has been encountered in the workflow, but we are waiting for the inputs to arrive.
+ * 2) The inputs have arrived, we have made the call to the actual process, and we are waiting for it to return.
+ * @param fun the name of the process
+ * @param outChan the channel through which we should send the output when it arrives
+ * @param args the process inputs, each including a pattern of the input and the corresponding channel it will arrive through
+ */
+case class PiFuture(fun: String, outChan: Chan, args: Seq[PiResource]) {
/**
- * We call this to check if all inputs have arrived and we can execute the process.
- * @return an instantiated instance where all inputs are ground so we can make the call
- * to the actual process, or None if not all inputs have arrived
- */
- def execute(m:ChanMap):Option[PiFuture] = {
+ * We call this once the process output has arrived, to create the appropriate Output pi-calculus term.
+ */
+ def toOutput(res: PiObject): Output = Output.of(res, outChan.s)
+
+ def sub(s: ChanMap): PiFuture = copy(args = args map { case PiResource(o, c) => PiResource(s.sub(o), c) })
+
+ /**
+ * We call this to check if all inputs have arrived and we can execute the process.
+ * @return a fully instantiated PiFuture in which all inputs are ground, so that we can make
+ * the call to the actual process, or None if not all inputs have arrived
+ */
+ def execute(m: ChanMap): Option[PiFuture] = {
val newFut = sub(m)
- if (newFut.args exists (!_.obj.isGround)) None
- else Some(newFut)
- }
+ if (newFut.args exists (!_.obj.isGround)) {
+ None
+ } else {
+ Some(newFut)
+ }
+ }
}
/**
- * A call to an atomic process in the composition.
- * @param name the name of the process being called
- * @param args the (channel) parameters given
- */
-case class PiCall(name:String, args:Seq[Chan]) extends Term {
- override def sub(s:ChanMap):PiCall = copy(args = args map s.resolve)
- override def fresh(i:Int) = PiCall(name,args map (_.fresh(i)))
-
- override def addTo(s:PiState):PiState = s.handleCall(this)
+ * A call to an atomic process in the composition.
+ * @param name the name of the process being called
+ * @param args the (channel) parameters given
+ */
+case class PiCall(name: String, args: Seq[Chan]) extends Term {
+ override def sub(s: ChanMap): PiCall = copy(args = args map s.resolve)
+ override def fresh(i: Int) = PiCall(name, args map (_.fresh(i)))
+
+ override def addTo(s: PiState): PiState = s.handleCall(this)
}
object PiCall {
- def <(n:String,args:String*):PiCall = PiCall(n,args map Chan)
+ def <(n: String, args: String*): PiCall = PiCall(n, args map Chan)
}
diff --git a/src/com/workflowfm/pew/execution/AkkaExecutor.scala b/src/com/workflowfm/pew/execution/AkkaExecutor.scala
index 5c0cec6..73e8843 100644
--- a/src/com/workflowfm/pew/execution/AkkaExecutor.scala
+++ b/src/com/workflowfm/pew/execution/AkkaExecutor.scala
@@ -1,213 +1,295 @@
package com.workflowfm.pew.execution
+import com.workflowfm.pew._
+
import akka.actor._
import akka.pattern.{ask, pipe}
import akka.util.Timeout
-import com.workflowfm.pew._
import scala.concurrent._
import scala.concurrent.duration._
import scala.util.{Failure, Success}
+class AkkaExecutor(store: PiInstanceStore[Int], processes: PiProcessStore)(
+ implicit val system: ActorSystem,
+ implicit override val executionContext: ExecutionContext = ExecutionContext.global,
+ implicit val timeout: FiniteDuration = 10.seconds
+) extends SimulatorExecutor[Int]
+ with PiObservable[Int] {
-class AkkaExecutor (
- store:PiInstanceStore[Int],
- processes:PiProcessStore
-)(
- implicit val system: ActorSystem,
- override implicit val executionContext: ExecutionContext = ExecutionContext.global,
- implicit val timeout:FiniteDuration = 10.seconds
-) extends SimulatorExecutor[Int] with PiObservable[Int] {
+ def this(
+ store: PiInstanceStore[Int],
+ l: PiProcess*
+ )(implicit system: ActorSystem, context: ExecutionContext, timeout: FiniteDuration) =
+ this(store, SimpleProcessStore(l: _*))
- def this(store:PiInstanceStore[Int], l:PiProcess*)
- (implicit system: ActorSystem, context: ExecutionContext, timeout:FiniteDuration) =
- this(store,SimpleProcessStore(l :_*))
+ def this(system: ActorSystem, context: ExecutionContext, timeout: FiniteDuration, l: PiProcess*) =
+ this(SimpleInstanceStore[Int](), SimpleProcessStore(l: _*))(system, context, timeout)
- def this(system: ActorSystem, context: ExecutionContext, timeout:FiniteDuration,l:PiProcess*) =
- this(SimpleInstanceStore[Int](),SimpleProcessStore(l :_*))(system,context,timeout)
+ def this(l: PiProcess*)(implicit system: ActorSystem) =
+ this(SimpleInstanceStore[Int](), SimpleProcessStore(l: _*))(system, ExecutionContext.global, 10.seconds)
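+
+ // Example (a sketch, with hypothetical processes p1 and p2):
+ //   implicit val system: ActorSystem = ActorSystem("pew")
+ //   val executor = new AkkaExecutor(p1, p2)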
- def this(l:PiProcess*)(implicit system: ActorSystem) =
- this(SimpleInstanceStore[Int](),SimpleProcessStore(l :_*))(system,ExecutionContext.global,10.seconds)
+ // Execution actor
+ val execActor: ActorRef = system.actorOf(AkkaExecutor.execprops(store, processes))
+ // Future await timeout
+ implicit val tOut: Timeout = Timeout(timeout)
- val execActor = system.actorOf(AkkaExecutor.execprops(store,processes))
- implicit val tOut = Timeout(timeout)
-
- override def simulationReady = Await.result(execActor ? AkkaExecutor.SimReady,timeout).asInstanceOf[Boolean]
-
- override protected def init(process:PiProcess,args:Seq[PiObject]):Future[Int] =
- execActor ? AkkaExecutor.Init(process,args) map (_.asInstanceOf[Int])
-
- override protected def start(id:Int) = execActor ! AkkaExecutor.Start(id)
+ override def simulationReady: Boolean =
+ Await
+ .result(execActor ? AkkaExecutor.SimReady, timeout)
+ .asInstanceOf[Boolean]
- override def subscribe(handler:PiEventHandler[Int]):Future[Boolean] = (execActor ? AkkaExecutor.Subscribe(handler)).mapTo[Boolean]
+ override protected def init(process: PiProcess, args: Seq[PiObject]): Future[Int] =
+ execActor ? AkkaExecutor.Init(process, args) map (_.asInstanceOf[Int])
- override def unsubscribe(name:String):Future[Boolean] = (execActor ? AkkaExecutor.Unsubscribe(name)).mapTo[Boolean]
+ override protected def start(id: Int): Unit =
+ execActor ! AkkaExecutor.Start(id)
+
+ override def subscribe(handler: PiEventHandler[Int]): Future[Boolean] =
+ (execActor ? AkkaExecutor.Subscribe(handler))
+ .mapTo[Boolean]
+
+ override def unsubscribe(name: String): Future[Boolean] =
+ (execActor ? AkkaExecutor.Unsubscribe(name))
+ .mapTo[Boolean]
}
+// Defines messages between the different actors involved in the execution
object AkkaExecutor {
- case class Init(p:PiProcess,args:Seq[PiObject])
- case class Start(id:Int)
- case class Result(id:Int,ref:Int,res:MetadataAtomicProcess.MetadataAtomicResult)
- case class Error(id:Int,ref:Int,ex:Throwable)
-
- case class ACall(id:Int,ref:Int,p:MetadataAtomicProcess,args:Seq[PiObject],actor:ActorRef)
+ case class Init(p: PiProcess, args: Seq[PiObject])
+ case class Start(id: Int)
+ case class Result(id: Int, ref: Int, res: MetadataAtomicProcess.MetadataAtomicResult)
+ case class Error(id: Int, ref: Int, ex: Throwable)
+
+ case class ACall(id: Int, ref: Int, p: MetadataAtomicProcess, args: Seq[PiObject], actor: ActorRef)
case object AckCall
-
- case class AFuture(f:Future[Any])
-
+
+ case class AFuture(f: Future[Any])
+
case object Ping
case object SimReady
-
- case class Subscribe(handler:PiEventHandler[Int])
- case class Unsubscribe(name:String)
- def atomicprops(implicit context: ExecutionContext = ExecutionContext.global): Props = Props(new AkkaAtomicProcessExecutor())
- def execprops(store:PiInstanceStore[Int], processes:PiProcessStore)(implicit system: ActorSystem, exc: ExecutionContext): Props = Props(new AkkaExecActor(store,processes))
+ case class Subscribe(handler: PiEventHandler[Int])
+ case class Unsubscribe(name: String)
+
+ /**
+ * Properties of an actor for an atomic process executor
+ *
+ * @param context Actor execution context
+ * @return Atomic process executor actor properties
+ */
+ def atomicprops(implicit context: ExecutionContext = ExecutionContext.global): Props =
+ Props(new AkkaAtomicProcessExecutor())
+
+ /**
+ * Properties of an actor for a process executor
+ *
+ * @param store Process instance store
+ * @param processes Process store
+ * @param system Akka actor system
+ * @param exc Actor execution context
+ * @return Process executor actor properties
+ */
+ def execprops(
+ store: PiInstanceStore[Int],
+ processes: PiProcessStore
+ )(implicit system: ActorSystem, exc: ExecutionContext): Props =
+ Props(new AkkaExecActor(store, processes))
}
-class AkkaExecActor(
- var store:PiInstanceStore[Int],
- processes:PiProcessStore
-)(
- implicit system: ActorSystem,
- override implicit val executionContext: ExecutionContext = ExecutionContext.global
-) extends Actor with SimplePiObservable[Int] {
-
- var ctr:Int = 0
-
- def init(p:PiProcess,args:Seq[PiObject]):Int = {
- val inst = PiInstance(ctr,p,args:_*)
- store = store.put(inst)
- ctr = ctr + 1
- ctr-1
+/**
+ * Process executor actor
+ *
+ * @param store Process instance store
+ * @param processes Process store
+ * @param system Akka actor system
+ * @param executionContext Actor execution context
+ */
+class AkkaExecActor(var store: PiInstanceStore[Int], processes: PiProcessStore)(
+ implicit system: ActorSystem,
+ implicit override val executionContext: ExecutionContext = ExecutionContext.global
+) extends Actor
+ with SimplePiObservable[Int] {
+
+ var ctr: Int = 0
+
+ /**
+ * Initialize a workflow
+ *
+ * @param p Workflow process
+ * @param args Instantiation arguments
+ * @return ID of the workflow instance
+ */
+ def init(p: PiProcess, args: Seq[PiObject]): Int = {
+ val inst = PiInstance(ctr, p, args: _*)
+ store = store.put(inst)
+ ctr = ctr + 1
+ ctr - 1
}
-
- def start(id:Int):Unit = store.get(id) match {
+
+ /**
+ * Start a workflow instance.
+ * Publishes [[PiFailureNoSuchInstance]] if no such instance is found in the store.
+ *
+ * @param id ID of the workflow instance
+ */
+ def start(id: Int): Unit = store.get(id) match {
case None => publish(PiFailureNoSuchInstance(id))
- case Some(inst) => {
+ case Some(inst) =>
publish(PiEventStart(inst))
- val ni = inst.reduce
- if (ni.completed) ni.result match {
- case None => {
- publish(PiFailureNoResult(ni))
- store = store.del(id)
- }
- case Some(res) => {
- publish(PiEventResult(ni, res))
- store = store.del(id)
- }
- } else {
- val (toCall,resi) = ni.handleThreads(handleThread(ni))
- val futureCalls = toCall flatMap (resi.piFutureOf)
- //System.err.println("*** [" + ctr + "] Updating state after init")
- store = store.put(resi)
- (toCall zip futureCalls) map runThread(resi)
- }
- }
+
+ // Reduce the instance and check for completion
+ val newInstance = inst.reduce
+ if (newInstance.completed) {
+ // Check for result and publish it (or the failure)
+ newInstance.result match {
+ case None =>
+ publish(PiFailureNoResult(newInstance))
+ store = store.del(id)
+ case Some(res) =>
+ publish(PiEventResult(newInstance, res))
+ store = store.del(id)
+ }
+ } else {
+ // Validate each thread the new instance wants to call
+ val (toCall, resultInstance) = newInstance.handleThreads(handleThread(newInstance))
+
+ // Get the calls' Futures
+ val futureCalls = toCall flatMap resultInstance.piFutureOf
+
+ // Put the resulting instance to the store
+ store = store.put(resultInstance)
+
+ // Run each of the new actors
+ (toCall zip futureCalls) foreach runThread(resultInstance)
+ }
}
-
- final def postResult(id:Int,ref:Int, res:MetadataAtomicProcess.MetadataAtomicResult):Unit = {
- publish(PiEventReturn(id,ref,PiObject.get(res._1),res._2))
+
+ /**
+ * Publish a process instance result.
+ * Publishes [[PiFailureNoSuchInstance]] if no such instance is found in the store.
+ *
+ * @param id ID of the instance
+ * @param ref Thread reference
+ * @param res The process result together with its metadata
+ */
+ final def postResult(id: Int, ref: Int, res: MetadataAtomicProcess.MetadataAtomicResult): Unit = {
+ publish(PiEventReturn(id, ref, PiObject.get(res._1), res._2))
store.get(id) match {
case None => publish(PiFailureNoSuchInstance(id))
- case Some(i) =>
- if (i.id != id) System.err.println("*** [" + id + "] Different instance ID encountered: " + i.id) // This should never happen. We trust the Instance Store!
- else {
- //System.err.println("*** [" + id + "] Running!")
- val ni = i.postResult(ref, res._1).reduce
- if (ni.completed) ni.result match {
- case None => {
- publish(PiFailureNoResult(ni))
- store = store.del(ni.id)
- }
- case Some(res) => {
- publish(PiEventResult(ni, res))
- store = store.del(ni.id)
- }
- } else {
- val (toCall,resi) = ni.handleThreads(handleThread(ni))
- val futureCalls = toCall flatMap (resi.piFutureOf)
- //System.err.println("*** [" + i.id + "] Updating state after: " + ref)
- store = store.put(resi)
- (toCall zip futureCalls) map runThread(resi)
- }
+ case Some(i) =>
+ if (i.id != id) {
+ // This should never happen. We trust the Instance Store!
+ System.err.println("*** [" + id + "] Different instance ID encountered: " + i.id)
+ } else {
+ // Reduce the instance and check for completion
+ val newInstance = i.postResult(ref, res._1).reduce
+ if (newInstance.completed) {
+ // Check for result and publish it (or the failure)
+ newInstance.result match {
+ case None =>
+ publish(PiFailureNoResult(newInstance))
+ store = store.del(newInstance.id)
+ case Some(result) =>
+ publish(PiEventResult(newInstance, result))
+ store = store.del(newInstance.id)
+ }
+ } else {
+ // Create actors for each thread the new instance wants to call
+ val (toCall, resultInstance) = newInstance.handleThreads(handleThread(newInstance))
+
+ // Get the calls' Futures
+ val futureCalls = toCall flatMap resultInstance.piFutureOf
+
+ // Put the resulting instance in the store
+ store = store.put(resultInstance)
+
+ // Run each of the new actors
+ (toCall zip futureCalls) foreach runThread(resultInstance)
+ }
}
- }
+ }
}
-
- def handleThread(i:PiInstance[Int])(ref:Int,f:PiFuture):Boolean = {
- //System.err.println("*** [" + id + "] Checking thread: " + ref + " (" + f.fun + ")")
+
+ def handleThread(i: PiInstance[Int])(ref: Int, f: PiFuture): Boolean = {
+ //TODO Isn't this match redundant, given that f is already typed as PiFuture in the argument list?
+ // It seems to be used only to destructure the components of f.
f match {
- case PiFuture(name, outChan, args) => i.getProc(name) match {
- case None => {
- publish(PiFailureUnknownProcess(i, name))
- false
- }
- case Some(p:MetadataAtomicProcess) => true
- case Some(p:CompositeProcess) => { // TODO this should never happen!
- publish(PiFailureAtomicProcessIsComposite(i, name))
- false
- }
+ case PiFuture(name, _, _) =>
+ i.getProc(name) match {
+ case None =>
+ publish(PiFailureUnknownProcess(i, name))
+ false
+ case Some(_: MetadataAtomicProcess) => true
+ case Some(_: CompositeProcess) =>
+ // Composite processes should not be encountered here
+ publish(PiFailureAtomicProcessIsComposite(i, name))
+ false
+ }
}
- } }
- //
-
- def runThread(i:PiInstance[Int])(t:(Int,PiFuture)):Unit = {
- //System.err.println("*** [" + id + "] Running thread: " + t._1 + " (" + t._2.fun + ")")
+ }
+
+ def runThread(i: PiInstance[Int])(t: (Int, PiFuture)): Unit = {
+ //TODO Isn't this match redundant, given that t._2 is already typed as PiFuture in the argument list?
+ // It seems to be used only to destructure the components of t.
t match {
- case (ref,PiFuture(name, outChan, args)) => i.getProc(name) match {
- case None => {
- // This should never happen! We already checked!
- System.err.println("*** [" + i.id + "] ERROR *** Unable to find process: " + name + " even though we checked already")
- }
- case Some(p:MetadataAtomicProcess) => {
- implicit val tOut = Timeout(1.second)
- val objs = args map (_.obj)
- try {
- publish(PiEventCall(i.id,ref,p,objs))
- // TODO Change from ! to ? to require an acknowledgement
- system.actorOf(AkkaExecutor.atomicprops()) ! AkkaExecutor.ACall(i.id,ref,p,objs,self)
- } catch {
- case _:Throwable => Unit //TODO specify timeout exception here! - also print a warning
+ case (ref, PiFuture(name, _, args)) =>
+ i.getProc(name) match {
+ case None =>
+ // This should never happen! We already checked!
+ System.err.println(
+ "*** [" + i.id + "] ERROR *** Unable to find process: " + name + " even though we checked already"
+ )
+ case Some(p: MetadataAtomicProcess) =>
+ // Prepare the ask timeout and extract the raw argument objects
+ implicit val tOut: Timeout = Timeout(1.second)
+ val objs = args map (_.obj)
+
+ // Create a new actor for the atomic process and tell it about the call
+ try {
+ publish(PiEventCall(i.id, ref, p, objs))
+ // TODO Change from ! to ? to require an acknowledgement
+ system.actorOf(AkkaExecutor.atomicprops()) ! AkkaExecutor.ACall(i.id, ref, p, objs, self)
+ } catch {
+ case _: Throwable => () //TODO catch the specific timeout exception here and print a warning
+ }
+ case Some(_: CompositeProcess) =>
+ // Composite processes should not be encountered here
+ // This is also already checked by handleThread
+ publish(PiFailureAtomicProcessIsComposite(i, name))
}
- }
- case Some(p:CompositeProcess) => {// This should never happen! We already checked!
- publish(PiFailureAtomicProcessIsComposite(i, name))
- }
}
- } }
-
- def simulationReady():Boolean = store.simulationReady
-
- def receive = {
- case AkkaExecutor.Init(p,args) => sender() ! init(p,args)
- case AkkaExecutor.Start(id) => start(id)
- case AkkaExecutor.Result(id,ref,res) => postResult(id,ref,res)
- case AkkaExecutor.Error(id,ref,ex) => {
- publish( PiFailureAtomicProcessException(id,ref,ex) )
+ }
+
+ def simulationReady(): Boolean = store.simulationReady
+
+ def receive: PartialFunction[Any, Unit] = {
+ case AkkaExecutor.Init(p, args) => sender() ! init(p, args)
+ case AkkaExecutor.Start(id) => start(id)
+ case AkkaExecutor.Result(id, ref, res) => postResult(id, ref, res)
+ case AkkaExecutor.Error(id, ref, ex) =>
+ publish(PiFailureAtomicProcessException(id, ref, ex))
store = store.del(id)
- }
- case AkkaExecutor.Ping => sender() ! AkkaExecutor.Ping
- case AkkaExecutor.AckCall => Unit
- case AkkaExecutor.SimReady => sender() ! simulationReady()
- case AkkaExecutor.Subscribe(h) => subscribe(h) pipeTo sender()
+ case AkkaExecutor.Ping => sender() ! AkkaExecutor.Ping
+ case AkkaExecutor.AckCall => ()
+ case AkkaExecutor.SimReady => sender() ! simulationReady()
+ case AkkaExecutor.Subscribe(h) => subscribe(h) pipeTo sender()
case AkkaExecutor.Unsubscribe(name) => unsubscribe(name) pipeTo sender()
- case m => System.err.println("!!! Received unknown message: " + m)
+ case m => System.err.println("!!! Received unknown message: " + m)
}
-
}
-class AkkaAtomicProcessExecutor(implicit val exc: ExecutionContext = ExecutionContext.global) extends Actor { //(executor:ActorRef,p:PiProcess,args:Seq[PiObject])
- def receive = {
- case AkkaExecutor.ACall(id,ref,p,args,actor) => {
+class AkkaAtomicProcessExecutor(implicit val exc: ExecutionContext = ExecutionContext.global) extends Actor { //(executor: ActorRef, p: PiProcess, args: Seq[PiObject])
+
+ def receive: PartialFunction[Any, Unit] = {
+ case AkkaExecutor.ACall(id, ref, p, args, actor) =>
//System.err.println("*** [" + id + "] Calling atomic process: " + p.name + " ref:" + ref)
- p.runMeta(args).onComplete{
- case Success(res) => actor ! AkkaExecutor.Result(id,ref,res)
- case Failure(ex) => actor ! AkkaExecutor.Error(id,ref,ex)
+ p.runMeta(args).onComplete {
+ case Success(res) => actor ! AkkaExecutor.Result(id, ref, res)
+ case Failure(ex) => actor ! AkkaExecutor.Error(id, ref, ex)
}
actor ! AkkaExecutor.AckCall
- }
case m => System.err.println("!! Received unknown message: " + m)
}
}
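
For reviewers who want to exercise the message protocol above, here is a minimal sketch of driving the actor directly. Everything outside this patch is illustrative: the demo object, the `???` process placeholder, and the argument value.

```scala
import akka.actor.{ActorSystem, Props}
import akka.pattern.ask
import akka.util.Timeout

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext}

// Assumes the pew classes from this patch are on the classpath.
object AkkaExecDemo extends App {
  implicit val system: ActorSystem = ActorSystem("pew")
  implicit val ec: ExecutionContext = system.dispatcher
  implicit val timeout: Timeout = Timeout(5.seconds)

  val proc: PiProcess = ??? // placeholder: any atomic PiProcess

  val actor = system.actorOf(
    Props(new AkkaExecActor(SimpleInstanceStore[Int](), SimpleProcessStore(proc)))
  )

  // Init replies with the new instance ID; Start triggers the first reduction.
  val id = Await.result((actor ? AkkaExecutor.Init(proc, Seq(PiObject("hello")))).mapTo[Int], 5.seconds)
  actor ! AkkaExecutor.Start(id)
}
```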
diff --git a/src/com/workflowfm/pew/execution/MultiStateExecutor.scala b/src/com/workflowfm/pew/execution/MultiStateExecutor.scala
index f5a3c24..0a3139b 100644
--- a/src/com/workflowfm/pew/execution/MultiStateExecutor.scala
+++ b/src/com/workflowfm/pew/execution/MultiStateExecutor.scala
@@ -6,103 +6,110 @@ import scala.concurrent._
import scala.util.{Failure, Success}
/**
- * Executes any PiProcess asynchronously.
- * Only holds a single state, so can only execute one workflow at a time.
- *
- * Running a second workflow after one has finished executing can be risky because
- * promises/futures from the first workflow can trigger changes on the state!
- */
+ * Executes any PiProcess asynchronously.
+ * Unlike [[SingleStateExecutor]], it holds a separate state for each workflow
+ * in a [[PiInstanceStore]], so it can execute multiple workflows concurrently.
+ */
-class MultiStateExecutor(var store:PiInstanceStore[Int], processes:PiProcessStore)
- (override implicit val executionContext: ExecutionContext = ExecutionContext.global)
- extends SimulatorExecutor[Int] with SimplePiObservable[Int] {
+class MultiStateExecutor(var store: PiInstanceStore[Int], processes: PiProcessStore)(
+ implicit override val executionContext: ExecutionContext = ExecutionContext.global
+) extends SimulatorExecutor[Int]
+ with SimplePiObservable[Int] {
- def this(store:PiInstanceStore[Int], l:PiProcess*) = this(store,SimpleProcessStore(l :_*))
- def this(l:PiProcess*) = this(SimpleInstanceStore[Int](),SimpleProcessStore(l :_*))
-
- var ctr:Int = 0
-
- override protected def init(p:PiProcess,args:Seq[PiObject]) = store.synchronized {
- val inst = PiInstance(ctr,p,args:_*)
- store = store.put(inst)
- ctr = ctr + 1
- Future.successful(ctr-1)
+ def this(store: PiInstanceStore[Int], l: PiProcess*) = this(store, SimpleProcessStore(l: _*))
+ def this(l: PiProcess*) = this(SimpleInstanceStore[Int](), SimpleProcessStore(l: _*))
+
+ var ctr: Int = 0
+
+ override protected def init(p: PiProcess, args: Seq[PiObject]) = store.synchronized {
+ val inst = PiInstance(ctr, p, args: _*)
+ store = store.put(inst)
+ ctr = ctr + 1
+ Future.successful(ctr - 1)
}
- override def start(id:Int):Unit = store.get(id) match {
+ override def start(id: Int): Unit = store.get(id) match {
case None => publish(PiFailureNoSuchInstance(id))
case Some(inst) => {
publish(PiEventStart(inst))
- val ni = inst.reduce
+ val ni = inst.reduce
if (ni.completed) ni.result match {
- case None => {
- publish(PiFailureNoResult(ni))
- store = store.del(id)
- }
- case Some(res) => {
- publish(PiEventResult(ni, res))
- store = store.del(id)
- }
- } else {
- val (_,resi) = ni.handleThreads(handleThread(ni))
- store = store.put(resi)
- }
+ case None => {
+ publish(PiFailureNoResult(ni))
+ store = store.del(id)
+ }
+ case Some(res) => {
+ publish(PiEventResult(ni, res))
+ store = store.del(id)
+ }
+ } else {
+ val (_, resi) = ni.handleThreads(handleThread(ni))
+ store = store.put(resi)
+ }
}
}
-
- final def run(id:Int,f:PiInstance[Int]=>PiInstance[Int]):Unit = store.synchronized {
- store.get(id) match {
+
+ final def run(id: Int, f: PiInstance[Int] => PiInstance[Int]): Unit = store.synchronized {
+ store.get(id) match {
case None => System.err.println("*** [" + id + "] No running instance! ***")
- case Some(i) =>
+ case Some(i) =>
if (i.id != id) System.err.println("*** [" + id + "] Different instance ID encountered: " + i.id)
else {
System.err.println("*** [" + id + "] Running!")
val ni = f(i).reduce
- if (ni.completed) ni.result match {
- case None => {
- publish(PiFailureNoResult(ni))
- store = store.del(ni.id)
- }
- case Some(res) => {
- publish(PiEventResult(ni, res))
- store = store.del(ni.id)
- }
- } else {
- store = store.put(ni.handleThreads(handleThread(ni))._2)
- }
+ if (ni.completed) ni.result match {
+ case None => {
+ publish(PiFailureNoResult(ni))
+ store = store.del(ni.id)
+ }
+ case Some(res) => {
+ publish(PiEventResult(ni, res))
+ store = store.del(ni.id)
+ }
+ } else {
+ store = store.put(ni.handleThreads(handleThread(ni))._2)
+ }
}
- }
+ }
}
-
- def handleThread(i:PiInstance[Int])(ref:Int,f:PiFuture):Boolean = {
- System.err.println("*** [" + i.id + "] Handling thread: " + ref + " (" + f.fun + ")")
+
+ def handleThread(i: PiInstance[Int])(ref: Int, f: PiFuture): Boolean = {
+ System.err.println("*** [" + i.id + "] Handling thread: " + ref + " (" + f.fun + ")")
f match {
- case PiFuture(name, outChan, args) => i.getProc(name) match {
- case None => {
- System.err.println("*** [" + i.id + "] ERROR *** Unable to find process: " + name)
- false
- }
- case Some(p:MetadataAtomicProcess) => {
- val objs = args map (_.obj)
- publish(PiEventCall(i.id,ref,p,objs))
- p.runMeta(args map (_.obj)).onComplete{
- case Success(res) => {
- publish(PiEventReturn(i.id,ref,PiObject.get(res._1),res._2))
- postResult(i.id,ref,res._1)
+ case PiFuture(name, outChan, args) =>
+ i.getProc(name) match {
+ case None => {
+ System.err.println("*** [" + i.id + "] ERROR *** Unable to find process: " + name)
+ false
}
- case Failure (ex) => publish(PiFailureAtomicProcessException(i.id,ref,ex))
+ case Some(p: MetadataAtomicProcess) => {
+ val objs = args map (_.obj)
+ publish(PiEventCall(i.id, ref, p, objs))
+ p.runMeta(args map (_.obj)).onComplete {
+ case Success(res) => {
+ publish(PiEventReturn(i.id, ref, PiObject.get(res._1), res._2))
+ postResult(i.id, ref, res._1)
+ }
+ case Failure(ex) => publish(PiFailureAtomicProcessException(i.id, ref, ex))
+ }
+ System.err.println("*** [" + i.id + "] Called process: " + p.name + " ref:" + ref)
+ true
+ }
+ case Some(p: CompositeProcess) => {
+ System.err.println("*** [" + i.id + "] Executor encountered composite process thread: " + name); false
+ } // TODO this should never happen!
}
- System.err.println("*** [" + i.id + "] Called process: " + p.name + " ref:" + ref)
- true
- }
- case Some(p:CompositeProcess) => { System.err.println("*** [" + i.id + "] Executor encountered composite process thread: " + name); false } // TODO this should never happen!
}
- } }
-
- def postResult(id:Int,ref:Int, res:PiObject):Unit = {
+ }
+
+ def postResult(id: Int, ref: Int, res: PiObject): Unit = {
System.err.println("*** [" + id + "] Received result for thread " + ref + " : " + res)
- run(id,{x => x.postResult(ref, res)})
+ run(id, { x =>
+ x.postResult(ref, res)
+ })
}
-
- override def simulationReady:Boolean = store.simulationReady
+
+ override def simulationReady: Boolean = store.simulationReady
}
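
The executors in this patch all share the same loop: reduce, check for completion, otherwise hand the pending threads to handleThread and store what remains. A condensed, illustrative sketch of that loop follows; the real handleThread decides per process whether to keep a thread, whereas this sketch keeps all of them.

```scala
// Condensed sketch only, using the PiInstance API as it appears in this patch.
def step(i: PiInstance[Int]): Option[PiInstance[Int]] = {
  val ni = i.reduce                            // apply all available reductions
  if (ni.completed) {
    ni.result.foreach(r => println(s"[${ni.id}] result: $r"))
    None                                       // finished: drop the instance
  } else {
    Some(ni.handleThreads((_, _) => true)._2)  // keep all pending threads
  }
}
```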
diff --git a/src/com/workflowfm/pew/execution/ProcessExecutor.scala b/src/com/workflowfm/pew/execution/ProcessExecutor.scala
index 835688f..d92bd5c 100644
--- a/src/com/workflowfm/pew/execution/ProcessExecutor.scala
+++ b/src/com/workflowfm/pew/execution/ProcessExecutor.scala
@@ -10,24 +10,26 @@ import scala.concurrent.duration._
*
* @deprecated
*/
-case class AtomicProcessExecutor(process:AtomicProcess) {
- def call(args:PiObject*)(implicit ec:ExecutionContext) = {
- val s = process.execState(args) fullReduce()
+case class AtomicProcessExecutor(process: AtomicProcess) {
+ def call(args: PiObject*)(implicit ec: ExecutionContext) = {
+ val s = process.execState(args) fullReduce ()
s.threads.headOption match {
case None => None
- case Some((ref,PiFuture(_, _, args))) => {
- val f = process.run(args map (_.obj))
- val res = Await.result(f,Duration.Inf)
- s.result(ref,res) map (_.fullReduce()) map { x => PiObject.get(x.resources.sub(process.output._1)) }
+ case Some((ref, PiFuture(_, _, args))) => {
+ val f = process.run(args map (_.obj))
+ val res = Await.result(f, Duration.Inf)
+ s.result(ref, res) map (_.fullReduce()) map { x =>
+ PiObject.get(x.resources.sub(process.output._1))
+ }
}
}
}
}
/**
- * Trait representing the ability to execute any PiProcess
- */
-trait ProcessExecutor[KeyT] { this:PiObservable[KeyT] =>
+ * Trait representing the ability to execute any PiProcess
+ */
+trait ProcessExecutor[KeyT] { this: PiObservable[KeyT] =>
/**
* Initializes a PiInstance for a process execution.
* This is always and only invoked before a {@code start}, hence why it is protected.
@@ -36,7 +38,7 @@ trait ProcessExecutor[KeyT] { this:PiObservable[KeyT] =>
* @param args The PiObject arguments to be passed to the process
* @return A Future with the new unique ID that was generated
*/
- protected def init(process:PiProcess,args:Seq[PiObject]):Future[KeyT]
+ protected def init(process: PiProcess, args: Seq[PiObject]): Future[KeyT]
/**
* Starts the execution of an initialized PiInstance.
@@ -44,7 +46,7 @@ trait ProcessExecutor[KeyT] { this:PiObservable[KeyT] =>
* This separation gives a chance to PiEventHandlers to subscribe before execution starts.
* @param id The ID of the instance to start executing
*/
- protected def start(id:KeyT):Unit
+ protected def start(id: KeyT): Unit
implicit val executionContext: ExecutionContext
@@ -54,8 +56,10 @@ trait ProcessExecutor[KeyT] { this:PiObservable[KeyT] =>
* @param args The (real) arguments to be passed to the process
* @return A Future with the ID corresponding to this execution
*/
- def call(process:PiProcess,args:Seq[Any]):Future[KeyT] = {
- init(process,args map PiObject.apply) map { id => start(id) ; id }
+ def call(process: PiProcess, args: Seq[Any]): Future[KeyT] = {
+ init(process, args map PiObject.apply) map { id =>
+ start(id); id
+ }
}
/**
@@ -66,10 +70,14 @@ trait ProcessExecutor[KeyT] { this:PiObservable[KeyT] =>
* @param factory A PiEventHandlerFactory which generates PiEventHandler's for a given ID
* @return A Future with the PiEventHandler that was generated
*/
- def call[H <: PiEventHandler[KeyT]](process:PiProcess,args:Seq[Any],factory:PiEventHandlerFactory[KeyT,H]):Future[H] = {
- init(process,args map PiObject.apply) flatMap { id =>
+ def call[H <: PiEventHandler[KeyT]](
+ process: PiProcess,
+ args: Seq[Any],
+ factory: PiEventHandlerFactory[KeyT, H]
+ ): Future[H] = {
+ init(process, args map PiObject.apply) flatMap { id =>
val handler = factory.build(id)
- subscribe(handler).map { _ =>
+ subscribe(handler).map { _ =>
start(id)
handler
}
@@ -82,36 +90,38 @@ trait ProcessExecutor[KeyT] { this:PiObservable[KeyT] =>
* @param args The (real) arguments to be passed to the process
* @return A Future with the result of the executed process
*/
- def execute(process:PiProcess,args:Seq[Any]):Future[Any] =
- call(process,args,new PromiseHandlerFactory[KeyT]({ id => s"[$id]"})) flatMap (_.future)
+ def execute(process: PiProcess, args: Seq[Any]): Future[Any] =
+ call(process, args, new PromiseHandlerFactory[KeyT]({ id =>
+ s"[$id]"
+ })) flatMap (_.future)
}
object ProcessExecutor {
// def default = SingleBlockingExecutor(Map[String,PiProcess]())
-
+
final case class AlreadyExecutingException(private val cause: Throwable = None.orNull)
- extends Exception("Unable to execute more than one process at a time", cause)
+ extends Exception("Unable to execute more than one process at a time", cause)
/*
final case class UnknownProcessException(val process:String, private val cause: Throwable = None.orNull)
- extends Exception("Unknown process: " + process, cause)
+ extends Exception("Unknown process: " + process, cause)
final case class AtomicProcessIsCompositeException(val process:String, private val cause: Throwable = None.orNull)
extends Exception("Executor encountered composite process thread: " + process + " (this should never happen!)", cause)
final case class NoResultException(val id:String, private val cause: Throwable = None.orNull)
- extends Exception("Failed to get result for: " + id, cause)
+ extends Exception("Failed to get result for: " + id, cause)
final case class NoSuchInstanceException(val id:String, private val cause: Throwable = None.orNull)
extends Exception("Failed to find instance with id: " + id, cause)
- */
+ */
}
-trait SimulatorExecutor[KeyT] extends ProcessExecutor[KeyT] { this:PiObservable[KeyT] =>
+trait SimulatorExecutor[KeyT] extends ProcessExecutor[KeyT] { this: PiObservable[KeyT] =>
/**
* This should check all executing PiInstances if they are simulationReady.
* This means that all possible execution has been performed and they are all
* waiting for simulation time to pass.
* @return true if all PiInstances are simulationReady
*/
- def simulationReady:Boolean
+ def simulationReady: Boolean
/**
* Executes a process with a PromiseHandler
@@ -121,8 +131,10 @@ trait SimulatorExecutor[KeyT] extends ProcessExecutor[KeyT] { this:PiObservable[
* @param args The (real) arguments to be passed to the process
* @return A Future with the result of the executed process
*/
- def simulate(process:PiProcess,args:Seq[Any],timeout:FiniteDuration=10.seconds):Future[Any] = {
- val f = call(process,args,new PromiseHandlerFactory[KeyT]({ id => s"[$id]"}))
+ def simulate(process: PiProcess, args: Seq[Any], timeout: FiniteDuration = 10.seconds): Future[Any] = {
+ val f = call(process, args, new PromiseHandlerFactory[KeyT]({ id =>
+ s"[$id]"
+ }))
val handler = Await.result(f, timeout)
handler.future
}
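
From client code, the init/subscribe/start choreography documented above collapses to a single execute call. A sketch, with the `executor` and `proc` parameters assumed:

```scala
import scala.concurrent.Future
import scala.util.{Failure, Success}

// `executor` is any ProcessExecutor implementation from this patch.
def runOnce(executor: ProcessExecutor[Int], proc: PiProcess): Future[Any] = {
  val result = executor.execute(proc, Seq("some input"))
  result.onComplete {
    case Success(r)  => println(s"workflow returned: $r")
    case Failure(ex) => ex.printStackTrace()
  }(executor.executionContext)
  result
}
```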
diff --git a/src/com/workflowfm/pew/execution/SingleBlockingExecutor.scala b/src/com/workflowfm/pew/execution/SingleBlockingExecutor.scala
index 55ffb7b..cbe2e7d 100644
--- a/src/com/workflowfm/pew/execution/SingleBlockingExecutor.scala
+++ b/src/com/workflowfm/pew/execution/SingleBlockingExecutor.scala
@@ -6,54 +6,60 @@ import scala.concurrent.duration.Duration
import scala.annotation.tailrec
/**
- * SingleBlockingExecutor fully executes one PiProcess from a map of given PiProcesses.
- * It blocks waiting for every atomic call to finish, so has no concurrency.
- */
-case class SingleBlockingExecutor(processes:Map[String,PiProcess])(implicit val context:ExecutionContext) { // extends ProcessExecutor[Int] with SimplePiObservable[Int] {
- def call(process:PiProcess,args:Seq[PiObject]) = {
- val s = process.execState(args)
- System.err.println(" === INITIAL STATE === \n" + s + "\n === === === === === === === ===")
- val fs = run(s)
- System.err.println(" === FINAL STATE === \n" + fs + "\n === === === === === === === ===")
- val res = fs.resources.sub(process.output._1)
- if (res.isGround) Some(PiObject.get(res))
- else None
+ * SingleBlockingExecutor fully executes one PiProcess from a map of given PiProcesses.
+ * It blocks waiting for every atomic call to finish, so has no concurrency.
+ */
+case class SingleBlockingExecutor(processes: Map[String, PiProcess])(implicit val context: ExecutionContext) { // extends ProcessExecutor[Int] with SimplePiObservable[Int] {
+ def call(process: PiProcess, args: Seq[PiObject]) = {
+ val s = process.execState(args)
+ System.err.println(" === INITIAL STATE === \n" + s + "\n === === === === === === === ===")
+ val fs = run(s)
+ System.err.println(" === FINAL STATE === \n" + fs + "\n === === === === === === === ===")
+ val res = fs.resources.sub(process.output._1)
+ if (res.isGround) Some(PiObject.get(res))
+ else None
}
-
+
@tailrec
- final def run(s:PiState):PiState = {
+ final def run(s: PiState): PiState = {
val ns = s.fullReduce()
if (ns.threads isEmpty) ns
else run((ns /: ns.threads)(handleThread))
}
-
- def handleThread(s:PiState, x:(Int,PiFuture)):PiState = x match { case (ref,f) =>
- handleThread(ref,f,s) getOrElse s
+
+ def handleThread(s: PiState, x: (Int, PiFuture)): PiState = x match {
+ case (ref, f) =>
+ handleThread(ref, f, s) getOrElse s
}
-
- def handleThread(ref:Int, f:PiFuture, s:PiState):Option[PiState] = f match {
- case PiFuture(name, outChan, args) => processes get name match {
- case None => {
- System.err.println("*** ERROR *** Unable to find process: " + name)
- Some(s removeThread ref)
- }
- case Some(p:MetadataAtomicProcess) => {
- val f = p.runMeta(args map (_.obj))
- val res = Await.result(f,Duration.Inf)
- s.result(ref,res._1)
+
+ def handleThread(ref: Int, f: PiFuture, s: PiState): Option[PiState] = f match {
+ case PiFuture(name, outChan, args) =>
+ processes get name match {
+ case None => {
+ System.err.println("*** ERROR *** Unable to find process: " + name)
+ Some(s removeThread ref)
+ }
+ case Some(p: MetadataAtomicProcess) => {
+ val f = p.runMeta(args map (_.obj))
+ val res = Await.result(f, Duration.Inf)
+ s.result(ref, res._1)
+ }
+ case Some(p: CompositeProcess) => {
+ System.err.println("*** Executor encountered composite process thread: " + name); None
+ } // TODO this should never happen!
}
- case Some(p:CompositeProcess) => { System.err.println("*** Executor encountered composite process thread: " + name); None } // TODO this should never happen!
- }
}
-
- def withProc(p:PiProcess):SingleBlockingExecutor = copy(processes = processes + (p.name->p)) withProcs (p.dependencies :_*)
- def withProcs(l:PiProcess*):SingleBlockingExecutor = (this /: l)(_ withProc _)
-
+
+ def withProc(p: PiProcess): SingleBlockingExecutor =
+ copy(processes = processes + (p.name -> p)) withProcs (p.dependencies: _*)
+ def withProcs(l: PiProcess*): SingleBlockingExecutor = (this /: l)(_ withProc _)
+
//override def simulationReady:Boolean = true
-
- def execute(process:PiProcess,args:Seq[Any]):Option[Any] =
- this withProc process call(process,args map PiObject.apply)
+
+ def execute(process: PiProcess, args: Seq[Any]): Option[Any] =
+ this withProc process call (process, args map PiObject.apply)
}
object SingleBlockingExecutor {
- def apply()(implicit context:ExecutionContext):SingleBlockingExecutor = SingleBlockingExecutor(Map[String,PiProcess]())(context)
+ def apply()(implicit context: ExecutionContext): SingleBlockingExecutor =
+ SingleBlockingExecutor(Map[String, PiProcess]())(context)
}
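
A sketch of the blocking entry point (the `proc` parameter is assumed):

```scala
import scala.concurrent.ExecutionContext.Implicits.global

// withProc (called inside execute) also pulls in the process's dependencies
// before running; the call blocks until every atomic call has finished.
def runBlocking(proc: PiProcess): Option[Any] =
  SingleBlockingExecutor().execute(proc, Seq("input"))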
diff --git a/src/com/workflowfm/pew/execution/SingleStateExecutor.scala b/src/com/workflowfm/pew/execution/SingleStateExecutor.scala
index c81f766..716f90c 100644
--- a/src/com/workflowfm/pew/execution/SingleStateExecutor.scala
+++ b/src/com/workflowfm/pew/execution/SingleStateExecutor.scala
@@ -6,100 +6,106 @@ import scala.concurrent._
import scala.util.{Failure, Success}
/**
- * Executes any PiProcess asynchronously.
- * Only holds a single state, so can only execute one workflow at a time.
- *
- * Running a second workflow after one has finished executing can be risky because
- * promises/futures from the first workflow can trigger changes on the state!
- */
+ * Executes any PiProcess asynchronously.
+ * Only holds a single state, so can only execute one workflow at a time.
+ *
+ * Running a second workflow after one has finished executing can be risky because
+ * promises/futures from the first workflow can trigger changes on the state!
+ */
-class SingleStateExecutor(processes:PiProcessStore)
- (override implicit val executionContext: ExecutionContext = ExecutionContext.global)
- extends ProcessExecutor[Int] with SimplePiObservable[Int] {
+class SingleStateExecutor(processes: PiProcessStore)(
+ implicit override val executionContext: ExecutionContext = ExecutionContext.global
+) extends ProcessExecutor[Int]
+ with SimplePiObservable[Int] {
- def this(l:PiProcess*) = this(SimpleProcessStore(l :_*))
-
- var ctr:Int = 0
- var instance:Option[PiInstance[Int]] = None
-
- override protected def init(p:PiProcess,args:Seq[PiObject]):Future[Int] = Future {
+ def this(l: PiProcess*) = this(SimpleProcessStore(l: _*))
+
+ var ctr: Int = 0
+ var instance: Option[PiInstance[Int]] = None
+
+ override protected def init(p: PiProcess, args: Seq[PiObject]): Future[Int] = Future {
if (instance.isDefined) throw new ProcessExecutor.AlreadyExecutingException()
else {
- val inst = PiInstance(ctr,p,args:_*)
+ val inst = PiInstance(ctr, p, args: _*)
instance = Some(inst)
ctr = ctr + 1
ctr - 1
}
}
- override protected def start(id:Int):Unit = instance match {
+ override protected def start(id: Int): Unit = instance match {
case None => publish(PiFailureNoSuchInstance(id))
- case Some(i) =>
+ case Some(i) =>
if (i.id != id) Future.failed(new ProcessExecutor.AlreadyExecutingException())
else {
publish(PiEventStart(i))
run
}
}
-
- def success(id:Int,res:Any) = {
+
+ def success(id: Int, res: Any) = {
instance match {
case Some(i) => {
- publish(PiEventResult(i,res))
+ publish(PiEventResult(i, res))
}
case None => publish(PiFailureNoSuchInstance(id))
}
instance = None
}
-
- def failure(inst:PiInstance[Int]) = {
- publish(PiFailureNoResult(inst))
- instance = None
+
+ def failure(inst: PiInstance[Int]) = {
+ publish(PiFailureNoResult(inst))
+ instance = None
}
-
- final def run:Unit = this.synchronized {
+
+ final def run: Unit = this.synchronized {
instance match {
case None => Unit
case Some(i) =>
val ni = i.reduce
if (ni.completed) {
ni.result match {
- case None => failure(ni)
- case Some(res) => success(ni.id,res)
- }} else {
+ case None => failure(ni)
+ case Some(res) => success(ni.id, res)
+ }
+ } else {
instance = Some(ni.handleThreads(handleThread(ni))._2)
}
- }}
-
- def handleThread(i:PiInstance[Int])(ref:Int,f:PiFuture):Boolean = f match {
- case PiFuture(name, outChan, args) => i.getProc(name) match {
- case None => {
- publish(PiFailureUnknownProcess(i, name))
- false
- }
- case Some(p:MetadataAtomicProcess) => {
- val objs = args map (_.obj)
- publish(PiEventCall(i.id,ref,p,objs))
- p.runMeta(objs).onComplete{
- case Success(res) => {
- publish(PiEventReturn(i.id,ref,PiObject.get(res._1),res._2))
- postResult(i.id,ref,res._1)
- }
- case Failure(ex) => {
- publish(PiFailureAtomicProcessException(i.id,ref,ex))
- instance = None
+ }
+ }
+
+ def handleThread(i: PiInstance[Int])(ref: Int, f: PiFuture): Boolean = f match {
+ case PiFuture(name, outChan, args) =>
+ i.getProc(name) match {
+ case None => {
+ publish(PiFailureUnknownProcess(i, name))
+ false
+ }
+ case Some(p: MetadataAtomicProcess) => {
+ val objs = args map (_.obj)
+ publish(PiEventCall(i.id, ref, p, objs))
+ p.runMeta(objs).onComplete {
+ case Success(res) => {
+ publish(PiEventReturn(i.id, ref, PiObject.get(res._1), res._2))
+ postResult(i.id, ref, res._1)
+ }
+ case Failure(ex) => {
+ publish(PiFailureAtomicProcessException(i.id, ref, ex))
+ instance = None
+ }
}
+ true
}
- true
+ case Some(p: CompositeProcess) => {
+ System.err.println("*** Executor encountered composite process thread: " + name); false
+ } // TODO this should never happen!
}
- case Some(p:CompositeProcess) => { System.err.println("*** Executor encountered composite process thread: " + name); false } // TODO this should never happen!
- }
}
-
- def postResult(id:Int,ref:Int, res:PiObject):Unit = this.synchronized {
+
+ def postResult(id: Int, ref: Int, res: PiObject): Unit = this.synchronized {
instance match {
case None => publish(PiFailureNoSuchInstance(id))
- case Some(i) =>
+ case Some(i) =>
if (i.id != id) publish(PiFailureNoSuchInstance(id))
else {
instance = Some(i.postResult(ref, res))
@@ -107,10 +113,10 @@ class SingleStateExecutor(processes:PiProcessStore)
}
}
}
-
- def simulationReady:Boolean = instance match {
- case None => true
+
+ def simulationReady: Boolean = instance match {
+ case None => true
case Some(i) => i.simulationReady
}
-
+
}
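
A sketch of the single-workflow constraint documented above (names assumed):

```scala
// Sketch only: a second call while the first workflow is still executing
// fails its Future with ProcessExecutor.AlreadyExecutingException.
def demo(proc: PiProcess): Unit = {
  val ex     = new SingleStateExecutor(proc)
  val first  = ex.call(proc, Seq("a"))
  val second = ex.call(proc, Seq("b")) // fails while `first` is in flight
}
```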
diff --git a/src/com/workflowfm/pew/metrics/Measure.scala b/src/com/workflowfm/pew/metrics/Measure.scala
index 1d9288e..cdb3796 100644
--- a/src/com/workflowfm/pew/metrics/Measure.scala
+++ b/src/com/workflowfm/pew/metrics/Measure.scala
@@ -2,84 +2,103 @@ package com.workflowfm.pew.metrics
import com.workflowfm.pew._
-case class ProcessMetrics[KeyT] (piID:KeyT, ref:Int, process:String, start:Long=System.currentTimeMillis(), finish:Option[Long]=None, result:Option[String]=None) {
- def complete(time:Long,result:Any) = copy(finish=Some(time),result=Some(result.toString))
+case class ProcessMetrics[KeyT](
+ piID: KeyT,
+ ref: Int,
+ process: String,
+ start: Long = System.currentTimeMillis(),
+ finish: Option[Long] = None,
+ result: Option[String] = None
+) {
+ def complete(time: Long, result: Any) = copy(finish = Some(time), result = Some(result.toString))
}
-case class WorkflowMetrics[KeyT] (piID:KeyT, start:Long=System.currentTimeMillis(), calls:Int=0, finish:Option[Long]=None, result:Option[String]=None) {
- def complete(time:Long,result:Any) = copy(finish=Some(time),result=Some(result.toString))
- def call = copy(calls=calls+1)
+case class WorkflowMetrics[KeyT](
+ piID: KeyT,
+ start: Long = System.currentTimeMillis(),
+ calls: Int = 0,
+ finish: Option[Long] = None,
+ result: Option[String] = None
+) {
+ def complete(time: Long, result: Any) = copy(finish = Some(time), result = Some(result.toString))
+ def call = copy(calls = calls + 1)
}
class MetricsAggregator[KeyT] {
import scala.collection.immutable.Map
-
- val processMap = scala.collection.mutable.Map[KeyT,Map[Int,ProcessMetrics[KeyT]]]()
- val workflowMap = scala.collection.mutable.Map[KeyT,WorkflowMetrics[KeyT]]()
-
+
+ val processMap = scala.collection.mutable.Map[KeyT, Map[Int, ProcessMetrics[KeyT]]]()
+ val workflowMap = scala.collection.mutable.Map[KeyT, WorkflowMetrics[KeyT]]()
+
// Set
-
- def +=(m:ProcessMetrics[KeyT]) = {
+
+ def +=(m: ProcessMetrics[KeyT]) = {
val old = processMap.get(m.piID) match {
- case None => Map[Int,ProcessMetrics[KeyT]]()
+ case None => Map[Int, ProcessMetrics[KeyT]]()
case Some(map) => map
}
processMap += (m.piID -> (old + (m.ref -> m)))
}
- def +=(m:WorkflowMetrics[KeyT]) = workflowMap += (m.piID->m)
-
+ def +=(m: WorkflowMetrics[KeyT]) = workflowMap += (m.piID -> m)
+
// Update
-
- def ^(piID:KeyT,ref:Int,u:ProcessMetrics[KeyT]=>ProcessMetrics[KeyT]) =
- processMap.get(piID).flatMap(_.get(ref)).map { m => this += u(m) }
- def ^(piID:KeyT,u:WorkflowMetrics[KeyT]=>WorkflowMetrics[KeyT]) =
- workflowMap.get(piID).map { m => this += u(m) }
-
+
+ def ^(piID: KeyT, ref: Int, u: ProcessMetrics[KeyT] => ProcessMetrics[KeyT]) =
+ processMap.get(piID).flatMap(_.get(ref)).map { m =>
+ this += u(m)
+ }
+ def ^(piID: KeyT, u: WorkflowMetrics[KeyT] => WorkflowMetrics[KeyT]) =
+ workflowMap.get(piID).map { m =>
+ this += u(m)
+ }
+
// Handle events
-
- def workflowStart(piID:KeyT, time:Long=System.currentTimeMillis()):Unit =
- this += WorkflowMetrics(piID,time)
- def workflowResult(piID:KeyT, result:Any, time:Long=System.currentTimeMillis()):Unit =
- this ^ (piID,_.complete(time,result))
- def workflowException(piID:KeyT, ex:Throwable, time:Long=System.currentTimeMillis()):Unit =
- this ^ (piID,_.complete(time,"Exception: " + ex.getLocalizedMessage))
-
- def procCall(piID:KeyT, ref:Int, process:String, time:Long=System.currentTimeMillis()):Unit = {
- this += ProcessMetrics(piID,ref,process,time)
- this ^ (piID,_.call)
+
+ def workflowStart(piID: KeyT, time: Long = System.currentTimeMillis()): Unit =
+ this += WorkflowMetrics(piID, time)
+ def workflowResult(piID: KeyT, result: Any, time: Long = System.currentTimeMillis()): Unit =
+ this ^ (piID, _.complete(time, result))
+ def workflowException(piID: KeyT, ex: Throwable, time: Long = System.currentTimeMillis()): Unit =
+ this ^ (piID, _.complete(time, "Exception: " + ex.getLocalizedMessage))
+
+ def procCall(piID: KeyT, ref: Int, process: String, time: Long = System.currentTimeMillis()): Unit = {
+ this += ProcessMetrics(piID, ref, process, time)
+ this ^ (piID, _.call)
}
- def procReturn(piID:KeyT, ref:Int, result:Any, time:Long=System.currentTimeMillis()):Unit =
- this ^ (piID,ref,_.complete(time, result))
- def processException(piID:KeyT, ref:Int, ex:Throwable, time:Long=System.currentTimeMillis()):Unit = processFailure(piID,ref,ex.getLocalizedMessage,time)
- def processFailure(piID:KeyT, ref:Int, ex:String, time:Long=System.currentTimeMillis()):Unit = {
- this ^ (piID,_.complete(time,"Exception: " + ex))
- this ^ (piID,ref,_.complete(time,"Exception: " + ex))
+ def procReturn(piID: KeyT, ref: Int, result: Any, time: Long = System.currentTimeMillis()): Unit =
+ this ^ (piID, ref, _.complete(time, result))
+ def processException(piID: KeyT, ref: Int, ex: Throwable, time: Long = System.currentTimeMillis()): Unit =
+ processFailure(piID, ref, ex.getLocalizedMessage, time)
+ def processFailure(piID: KeyT, ref: Int, ex: String, time: Long = System.currentTimeMillis()): Unit = {
+ this ^ (piID, _.complete(time, "Exception: " + ex))
+ this ^ (piID, ref, _.complete(time, "Exception: " + ex))
}
-
- // Getters
-
- def keys = workflowMap.keys
+
+ // Getters
+
+ def keys = workflowMap.keys
def workflowMetrics = workflowMap.values.toSeq.sortBy(_.start)
- def processMetrics = processMap.values.flatMap(_.values).toSeq.sortBy(_.start)
- def processMetricsOf(id:KeyT) = processMap.getOrElse(id,Map[Int,ProcessMetrics[KeyT]]()).values.toSeq.sortBy(_.start)
+ def processMetrics = processMap.values.flatMap(_.values).toSeq.sortBy(_.start)
+ def processMetricsOf(id: KeyT) =
+ processMap.getOrElse(id, Map[Int, ProcessMetrics[KeyT]]()).values.toSeq.sortBy(_.start)
def processSet = processMap.values.flatMap(_.values.map(_.process)).toSet[String]
}
-class MetricsHandler[KeyT](override val name: String, timeFn: PiMetadata.Key[Long] = PiMetadata.SystemTime )
- extends MetricsAggregator[KeyT] with PiEventHandler[KeyT] {
+class MetricsHandler[KeyT](override val name: String, timeFn: PiMetadata.Key[Long] = PiMetadata.SystemTime)
+ extends MetricsAggregator[KeyT]
+ with PiEventHandler[KeyT] {
- override def apply( e: PiEvent[KeyT] ): Boolean = {
+ override def apply(e: PiEvent[KeyT]): Boolean = {
e match {
- case PiEventStart(i,t) => workflowStart( i.id, timeFn(t) )
- case PiEventResult(i,r,t) => workflowResult( i.id, r, timeFn(t) )
- case PiEventCall(i,r,p,_,t) => procCall( i, r, p.iname, timeFn(t) )
- case PiEventReturn(i,r,s,t) => procReturn( i, r, s, timeFn(t) )
- case PiFailureAtomicProcessException(i,r,m,_,t) => processFailure( i, r, m, timeFn(t) )
+ case PiEventStart(i, t) => workflowStart(i.id, timeFn(t))
+ case PiEventResult(i, r, t) => workflowResult(i.id, r, timeFn(t))
+ case PiEventCall(i, r, p, _, t) => procCall(i, r, p.iname, timeFn(t))
+ case PiEventReturn(i, r, s, t) => procReturn(i, r, s, timeFn(t))
+ case PiFailureAtomicProcessException(i, r, m, _, t) => processFailure(i, r, m, timeFn(t))
case ev: PiFailure[KeyT] =>
- workflowException( ev.id, ev.exception, timeFn(ev.metadata) )
+ workflowException(ev.id, ev.exception, timeFn(ev.metadata))
}
false
}
}
-
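
A wiring sketch for the handler above. The executor parameter and its subscribe behaviour are as used elsewhere in this patch; the handler name is a placeholder.

```scala
// subscribe comes from the PiObservable side of the executors in this patch
// and returns a Future acknowledging the subscription.
def collectMetrics(executor: ProcessExecutor[Int] with PiObservable[Int]): MetricsHandler[Int] = {
  val metrics = new MetricsHandler[Int]("metrics")
  executor.subscribe(metrics)
  metrics
}

// Later, any MetricsOutput can render the aggregated data, e.g.:
// new MetricsPrinter[Int]().apply(collectMetrics(executor))
```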
diff --git a/src/com/workflowfm/pew/metrics/Output.scala b/src/com/workflowfm/pew/metrics/Output.scala
index ac4a5cf..1bddd60 100644
--- a/src/com/workflowfm/pew/metrics/Output.scala
+++ b/src/com/workflowfm/pew/metrics/Output.scala
@@ -5,118 +5,154 @@ import org.apache.commons.lang3.time.DurationFormatUtils
import java.text.SimpleDateFormat
trait MetricsOutput[KeyT] extends (MetricsAggregator[KeyT] => Unit) {
- def and(h:MetricsOutput[KeyT]) = MetricsOutputs(this,h)
+ def and(h: MetricsOutput[KeyT]) = MetricsOutputs(this, h)
}
object MetricsOutput {
- def formatOption[T](v:Option[T], nullValue: String, format:T=>String={ x:T => x.toString }) = v.map(format).getOrElse(nullValue)
- def formatTime(format:String)(time:Long) = new SimpleDateFormat(format).format(time)
- def formatTimeOption(time:Option[Long], format:String, nullValue:String) =
+ def formatOption[T](v: Option[T], nullValue: String, format: T => String = { x: T =>
+ x.toString
+ }) = v.map(format).getOrElse(nullValue)
+ def formatTime(format: String)(time: Long) = new SimpleDateFormat(format).format(time)
+ def formatTimeOption(time: Option[Long], format: String, nullValue: String) =
formatOption(time, nullValue, formatTime(format))
- def formatDuration(from:Long, to:Long, format:String) =
- DurationFormatUtils.formatDuration(to-from,format)
- def formatDuration(from:Option[Long], to:Long, format:String, nullValue:String) =
- from.map { f => DurationFormatUtils.formatDuration(to-f,format).toString } getOrElse(nullValue)
- def formatDuration(from:Option[Long], to:Option[Long], format:String, nullValue:String) =
- from.map { f => to.map {
- t => DurationFormatUtils.formatDuration(t-f,format).toString } getOrElse(nullValue)
- } getOrElse(nullValue)
+ def formatDuration(from: Long, to: Long, format: String) =
+ DurationFormatUtils.formatDuration(to - from, format)
+ def formatDuration(from: Option[Long], to: Long, format: String, nullValue: String) =
+ from.map { f =>
+ DurationFormatUtils.formatDuration(to - f, format).toString
+ } getOrElse (nullValue)
+ def formatDuration(from: Option[Long], to: Option[Long], format: String, nullValue: String) =
+ from.map { f =>
+ to.map { t =>
+ DurationFormatUtils.formatDuration(t - f, format).toString
+ } getOrElse (nullValue)
+ } getOrElse (nullValue)
}
-case class MetricsOutputs[KeyT](handlers:Queue[MetricsOutput[KeyT]]) extends MetricsOutput[KeyT] {
- override def apply(aggregator:MetricsAggregator[KeyT]) = handlers map (_.apply(aggregator))
- override def and(h:MetricsOutput[KeyT]) = MetricsOutputs(handlers :+ h)
+case class MetricsOutputs[KeyT](handlers: Queue[MetricsOutput[KeyT]]) extends MetricsOutput[KeyT] {
+ override def apply(aggregator: MetricsAggregator[KeyT]) = handlers map (_.apply(aggregator))
+ override def and(h: MetricsOutput[KeyT]) = MetricsOutputs(handlers :+ h)
}
object MetricsOutputs {
- def apply[KeyT](handlers:MetricsOutput[KeyT]*):MetricsOutputs[KeyT] = MetricsOutputs[KeyT](Queue[MetricsOutput[KeyT]]() ++ handlers)
+ def apply[KeyT](handlers: MetricsOutput[KeyT]*): MetricsOutputs[KeyT] =
+ MetricsOutputs[KeyT](Queue[MetricsOutput[KeyT]]() ++ handlers)
}
-
trait MetricsStringOutput[KeyT] extends MetricsOutput[KeyT] {
val nullValue = "NULL"
-
- def procHeader(separator:String) = Seq("ID","PID","Process","Start","Finish","Result").mkString(separator)
- def procCSV(separator:String,timeFormat:Option[String])(m:ProcessMetrics[KeyT]) = m match {
- case ProcessMetrics(id,r,p,s,f,res) => timeFormat match {
- case None => Seq(id,r,p,s,MetricsOutput.formatOption(f,nullValue),MetricsOutput.formatOption(res,nullValue)).mkString(separator)
- case Some(format) => Seq(id,r,p,MetricsOutput.formatTime(format)(s),MetricsOutput.formatTimeOption(f,format,nullValue),MetricsOutput.formatOption(res,nullValue)).mkString(separator)
- }
- }
-
- def workflowHeader(separator:String) = Seq("ID","PID","Process","Start","Finish","Result").mkString(separator)
- def workflowCSV(separator:String,timeFormat:Option[String])(m:WorkflowMetrics[KeyT]) = m match {
- case WorkflowMetrics(id,s,c,f,res) => timeFormat match {
- case None => Seq(id,s,c,MetricsOutput.formatOption(f,nullValue),MetricsOutput.formatOption(res,nullValue)).mkString(separator)
- case Some(format) => Seq(id,MetricsOutput.formatTime(format)(s),c,MetricsOutput.formatTimeOption(f,format,nullValue),MetricsOutput.formatOption(res,nullValue)).mkString(separator)
- }
+
+ def procHeader(separator: String) = Seq("ID", "PID", "Process", "Start", "Finish", "Result").mkString(separator)
+ def procCSV(separator: String, timeFormat: Option[String])(m: ProcessMetrics[KeyT]) = m match {
+ case ProcessMetrics(id, r, p, s, f, res) =>
+ timeFormat match {
+ case None =>
+ Seq(id, r, p, s, MetricsOutput.formatOption(f, nullValue), MetricsOutput.formatOption(res, nullValue))
+ .mkString(separator)
+ case Some(format) =>
+ Seq(
+ id,
+ r,
+ p,
+ MetricsOutput.formatTime(format)(s),
+ MetricsOutput.formatTimeOption(f, format, nullValue),
+ MetricsOutput.formatOption(res, nullValue)
+ ).mkString(separator)
+ }
}
-
- def processes(aggregator:MetricsAggregator[KeyT],separator:String,lineSep:String="\n",timeFormat:Option[String]=None) =
- aggregator.processMetrics.map(procCSV(separator,timeFormat)).mkString(lineSep)
- def workflows(aggregator:MetricsAggregator[KeyT],separator:String,lineSep:String="\n",timeFormat:Option[String]=None) =
- aggregator.workflowMetrics.map(workflowCSV(separator,timeFormat)).mkString(lineSep)
-}
+ def workflowHeader(separator: String) = Seq("ID", "Start", "Calls", "Finish", "Result").mkString(separator)
+ def workflowCSV(separator: String, timeFormat: Option[String])(m: WorkflowMetrics[KeyT]) = m match {
+ case WorkflowMetrics(id, s, c, f, res) =>
+ timeFormat match {
+ case None =>
+ Seq(id, s, c, MetricsOutput.formatOption(f, nullValue), MetricsOutput.formatOption(res, nullValue))
+ .mkString(separator)
+ case Some(format) =>
+ Seq(
+ id,
+ MetricsOutput.formatTime(format)(s),
+ c,
+ MetricsOutput.formatTimeOption(f, format, nullValue),
+ MetricsOutput.formatOption(res, nullValue)
+ ).mkString(separator)
+ }
+ }
+ def processes(
+ aggregator: MetricsAggregator[KeyT],
+ separator: String,
+ lineSep: String = "\n",
+ timeFormat: Option[String] = None
+ ) =
+ aggregator.processMetrics.map(procCSV(separator, timeFormat)).mkString(lineSep)
+ def workflows(
+ aggregator: MetricsAggregator[KeyT],
+ separator: String,
+ lineSep: String = "\n",
+ timeFormat: Option[String] = None
+ ) =
+ aggregator.workflowMetrics.map(workflowCSV(separator, timeFormat)).mkString(lineSep)
+}
-class MetricsPrinter[KeyT] extends MetricsStringOutput[KeyT] {
- val separator = "\t| "
- val lineSep = "\n"
+class MetricsPrinter[KeyT] extends MetricsStringOutput[KeyT] {
+ val separator = "\t| "
+ val lineSep = "\n"
val timeFormat = Some("YYYY-MM-dd HH:mm:ss.SSS")
-
- override def apply(aggregator:MetricsAggregator[KeyT]) = {
+
+ override def apply(aggregator: MetricsAggregator[KeyT]) = {
println(
-s"""
+ s"""
Tasks
-----
${procHeader(separator)}
-${processes(aggregator,separator,lineSep,timeFormat)}
+${processes(aggregator, separator, lineSep, timeFormat)}
Workflows
---------
${workflowHeader(separator)}
-${workflows(aggregator,separator,lineSep,timeFormat)}
+${workflows(aggregator, separator, lineSep, timeFormat)}
"""
- )
+ )
}
}
trait FileOutput {
import java.io._
-
- def writeToFile(filePath:String,output:String) = try {
- val file = new File(filePath)
- val bw = new BufferedWriter(new FileWriter(file))
- bw.write(output)
- bw.close()
- } catch {
- case e:Exception => e.printStackTrace()
- }
+
+ def writeToFile(filePath: String, output: String) =
+ try {
+ val file = new File(filePath)
+ val bw = new BufferedWriter(new FileWriter(file))
+ bw.write(output)
+ bw.close()
+ } catch {
+ case e: Exception => e.printStackTrace()
+ }
}
-class MetricsCSVFileOutput[KeyT](path:String,name:String) extends MetricsStringOutput[KeyT] with FileOutput {
+class MetricsCSVFileOutput[KeyT](path: String, name: String) extends MetricsStringOutput[KeyT] with FileOutput {
val separator = ","
-
- override def apply(aggregator:MetricsAggregator[KeyT]) = {
- val taskFile = s"$path$name-tasks.csv"
+
+ override def apply(aggregator: MetricsAggregator[KeyT]) = {
+ val taskFile = s"$path$name-tasks.csv"
val workflowFile = s"$path$name-workflows.csv"
- writeToFile(taskFile, procHeader(separator) + "\n" + processes(aggregator,separator) + "\n")
- writeToFile(workflowFile, workflowHeader(separator) + "\n" + workflows(aggregator,separator) + "\n")
+ writeToFile(taskFile, procHeader(separator) + "\n" + processes(aggregator, separator) + "\n")
+ writeToFile(workflowFile, workflowHeader(separator) + "\n" + workflows(aggregator, separator) + "\n")
}
}
-class MetricsD3Timeline[KeyT](path:String,file:String) extends MetricsOutput[KeyT] with FileOutput {
+class MetricsD3Timeline[KeyT](path: String, file: String) extends MetricsOutput[KeyT] with FileOutput {
import java.io._
-
- override def apply(aggregator:MetricsAggregator[KeyT]) = {
- val result = build(aggregator,System.currentTimeMillis())
+
+ override def apply(aggregator: MetricsAggregator[KeyT]) = {
+ val result = build(aggregator, System.currentTimeMillis())
println(result)
val dataFile = s"$path$file-data.js"
writeToFile(dataFile, result)
}
-
- def build(aggregator:MetricsAggregator[KeyT], now:Long) = {
- var buf:StringBuilder = StringBuilder.newBuilder
+
+ def build(aggregator: MetricsAggregator[KeyT], now: Long) = {
+ val buf: StringBuilder = StringBuilder.newBuilder
buf.append("var processes = [\n")
for (p <- aggregator.processSet) buf.append(s"""\t"$p",\n""")
buf.append("];\n\n")
@@ -125,25 +161,29 @@ class MetricsD3Timeline[KeyT](path:String,file:String) extends MetricsOutput[Key
buf.append("];\n")
buf.toString
}
-
- def workflowEntry(m:WorkflowMetrics[KeyT], agg:MetricsAggregator[KeyT], now:Long, prefix:String) = {
- val processes = (Map[String,Queue[ProcessMetrics[KeyT]]]() /: agg.processMetricsOf(m.piID)){ case (m,p) => {
+
+ def workflowEntry(m: WorkflowMetrics[KeyT], agg: MetricsAggregator[KeyT], now: Long, prefix: String) = {
+ val processes = (Map[String, Queue[ProcessMetrics[KeyT]]]() /: agg.processMetricsOf(m.piID)) {
+ case (m, p) => {
val procs = m.getOrElse(p.process, Queue()) :+ p
- m + (p.process->procs)
- } }
- val data = processes.map { case (proc,i) => processEntry(proc, i, now, prefix + "\t") }
+ m + (p.process -> procs)
+ }
+ }
+ val data = processes.map { case (proc, i) => processEntry(proc, i, now, prefix + "\t") }
s"""$prefix{id: \"${m.piID}\", data: [\n${data.mkString("")}$prefix]},\n"""
}
-
- def processEntry(proc:String, i:Seq[ProcessMetrics[KeyT]], now:Long, prefix:String) = {
- if (i.isEmpty) "" else {
- val times = ("" /: i){ case (s,m) => s"$s${callEntry(now,m,prefix + "\t")}" }
+
+ def processEntry(proc: String, i: Seq[ProcessMetrics[KeyT]], now: Long, prefix: String) = {
+ if (i.isEmpty) ""
+ else {
+ val times = ("" /: i) { case (s, m) => s"$s${callEntry(now, m, prefix + "\t")}" }
s"""$prefix{label: \"$proc\", times: [\n$times$prefix]},\n"""
}
}
-
- def callEntry(now:Long, m:ProcessMetrics[KeyT], prefix:String) = {
- s"""$prefix{"label":"${m.ref}", "process": "${m.process}", "starting_time": ${m.start}, "ending_time": ${m.finish.getOrElse(now)}, "result":"${MetricsOutput.formatOption(m.result,"NONE")}"},\n"""
-
+
+ def callEntry(now: Long, m: ProcessMetrics[KeyT], prefix: String) = {
+ s"""$prefix{"label":"${m.ref}", "process": "${m.process}", "starting_time": ${m.start}, "ending_time": ${m.finish
+ .getOrElse(now)}, "result":"${MetricsOutput.formatOption(m.result, "NONE")}"},\n"""
+
}
}
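
A composition sketch for the outputs above; paths and names are placeholders.

```scala
// `and` chains outputs, so one aggregator is rendered several ways in a
// single call. MetricsOutput extends Function1, hence out(metrics).
def renderAll(metrics: MetricsAggregator[Int]): Unit = {
  val out: MetricsOutput[Int] =
    new MetricsPrinter[Int]() and
      new MetricsCSVFileOutput[Int]("/tmp/", "run1") and
      new MetricsD3Timeline[Int]("/tmp/", "run1")
  out(metrics)
}
```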
diff --git a/src/com/workflowfm/pew/mongodb/MongoExecutor.scala b/src/com/workflowfm/pew/mongodb/MongoExecutor.scala
index a396552..3b5eacb 100644
--- a/src/com/workflowfm/pew/mongodb/MongoExecutor.scala
+++ b/src/com/workflowfm/pew/mongodb/MongoExecutor.scala
@@ -30,35 +30,38 @@ import org.mongodb.scala.Observer
import org.mongodb.scala.ReadConcern
import org.mongodb.scala.WriteConcern
-class MongoExecutor
- (client:MongoClient, db:String, collection:String, processes:PiProcessStore)
- (implicit val executionContext: ExecutionContext = ExecutionContext.global)
- extends ProcessExecutor[ObjectId] with SimplePiObservable[ObjectId] {
+class MongoExecutor(client: MongoClient, db: String, collection: String, processes: PiProcessStore)(
+ implicit val executionContext: ExecutionContext = ExecutionContext.global
+) extends ProcessExecutor[ObjectId]
+ with SimplePiObservable[ObjectId] {
+
+ def this(client: MongoClient, db: String, collection: String, l: PiProcess*) =
+ this(client, db, collection, SimpleProcessStore(l: _*))
- def this(client:MongoClient, db:String, collection:String, l:PiProcess*) = this(client,db,collection,SimpleProcessStore(l :_*))
-
final val CAS_MAX_ATTEMPTS = 10
- final val CAS_WAIT_MS = 1
-
- val codecRegistry = fromRegistries(fromProviders(new PiCodecProvider(processes)),DEFAULT_CODEC_REGISTRY)
- val database: MongoDatabase = client.getDatabase(db).
- withCodecRegistry(codecRegistry).
+ final val CAS_WAIT_MS = 1
+
+ val codecRegistry = fromRegistries(fromProviders(new PiCodecProvider(processes)), DEFAULT_CODEC_REGISTRY)
+ val database: MongoDatabase = client
+ .getDatabase(db)
+ .withCodecRegistry(codecRegistry)
+ //.withReadConcern(ReadConcern.LINEARIZABLE)
+ .withWriteConcern(WriteConcern.MAJORITY)
- val col:MongoCollection[PiInstance[ObjectId]] = database.getCollection(collection)
+ val col: MongoCollection[PiInstance[ObjectId]] = database.getCollection(collection)
- override def init(p:PiProcess,args:Seq[PiObject]):Future[ObjectId] = {
- val oid = new ObjectId
- val inst = PiInstance(oid,p,args:_*)
- col.insertOne(inst).toFuture() map (_=>oid)
+ override def init(p: PiProcess, args: Seq[PiObject]): Future[ObjectId] = {
+ val oid = new ObjectId
+ val inst = PiInstance(oid, p, args: _*)
+ col.insertOne(inst).toFuture() map (_ => oid)
}
-
- override def start(id:ObjectId):Unit = {
- val promise=Promise[PiInstance[ObjectId]]()
- update(id,startUpdate,0,promise)
+
+ override def start(id: ObjectId): Unit = {
+ val promise = Promise[PiInstance[ObjectId]]()
+ update(id, startUpdate, 0, promise)
promise.future.recover({
- case PiException => Unit
- case t:Throwable => publish(PiFailureExceptions(id,t))
+ case PiException => Unit
+ case t: Throwable => publish(PiFailureExceptions(id, t))
})
}
//
@@ -90,189 +93,210 @@ class MongoExecutor
// }
// Future.successful(oid)
-
- protected def startUpdate(i:PiInstance[ObjectId]):PiInstance[ObjectId] = {
+ protected def startUpdate(i: PiInstance[ObjectId]): PiInstance[ObjectId] = {
publish(PiEventStart(i))
i.reduce
}
-
- final def postResult(id:ObjectId, ref:Int, res:MetadataAtomicProcess.MetadataAtomicResult):Unit = {
- publish(PiEventReturn(id,ref,PiObject.get(res._1),res._2))
- val promise=Promise[PiInstance[ObjectId]]()
- update(id,postResultUpdate(ref,res._1),0,promise)
- promise.future.recover({
- case PiException => Unit
- case t:Throwable => publish(PiFailureExceptions(id,t))
+
+ final def postResult(id: ObjectId, ref: Int, res: MetadataAtomicProcess.MetadataAtomicResult): Unit = {
+ publish(PiEventReturn(id, ref, PiObject.get(res._1), res._2))
+ val promise = Promise[PiInstance[ObjectId]]()
+ update(id, postResultUpdate(ref, res._1), 0, promise)
+ promise.future.recover({
+ case PiException => Unit
+ case t: Throwable => publish(PiFailureExceptions(id, t))
})
}
-
- protected def postResultUpdate(ref:Int, res:PiObject)(i:PiInstance[ObjectId]):PiInstance[ObjectId] = {
+
+ protected def postResultUpdate(ref: Int, res: PiObject)(i: PiInstance[ObjectId]): PiInstance[ObjectId] = {
System.err.println("*** [" + i.id + "] Handling result for thread " + ref + " : " + res)
- i.postResult(ref, res).reduce
+ i.postResult(ref, res).reduce
}
-
+
// We give a promise as an argument instead of returning a Future in order to avoid chaining promises at each recursive call.
- final protected def update(id:ObjectId, f:PiInstance[ObjectId]=>PiInstance[ObjectId], attempt:Int, promise:Promise[PiInstance[ObjectId]] ):Unit = try {
- implicit val iid:ObjectId = id
-
- val obs = client.startSession(ClientSessionOptions.builder.causallyConsistent(true).build()) safeThen {
- case Seq(session:ClientSession) => {
- System.err.println("*** [" + id + "] Session started")
- col.find(session,equal("_id",id)) safeThen {
- case Seq() => Future {
- System.err.println("*** [" + id + "] CAS (pre) - Closing session")
- session.close()
- if (attempt < CAS_MAX_ATTEMPTS) throw CASException
- else {
- publish(PiFailureNoSuchInstance(id))
- throw PiException
- }
- }
- case Seq(i:PiInstance[ObjectId]) => {
- System.err.println("*** [" + id + "] Got PiInstance: " + i)
- val (toCall,resobs) = put(i,f(i),col,session) //postResult(i,ref,res,col,session)
- resobs safeThen {
- case Seq() => Future {
- System.err.println("*** [" + id + "] CAS (post) - Closing session")
- session.close()
- if (attempt < CAS_MAX_ATTEMPTS) throw CASException
- else throw CASFailureException(id.toString())
- }
- case Seq(_) => Future {
- System.err.println("*** [" + id + "] Closing session")
- session.close()
- (toCall,i)
+ final protected def update(
+ id: ObjectId,
+ f: PiInstance[ObjectId] => PiInstance[ObjectId],
+ attempt: Int,
+ promise: Promise[PiInstance[ObjectId]]
+ ): Unit =
+ try {
+ implicit val iid: ObjectId = id
+
+ val obs = client.startSession(ClientSessionOptions.builder.causallyConsistent(true).build()) safeThen {
+ case Seq(session: ClientSession) => {
+ System.err.println("*** [" + id + "] Session started")
+ col.find(session, equal("_id", id)) safeThen {
+ case Seq() =>
+ Future {
+ System.err.println("*** [" + id + "] CAS (pre) - Closing session")
+ session.close()
+ if (attempt < CAS_MAX_ATTEMPTS) throw CASException
+ else {
+ publish(PiFailureNoSuchInstance(id))
+ throw PiException
+ }
+ }
+ case Seq(i: PiInstance[ObjectId]) => {
+ System.err.println("*** [" + id + "] Got PiInstance: " + i)
+ val (toCall, resobs) = put(i, f(i), col, session) //postResult(i,ref,res,col,session)
+ resobs safeThen {
+ case Seq() =>
+ Future {
+ System.err.println("*** [" + id + "] CAS (post) - Closing session")
+ session.close()
+ if (attempt < CAS_MAX_ATTEMPTS) throw CASException
+ else throw CASFailureException(id.toString())
+ }
+ case Seq(_) =>
+ Future {
+ System.err.println("*** [" + id + "] Closing session")
+ session.close()
+ (toCall, i)
+ }
+ }
}
}
- }
- }
- }}
- obs.onComplete({
- case Success((toCall,i)) => {
- System.err.println("*** [" + id + "] Success")
- try {
- toCall map runThread(i)
- promise.success(i)
- } catch {
- case (e:Exception) => promise.failure(e)
}
}
- case Failure(CASException) => System.err.println("*** [" + id + "] CAS Retry - attempt: " + (attempt+1)); Thread.sleep(CAS_WAIT_MS); update(id,f,attempt+1,promise)
- case Failure(e) => {
- System.err.println("*** [" + id + "] Failed: " + e.getLocalizedMessage)
- promise.failure(e)
+ obs.onComplete({
+ case Success((toCall, i)) => {
+ System.err.println("*** [" + id + "] Success")
+ try {
+ toCall map runThread(i)
+ promise.success(i)
+ } catch {
+ case (e: Exception) => promise.failure(e)
+ }
+ }
+ case Failure(CASException) =>
+ System.err.println("*** [" + id + "] CAS Retry - attempt: " + (attempt + 1)); Thread.sleep(CAS_WAIT_MS);
+ update(id, f, attempt + 1, promise)
+ case Failure(e) => {
+ System.err.println("*** [" + id + "] Failed: " + e.getLocalizedMessage)
+ promise.failure(e)
+ }
+ })
+ } catch {
+ case (e: Exception) => { // this should never happen!
+ System.err.println("*** [" + id + "] FATAL")
+ e.printStackTrace()
+ publish(PiFailureExceptions(id, e))
+ Future.failed(e)
}
- })
- } catch {
- case (e:Exception) => { // this should never happen!
- System.err.println("*** [" + id + "] FATAL")
- e.printStackTrace()
- publish(PiFailureExceptions(id,e))
- Future.failed(e)
}
- }
-
- protected def put(i:PiInstance[ObjectId], ni:PiInstance[ObjectId], col:MongoCollection[PiInstance[ObjectId]], session:ClientSession):(Seq[(Int,PiFuture)],Observable[_]) = {
- val unique = i.called
- if (ni.completed) ni.result match {
- case None => {
- // Delete first, then announce the result, so that we don't do anything (like close the pool) early
- //System.err.println("*** [" + ni.id + "] Completed with no result!")
- (Seq(),(col.findOneAndDelete(session,and(equal("_id",ni.id),equal("calls",unique))) andThen { case _ => publish(PiFailureNoResult(ni))}))
- }
- case Some(res) => {
- //System.err.println("*** [" + ni.id + "] Completed with result!")
- // Delete first, then announce the result, so that we don't do anything (like close the pool) early
- (Seq(),(col.findOneAndDelete(session,and(equal("_id",ni.id),equal("calls",unique))) andThen {case _ => publish(PiEventResult(ni, res))}))
- }
- } else {
- //System.err.println("*** [" + i.id + "] Handling threads after: " + ref)
- val (toCall,resi) = ni.handleThreads(handleThread(ni)) // may throw exception!
- val futureCalls = toCall flatMap (resi.piFutureOf)
- //System.err.println("*** [" + i.id + "] Updating state after: " + ref)
- (toCall zip futureCalls,col.findOneAndReplace(session,and(equal("_id",i.id),equal("calls",unique)), resi))
- }
- }
-
- protected def handleThread(i:PiInstance[ObjectId])(ref:Int,f:PiFuture):Boolean = {
- System.err.println("*** [" + i.id + "] Handling thread: " + ref + " (" + f.fun + ")")
- f match {
- case PiFuture(name, outChan, args) => i.getProc(name) match {
+ protected def put(
+ i: PiInstance[ObjectId],
+ ni: PiInstance[ObjectId],
+ col: MongoCollection[PiInstance[ObjectId]],
+ session: ClientSession
+ ): (Seq[(Int, PiFuture)], Observable[_]) = {
+ val unique = i.called
+ if (ni.completed) ni.result match {
case None => {
- System.err.println("*** [" + i.id + "] ERROR *** Unable to find process: " + name)
- publish(PiFailureUnknownProcess(i,name))
- throw PiException
+ // Delete first, then announce the result, so that we don't do anything (like close the pool) early
+ //System.err.println("*** [" + ni.id + "] Completed with no result!")
+ (Seq(), (col.findOneAndDelete(session, and(equal("_id", ni.id), equal("calls", unique))) andThen {
+ case _ => publish(PiFailureNoResult(ni))
+ }))
}
- case Some(p:MetadataAtomicProcess) => true
- case Some(p:CompositeProcess) => {
- System.err.println("*** [" + i.id + "] Executor encountered composite process thread: " + name)
- publish(PiFailureAtomicProcessIsComposite(i,name))
- throw PiException
+ case Some(res) => {
+ //System.err.println("*** [" + ni.id + "] Completed with result!")
+ // Delete first, then announce the result, so that we don't do anything (like close the pool) early
+ (Seq(), (col.findOneAndDelete(session, and(equal("_id", ni.id), equal("calls", unique))) andThen {
+ case _ => publish(PiEventResult(ni, res))
+ }))
}
+ } else {
+ //System.err.println("*** [" + i.id + "] Handling threads after: " + ref)
+ val (toCall, resi) = ni.handleThreads(handleThread(ni)) // may throw exception!
+ val futureCalls = toCall flatMap (resi.piFutureOf)
+ //System.err.println("*** [" + i.id + "] Updating state after: " + ref)
+ (toCall zip futureCalls, col.findOneAndReplace(session, and(equal("_id", i.id), equal("calls", unique)), resi))
}
- } }
-
-
- protected def runThread(i:PiInstance[ObjectId])(t:(Int,PiFuture)):Unit = {
+ }
+
+ protected def handleThread(i: PiInstance[ObjectId])(ref: Int, f: PiFuture): Boolean = {
+ System.err.println("*** [" + i.id + "] Handling thread: " + ref + " (" + f.fun + ")")
+ f match {
+ case PiFuture(name, outChan, args) =>
+ i.getProc(name) match {
+ case None => {
+ System.err.println("*** [" + i.id + "] ERROR *** Unable to find process: " + name)
+ publish(PiFailureUnknownProcess(i, name))
+ throw PiException
+ }
+ case Some(p: MetadataAtomicProcess) => true
+ case Some(p: CompositeProcess) => {
+ System.err.println("*** [" + i.id + "] Executor encountered composite process thread: " + name)
+ publish(PiFailureAtomicProcessIsComposite(i, name))
+ throw PiException
+ }
+ }
+ }
+ }
+
+ protected def runThread(i: PiInstance[ObjectId])(t: (Int, PiFuture)): Unit = {
System.err.println("*** [" + i.id + "] Handling thread: " + t._1 + " (" + t._2.fun + ")")
t match {
- case (ref,PiFuture(name, outChan, args)) => i.getProc(name) match {
- case None => {
- // This should never happen! We already checked!
- System.err.println("*** [" + i.id + "] ERROR *** Unable to find process: " + name + " even though we checked already")
- publish(PiFailureUnknownProcess(i, name))
- }
- case Some(p:MetadataAtomicProcess) => {
- val objs = args map (_.obj)
- p.runMeta(objs).onComplete{
- case Success(res) => {
+ case (ref, PiFuture(name, outChan, args)) =>
+ i.getProc(name) match {
+ case None => {
+ // This should never happen! We already checked!
+ System.err.println(
+ "*** [" + i.id + "] ERROR *** Unable to find process: " + name + " even though we checked already"
+ )
+ publish(PiFailureUnknownProcess(i, name))
+ }
+ case Some(p: MetadataAtomicProcess) => {
+ val objs = args map (_.obj)
+ p.runMeta(objs).onComplete {
+ case Success(res) => {
- postResult(i.id,ref,res)
+ postResult(i.id, ref, res)
+ }
+ case Failure(ex) => publish(PiFailureAtomicProcessException(i.id, ref, ex))
+ }
+ System.err.println("*** [" + i.id + "] Called process: " + p.name + " ref:" + ref)
+ }
+ case Some(p: CompositeProcess) => { // This should never happen! We already checked!
+ System.err.println("*** [" + i.id + "] Executor encountered composite process thread: " + name)
+ publish(PiFailureAtomicProcessIsComposite(i, name))
}
- case Failure (ex) => publish(PiFailureAtomicProcessException(i.id,ref,ex))
}
- System.err.println("*** [" + i.id + "] Called process: " + p.name + " ref:" + ref)
- }
- case Some(p:CompositeProcess) => {// This should never happen! We already checked!
- System.err.println("*** [" + i.id + "] Executor encountered composite process thread: " + name)
- publish(PiFailureAtomicProcessIsComposite(i, name))
- }
}
- } }
-
-
- implicit class SafeObservable[T](obs: Observable[T])(implicit id:ObjectId) {
+ }
+
+ implicit class SafeObservable[T](obs: Observable[T])(implicit id: ObjectId) {
def safeThen[U](f: Seq[T] => Future[U]): Future[U] = {
val p = Promise[U]()
- obs.toFuture().onComplete({
- case Success(r:Seq[T]) => try {
- f(r).onComplete({
- case Success(s) => p.success(s)
- case Failure(e:Throwable) => p.failure(e)
- })
- } catch {
- case (exe:Exception) => p.failure(exe)
- }
- case Failure(ex:Throwable) => p.failure(ex)
- })
+ obs
+ .toFuture()
+ .onComplete({
+ case Success(r: Seq[T]) =>
+ try {
+ f(r).onComplete({
+ case Success(s) => p.success(s)
+ case Failure(e: Throwable) => p.failure(e)
+ })
+ } catch {
+ case (exe: Exception) => p.failure(exe)
+ }
+ case Failure(ex: Throwable) => p.failure(ex)
+ })
p.future
}
}
final case object CASException extends Exception("CAS")
- final case object PiException extends Exception("Pi")
- final case class CASFailureException(val id:String, private val cause: Throwable = None.orNull)
- extends Exception("Compare-and-swap failed after " + CAS_MAX_ATTEMPTS + " attempts for id: " + id , cause)
- }
-
-
-
-//class MongoFutureExecutor(client:MongoClient, db:String, collection:String, processes:PiProcessStore,timeout:Duration=10.seconds)(override implicit val context: ExecutionContext = ExecutionContext.global)
-// extends MongoExecutor[PromiseHandler.ResultT](client, db, collection, new PromiseHandler[ObjectId], processes:PiProcessStore,timeout)(context) with FutureExecutor
-//
+ final case object PiException extends Exception("Pi")
+ final case class CASFailureException(val id: String, private val cause: Throwable = None.orNull)
+ extends Exception("Compare-and-swap failed after " + CAS_MAX_ATTEMPTS + " attempts for id: " + id, cause)
+}
+//class MongoFutureExecutor(client:MongoClient, db:String, collection:String, processes:PiProcessStore,timeout:Duration=10.seconds)(override implicit val context: ExecutionContext = ExecutionContext.global)
+// extends MongoExecutor[PromiseHandler.ResultT](client, db, collection, new PromiseHandler[ObjectId], processes:PiProcessStore,timeout)(context) with FutureExecutor
+//
//object MongoFutureExecutor {
-// def apply(client:MongoClient, db:String, collection:String, l:PiProcess*) = new MongoFutureExecutor(client,db,collection,SimpleProcessStore(l :_*))
+// def apply(client:MongoClient, db:String, collection:String, l:PiProcess*) = new MongoFutureExecutor(client,db,collection,SimpleProcessStore(l :_*))
//}
-
-
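
The `update` method above implements optimistic concurrency: every retry reuses the caller's single Promise rather than chaining a new Future per attempt. A minimal, self-contained sketch of that pattern, with illustrative names (`RetryException`, `MaxAttempts`, `WaitMs`) standing in for `CASException`, `CAS_MAX_ATTEMPTS` and `CAS_WAIT_MS`:

    import scala.concurrent.{ExecutionContext, Future, Promise}
    import scala.util.{Failure, Success}

    object CasRetrySketch {
      case object RetryException extends Exception("CAS conflict")
      val MaxAttempts = 10
      val WaitMs = 100L

      private var remainingConflicts = 2 // simulate two lost CAS races

      // Stands in for the session/find/findOneAndReplace round trip.
      def attemptOnce()(implicit ec: ExecutionContext): Future[String] = Future {
        if (remainingConflicts > 0) { remainingConflicts -= 1; throw RetryException }
        "updated"
      }

      // One shared Promise; retries recurse instead of chaining futures.
      def update(attempt: Int, promise: Promise[String])(implicit ec: ExecutionContext): Unit =
        attemptOnce().onComplete {
          case Success(v) => promise.success(v)
          case Failure(RetryException) if attempt < MaxAttempts =>
            Thread.sleep(WaitMs)         // crude backoff, mirroring CAS_WAIT_MS
            update(attempt + 1, promise) // same promise, no new Future
          case Failure(e) => promise.failure(e)
        }

      def run()(implicit ec: ExecutionContext): Future[String] = {
        val p = Promise[String]()
        update(0, p)
        p.future
      }
    }
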
diff --git a/src/com/workflowfm/pew/mongodb/MongoStore.scala b/src/com/workflowfm/pew/mongodb/MongoStore.scala
index ec5d5ac..632d91f 100644
--- a/src/com/workflowfm/pew/mongodb/MongoStore.scala
+++ b/src/com/workflowfm/pew/mongodb/MongoStore.scala
@@ -1,2 +1 @@
package com.workflowfm.pew.mongodb
-
diff --git a/src/com/workflowfm/pew/mongodb/bson/AnyCodec.scala b/src/com/workflowfm/pew/mongodb/bson/AnyCodec.scala
index 660f436..16c1717 100644
--- a/src/com/workflowfm/pew/mongodb/bson/AnyCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/AnyCodec.scala
@@ -12,18 +12,15 @@ import org.bson.codecs.configuration.CodecRegistry
*
* @param registry Registry to use to find `Codec`s.
*/
-class AnyCodec( val registry: CodecRegistry )
- extends Codec[Any] {
+class AnyCodec(val registry: CodecRegistry) extends Codec[Any] {
val classN: String = "class"
val childN: String = "child"
- def codec( clazz: Class[_] ): Codec[Any]
- = registry.get( clazz.asInstanceOf[Class[Any]] )
+ def codec(clazz: Class[_]): Codec[Any] = registry.get(clazz.asInstanceOf[Class[Any]])
// Jev, unset the `ClassLoader` to ensure the default is used.
- def classForName( name: String ): Class[_]
- = ClassLoaderUtil.withClassLoader(null) { Class.forName(name) }
+ def classForName(name: String): Class[_] = ClassLoaderUtil.withClassLoader(null) { Class.forName(name) }
override def encode(writer: BsonWriter, value: Any, ctx: EncoderContext): Unit = {
@@ -32,11 +29,11 @@ class AnyCodec( val registry: CodecRegistry )
// Jev, `getCanonicalName` produces incorrect results for packaged or inner classes.
// We need the `fully qualified` names for `Class.forName`, eg, "some.package.Object$Innerclass"
val className: String = value.getClass.getName // .getCanonicalName)
- classForName( className ) // throw a ClassNotFound error if we won't be able to decode this.
+ classForName(className) // throw a ClassNotFound error if we won't be able to decode this.
writer.writeString(classN, className)
- writer.writeName( childN )
- ctx.encodeWithChildContext( codec( value.getClass ), writer, value )
+ writer.writeName(childN)
+ ctx.encodeWithChildContext(codec(value.getClass), writer, value)
writer.writeEndDocument()
}
@@ -45,10 +42,10 @@ class AnyCodec( val registry: CodecRegistry )
reader.readStartDocument()
- val anyClass: Class[_] = classForName( reader.readString( classN ) )
+ val anyClass: Class[_] = classForName(reader.readString(classN))
- reader.readName( childN )
- val anyValue: Any = ctx.decodeWithChildContext[Any]( codec( anyClass ), reader )
+ reader.readName(childN)
+ val anyValue: Any = ctx.decodeWithChildContext[Any](codec(anyClass), reader)
reader.readEndDocument()
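
A worksheet-style round trip through the `{ "class", "child" }` wrapper that `AnyCodec` produces, assuming only that the backing registry can supply a `StringCodec`:

    import org.bson.{BsonDocument, BsonDocumentWriter}
    import org.bson.codecs.{EncoderContext, StringCodec}
    import org.bson.codecs.configuration.CodecRegistries

    val anyCodec = new AnyCodec(CodecRegistries.fromCodecs(new StringCodec))

    val doc = new BsonDocument()
    anyCodec.encode(new BsonDocumentWriter(doc), "hello", EncoderContext.builder().build())
    // doc is now { "class": "java.lang.String", "child": "hello" }:
    // the stored class name is what decode() feeds to classForName to pick the child codec.
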
diff --git a/src/com/workflowfm/pew/mongodb/bson/BsonUtil.scala b/src/com/workflowfm/pew/mongodb/bson/BsonUtil.scala
index 9fb3f2e..dd2c620 100644
--- a/src/com/workflowfm/pew/mongodb/bson/BsonUtil.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/BsonUtil.scala
@@ -11,30 +11,27 @@ import scala.collection.mutable
object BsonUtil {
- def writeArray[T]( writer: BsonWriter, name: String, col: Seq[T] )( fn: T => Unit ): Unit = {
- writer.writeStartArray( name )
- col.foreach( fn )
+ def writeArray[T](writer: BsonWriter, name: String, col: Seq[T])(fn: T => Unit): Unit = {
+ writer.writeStartArray(name)
+ col.foreach(fn)
writer.writeEndArray()
}
- def readArray[Elem, That]
- ( reader: BsonReader, name: String )
- ( fn: () => Elem )
- ( implicit builder: CanBuildFrom[_, Elem, That] )
- : That = {
+ def readArray[Elem, That](reader: BsonReader, name: String)(
+ fn: () => Elem
+ )(implicit builder: CanBuildFrom[_, Elem, That]): That = {
- reader.readName( name )
+ reader.readName(name)
reader.readStartArray()
var args: mutable.Builder[Elem, That] = builder.apply()
- while (reader.readBsonType() != BsonType.END_OF_DOCUMENT)
- args += fn()
+ while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) args += fn()
reader.readEndArray()
args.result()
}
- def writeObjectSeq[Elem]( writer: BsonWriter, name: String, col: Seq[Elem] ): Unit = {
+ def writeObjectSeq[Elem](writer: BsonWriter, name: String, col: Seq[Elem]): Unit = {
val data: Array[Byte] = {
val baos = new ByteArrayOutputStream()
@@ -47,16 +44,15 @@ object BsonUtil {
baos.toByteArray
}
- writer.writeBinaryData( name, bson.BsonBinary(data) )
+ writer.writeBinaryData(name, bson.BsonBinary(data))
}
- def readObjectSeq[Elem, That]
- ( reader: BsonReader, name: String )
- ( implicit builder: CanBuildFrom[_, Elem, That] )
- : That = {
+ def readObjectSeq[Elem, That](reader: BsonReader, name: String)(
+ implicit builder: CanBuildFrom[_, Elem, That]
+ ): That = {
- val binary: BsonBinary = reader.readBinaryData( name )
- val ois = new ObjectInputStream( new ByteArrayInputStream( binary.getData ) )
+ val binary: BsonBinary = reader.readBinaryData(name)
+ val ois = new ObjectInputStream(new ByteArrayInputStream(binary.getData))
val size: Int = ois.readInt()
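
A worksheet-style round trip through the reshaped array helpers; the target collection type is chosen by the `CanBuildFrom` resolved from the type annotation on `calls`, exactly as `PiInstanceCodec` uses it below:

    import org.bson.{BsonDocument, BsonDocumentReader, BsonDocumentWriter}
    import com.workflowfm.pew.mongodb.bson.BsonUtil._

    val doc = new BsonDocument()
    val w = new BsonDocumentWriter(doc)
    w.writeStartDocument()
    writeArray(w, "calls", Seq(1, 2, 3))(w.writeInt32(_)) // one writer callback per element
    w.writeEndDocument()

    val r = new BsonDocumentReader(doc)
    r.readStartDocument()
    val calls: List[Int] = readArray(r, "calls")(() => r.readInt32())
    r.readEndDocument()
    // calls == List(1, 2, 3)
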
diff --git a/src/com/workflowfm/pew/mongodb/bson/PiCodecProvider.scala b/src/com/workflowfm/pew/mongodb/bson/PiCodecProvider.scala
index 8c65d7e..78381d9 100644
--- a/src/com/workflowfm/pew/mongodb/bson/PiCodecProvider.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/PiCodecProvider.scala
@@ -8,36 +8,36 @@ import org.bson.codecs.configuration.CodecRegistry
import org.bson.types.ObjectId
/**
- * Provider for Codecs for PiObject, Chan, ChanMap, PiResource, PiFuture
- *
- * Created using this as an example:
- * https://github.com/mongodb/mongo-scala-driver/blob/master/bson/src/main/scala/org/mongodb/scala/bson/codecs/DocumentCodecProvider.scala
- */
-class PiCodecProvider(processes:PiProcessStore = SimpleProcessStore()) extends CodecProvider { //bsonTypeClassMap:BsonTypeClassMap
- val OBJIDCLASS: Class[ObjectId] = classOf[ObjectId]
+ * Provider for Codecs for PiObject, Chan, ChanMap, PiResource, PiFuture
+ *
+ * Created using this as an example:
+ * https://github.com/mongodb/mongo-scala-driver/blob/master/bson/src/main/scala/org/mongodb/scala/bson/codecs/DocumentCodecProvider.scala
+ */
+class PiCodecProvider(processes: PiProcessStore = SimpleProcessStore()) extends CodecProvider { //bsonTypeClassMap:BsonTypeClassMap
+ val OBJIDCLASS: Class[ObjectId] = classOf[ObjectId]
val PIPROCCLASS: Class[PiProcess] = classOf[PiProcess]
- val OBJCLASS:Class[PiObject] = classOf[PiObject]
- val CHANCLASS:Class[Chan] = classOf[Chan]
- val CHANMAPCLASS:Class[ChanMap] = classOf[ChanMap]
- val RESOURCECLASS:Class[PiResource] = classOf[PiResource]
- val FUTURECLASS:Class[PiFuture] = classOf[PiFuture]
- val STATECLASS:Class[PiState] = classOf[PiState]
- val TERMCLASS:Class[Term] = classOf[Term]
- val INSTANCECLASS:Class[PiInstance[ObjectId]] = classOf[PiInstance[ObjectId]]
-
- override def get[T](clazz:Class[T], registry:CodecRegistry):Codec[T]
- = (clazz match {
- case OBJIDCLASS => new helper.ObjectIdCodec()
- case PIPROCCLASS => new PiProcessCodec( processes )
- case OBJCLASS => new PiObjectCodec(registry)
- case CHANCLASS => new ChanCodec
- case CHANMAPCLASS => new ChanMapCodec(registry)
- case RESOURCECLASS => new PiResourceCodec(registry)
- case FUTURECLASS => new PiFutureCodec(registry)
- case STATECLASS => new PiStateCodec(registry,processes)
- case TERMCLASS => new TermCodec(registry)
- case INSTANCECLASS => new PiInstanceCodec(registry, processes)
- case _ => null
+ val OBJCLASS: Class[PiObject] = classOf[PiObject]
+ val CHANCLASS: Class[Chan] = classOf[Chan]
+ val CHANMAPCLASS: Class[ChanMap] = classOf[ChanMap]
+ val RESOURCECLASS: Class[PiResource] = classOf[PiResource]
+ val FUTURECLASS: Class[PiFuture] = classOf[PiFuture]
+ val STATECLASS: Class[PiState] = classOf[PiState]
+ val TERMCLASS: Class[Term] = classOf[Term]
+ val INSTANCECLASS: Class[PiInstance[ObjectId]] = classOf[PiInstance[ObjectId]]
+
+ override def get[T](clazz: Class[T], registry: CodecRegistry): Codec[T] =
+ (clazz match {
+ case OBJIDCLASS => new helper.ObjectIdCodec()
+ case PIPROCCLASS => new PiProcessCodec(processes)
+ case OBJCLASS => new PiObjectCodec(registry)
+ case CHANCLASS => new ChanCodec
+ case CHANMAPCLASS => new ChanMapCodec(registry)
+ case RESOURCECLASS => new PiResourceCodec(registry)
+ case FUTURECLASS => new PiFutureCodec(registry)
+ case STATECLASS => new PiStateCodec(registry, processes)
+ case TERMCLASS => new TermCodec(registry)
+ case INSTANCECLASS => new PiInstanceCodec(registry, processes)
+ case _ => null
}).asInstanceOf[Codec[T]]
-}
\ No newline at end of file
+}
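
Returning null from `get` is the `CodecProvider` contract for "not mine", which is what lets this provider be chained ahead of the driver defaults. A typical wiring (a sketch, using the default `SimpleProcessStore`):

    import org.bson.codecs.configuration.CodecRegistries.{fromProviders, fromRegistries}
    import org.mongodb.scala.MongoClient

    val registry = fromRegistries(
      fromProviders(new PiCodecProvider(SimpleProcessStore())),
      MongoClient.DEFAULT_CODEC_REGISTRY // fallback for anything PiCodecProvider returns null for
    )
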
diff --git a/src/com/workflowfm/pew/mongodb/bson/auto/AutoCodecRegistry.scala b/src/com/workflowfm/pew/mongodb/bson/auto/AutoCodecRegistry.scala
index 7d13dda..0433e4b 100644
--- a/src/com/workflowfm/pew/mongodb/bson/auto/AutoCodecRegistry.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/auto/AutoCodecRegistry.scala
@@ -18,10 +18,9 @@ trait AutoCodecStore {
/** Register a new Codec with this `AutoCodecStore`
*
- * @param codec The new codec to register with this `AutoCodecStore`.
+ * @param codec The new codec to register with this `AutoCodecStore`.
*/
- final def registerCodec[CodecT <: Codec[_]](codec: CodecT)
- : CodecT = this.synchronized {
+ final def registerCodec[CodecT <: Codec[_]](codec: CodecT): CodecT = this.synchronized {
val clazz: Class[_] = codec.getEncoderClass
@@ -62,8 +61,8 @@ trait AutoCodecStore {
/** Dumb helper inner-trait to automatically register instances of this trait with
* the parent `AutoCodecStore`.
*/
- trait AutoCodec { this:Codec[_] =>
- registerCodec( this )
+ trait AutoCodec { this: Codec[_] =>
+ registerCodec(this)
}
}
@@ -72,10 +71,10 @@ trait AutoCodecStore {
*/
trait AutoCodecRegistry extends AutoCodecStore with CodecRegistry {
- override def get[T](clazz: Class[T]): Codec[T]
- = registeredCodecs
- .get( clazz )
- .map( _.asInstanceOf[Codec[T]] )
+ override def get[T](clazz: Class[T]): Codec[T] =
+ registeredCodecs
+ .get(clazz)
+ .map(_.asInstanceOf[Codec[T]])
.orNull
}
@@ -83,24 +82,22 @@ trait AutoCodecRegistry extends AutoCodecStore with CodecRegistry {
*/
trait AutoCodecProvider extends AutoCodecStore with CodecProvider {
- override def get[T]( clazz: Class[T], reg: CodecRegistry ): Codec[T]
- = registeredCodecs
- .get( clazz )
- .map( _.asInstanceOf[Codec[T]] )
- .getOrElse( reg.get( clazz ) )
+ override def get[T](clazz: Class[T], reg: CodecRegistry): Codec[T] =
+ registeredCodecs
+ .get(clazz)
+ .map(_.asInstanceOf[Codec[T]])
+ .getOrElse(reg.get(clazz))
}
/** Implements both `CodecRegistry` and `CodecProvider` interfaces using an `AutoCodecStore`.
*/
-trait AutoCodecRegistryExt
- extends AutoCodecProvider with CodecRegistry {
+trait AutoCodecRegistryExt extends AutoCodecProvider with CodecRegistry {
/** Base or builtin `CodecRegistry` to fall back on if a Codec is requested for a class
* which hasn't been registered.
*/
val baseRegistry: CodecRegistry
- override def get[T](clazz: Class[T]): Codec[T]
- = get( clazz, baseRegistry )
-}
\ No newline at end of file
+ override def get[T](clazz: Class[T]): Codec[T] = get(clazz, baseRegistry)
+}
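
A minimal sketch of putting the three traits together: codecs mix in the inner `AutoCodec` trait to self-register on construction, and lookups for anything unregistered fall through to `baseRegistry`:

    import org.bson.codecs.configuration.CodecRegistry
    import org.mongodb.scala.MongoClient

    object MyCodecs extends AutoCodecRegistryExt {
      override val baseRegistry: CodecRegistry = MongoClient.DEFAULT_CODEC_REGISTRY

      // Constructing a codec with the AutoCodec mixin is enough to register it:
      // new SomeClassCodec(...) with AutoCodec
    }
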
diff --git a/src/com/workflowfm/pew/mongodb/bson/auto/AutoCodecTypes.scala b/src/com/workflowfm/pew/mongodb/bson/auto/AutoCodecTypes.scala
index 3cb34ce..779936e 100644
--- a/src/com/workflowfm/pew/mongodb/bson/auto/AutoCodecTypes.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/auto/AutoCodecTypes.scala
@@ -6,10 +6,9 @@ import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
import scala.collection.mutable
import scala.reflect.ClassTag
-abstract class TypedCodec[T]( implicit ct: ClassTag[T] )
- extends Codec[T] {
+abstract class TypedCodec[T](implicit ct: ClassTag[T]) extends Codec[T] {
- val clazz: Class[T] = ct.runtimeClass.asInstanceOf[Class[T]]
+ val clazz: Class[T] = ct.runtimeClass.asInstanceOf[Class[T]]
val _typeVal: String = clazz.getName
final override def getEncoderClass: Class[T] = clazz
@@ -20,26 +19,25 @@ object TypedCodec {
val _typeN: String = "__typeID"
}
-abstract class ClassCodec[T]( implicit ct: ClassTag[T] )
- extends TypedCodec[T]()( ct ) {
+abstract class ClassCodec[T](implicit ct: ClassTag[T]) extends TypedCodec[T]()(ct) {
import TypedCodec._
- def decodeBody( reader: BsonReader, ctx: DecoderContext ): T
- def encodeBody( writer: BsonWriter, value: T, ctx: EncoderContext ): Unit
+ def decodeBody(reader: BsonReader, ctx: DecoderContext): T
+ def encodeBody(writer: BsonWriter, value: T, ctx: EncoderContext): Unit
final override def decode(reader: BsonReader, ctx: DecoderContext): T = {
reader.readStartDocument()
- reader.readString( _typeN )
- val t: T = decodeBody( reader, ctx )
+ reader.readString(_typeN)
+ val t: T = decodeBody(reader, ctx)
reader.readEndDocument()
t
}
final override def encode(writer: BsonWriter, value: T, ctx: EncoderContext): Unit = {
writer.writeStartDocument()
- writer.writeString( _typeN, _typeVal )
- encodeBody( writer, value, ctx )
+ writer.writeString(_typeN, _typeVal)
+ encodeBody(writer, value, ctx)
writer.writeEndDocument()
}
}
@@ -48,20 +46,18 @@ abstract class ClassCodec[T]( implicit ct: ClassTag[T] )
* - Examines the class name when decoding to defer to the correct ClassCodec for de-serialisation
* - Uses the value class to defer to the correct ClassCodec for serialisation.
*/
-class SuperclassCodec[T]( implicit ct: ClassTag[T] )
- extends TypedCodec[T]()( ct ) {
+class SuperclassCodec[T](implicit ct: ClassTag[T]) extends TypedCodec[T]()(ct) {
import TypedCodec._
private val byClass: mutable.Map[Class[_], ClassCodec[_]] = mutable.Map()
private val byClassId: mutable.Map[String, ClassCodec[_]] = mutable.Map()
- def updateWith( codec: ClassCodec[_] ): Unit
- = if ( clazz isAssignableFrom codec.clazz ) {
- println( s"SuperclassCodec(${_typeVal}): Registered subclass '${codec._typeVal}'.")
- byClass.update( codec.clazz, codec )
- byClassId.update( codec._typeVal, codec )
- }
+ def updateWith(codec: ClassCodec[_]): Unit = if (clazz isAssignableFrom codec.clazz) {
+ println(s"SuperclassCodec(${_typeVal}): Registered subclass '${codec._typeVal}'.")
+ byClass.update(codec.clazz, codec)
+ byClassId.update(codec._typeVal, codec)
+ }
def knownChildren: Iterable[ClassCodec[_]] = byClass.values
@@ -69,14 +65,14 @@ class SuperclassCodec[T]( implicit ct: ClassTag[T] )
val mark: BsonReaderMark = reader.getMark
reader.readStartDocument()
- val typeId: String = reader.readString( _typeN )
+ val typeId: String = reader.readString(_typeN)
mark.reset()
- ctx.decodeWithChildContext( byClassId( typeId ), reader ).asInstanceOf[T]
+ ctx.decodeWithChildContext(byClassId(typeId), reader).asInstanceOf[T]
}
- override def encode(writer: BsonWriter, value: T, ctx: EncoderContext): Unit
- = byClass( value.getClass )
+ override def encode(writer: BsonWriter, value: T, ctx: EncoderContext): Unit =
+ byClass(value.getClass)
.asInstanceOf[ClassCodec[T]]
- .encode( writer, value, ctx )
-}
\ No newline at end of file
+ .encode(writer, value, ctx)
+}
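
An illustrative sealed hierarchy (not part of PEW) dispatched through `SuperclassCodec`: each `ClassCodec` body is wrapped as `{ "__typeID": <class name>, ...fields... }`, and the parent peeks at `__typeID` via getMark/reset to choose the child codec:

    import org.bson.{BsonReader, BsonWriter}
    import org.bson.codecs.{DecoderContext, EncoderContext}

    sealed trait Shape
    final case class Circle(r: Double) extends Shape

    class CircleCodec extends ClassCodec[Circle] {
      override def decodeBody(reader: BsonReader, ctx: DecoderContext): Circle =
        Circle(reader.readDouble("r"))
      override def encodeBody(writer: BsonWriter, value: Circle, ctx: EncoderContext): Unit =
        writer.writeDouble("r", value.r)
    }

    val shapeCodec = new SuperclassCodec[Shape]
    shapeCodec.updateWith(new CircleCodec) // encodes as { "__typeID": "...Circle", "r": ... }
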
diff --git a/src/com/workflowfm/pew/mongodb/bson/events/PiEventCallCodec.scala b/src/com/workflowfm/pew/mongodb/bson/events/PiEventCallCodec.scala
index 5d4915e..fce18b5 100644
--- a/src/com/workflowfm/pew/mongodb/bson/events/PiEventCallCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/events/PiEventCallCodec.scala
@@ -11,54 +11,52 @@ class PiEventCallCodec[T](
objCodec: Codec[PiObject],
procCodec: Codec[PiProcess],
metaCodec: Codec[PiMetadataMap]
-
- ) extends ClassCodec[PiEventCall[T]] {
+) extends ClassCodec[PiEventCall[T]] {
import com.workflowfm.pew.mongodb.bson.BsonUtil._
- val piiIdN: String = "piid"
- val callRefN: String = "ref"
+ val piiIdN: String = "piid"
+ val callRefN: String = "ref"
val atomicProcN: String = "proc"
- val argsN: String = "args"
- val timeN: String = "timestamp"
+ val argsN: String = "args"
+ val timeN: String = "timestamp"
override def encodeBody(writer: BsonWriter, value: PiEventCall[T], ctx: EncoderContext): Unit = {
- writer.writeName( piiIdN )
- ctx.encodeWithChildContext( tCodec, writer, value.id )
+ writer.writeName(piiIdN)
+ ctx.encodeWithChildContext(tCodec, writer, value.id)
- writer.writeInt32( callRefN, value.ref )
+ writer.writeInt32(callRefN, value.ref)
- writer.writeName( atomicProcN )
- ctx.encodeWithChildContext( procCodec, writer, value.p )
+ writer.writeName(atomicProcN)
+ ctx.encodeWithChildContext(procCodec, writer, value.p)
- writeArray( writer, argsN, value.args ) {
- ctx.encodeWithChildContext( objCodec, writer, _ )
+ writeArray(writer, argsN, value.args) {
+ ctx.encodeWithChildContext(objCodec, writer, _)
}
- writer.writeName( timeN )
- ctx.encodeWithChildContext( metaCodec, writer, value.metadata )
+ writer.writeName(timeN)
+ ctx.encodeWithChildContext(metaCodec, writer, value.metadata)
}
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiEventCall[T] = {
- reader.readName( piiIdN )
- val tId: T = ctx.decodeWithChildContext( tCodec, reader )
+ reader.readName(piiIdN)
+ val tId: T = ctx.decodeWithChildContext(tCodec, reader)
- val ref: Int = reader.readInt32( callRefN )
+ val ref: Int = reader.readInt32(callRefN)
- val proc: MetadataAtomicProcess
- = ctx.decodeWithChildContext( procCodec, reader )
- .asInstanceOf[MetadataAtomicProcess]
+ val proc: MetadataAtomicProcess = ctx
+ .decodeWithChildContext(procCodec, reader)
+ .asInstanceOf[MetadataAtomicProcess]
- val args: Seq[PiObject]
- = readArray( reader, argsN ) { () =>
- ctx.decodeWithChildContext( objCodec, reader )
- }
+ val args: Seq[PiObject] = readArray(reader, argsN) { () =>
+ ctx.decodeWithChildContext(objCodec, reader)
+ }
- reader.readName( timeN )
- val data: PiMetadataMap = ctx.decodeWithChildContext( metaCodec, reader )
+ reader.readName(timeN)
+ val data: PiMetadataMap = ctx.decodeWithChildContext(metaCodec, reader)
- PiEventCall( tId, ref, proc, args, data )
+ PiEventCall(tId, ref, proc, args, data)
}
}
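
All of the event codecs in this package rely on the same discipline: `decodeBody` consumes fields in exactly the order `encodeBody` wrote them, because `BsonReader.readName`/`readInt32(name)` advance through the document sequentially rather than seeking by key. A self-contained two-field illustration (the `CallRef` type is hypothetical):

    import org.bson.{BsonReader, BsonWriter}
    import org.bson.codecs.{Codec, DecoderContext, EncoderContext}

    final case class CallRef(piid: String, ref: Int)

    class CallRefCodec extends Codec[CallRef] {
      override def encode(w: BsonWriter, v: CallRef, ctx: EncoderContext): Unit = {
        w.writeStartDocument()
        w.writeString("piid", v.piid) // field 1
        w.writeInt32("ref", v.ref)    // field 2
        w.writeEndDocument()
      }
      override def decode(r: BsonReader, ctx: DecoderContext): CallRef = {
        r.readStartDocument()
        val piid = r.readString("piid") // field 1 -- same order as encode
        val ref  = r.readInt32("ref")   // field 2
        r.readEndDocument()
        CallRef(piid, ref)
      }
      override def getEncoderClass: Class[CallRef] = classOf[CallRef]
    }
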
diff --git a/src/com/workflowfm/pew/mongodb/bson/events/PiEventExceptionCodec.scala b/src/com/workflowfm/pew/mongodb/bson/events/PiEventExceptionCodec.scala
index e186ff8..e4a5d64 100644
--- a/src/com/workflowfm/pew/mongodb/bson/events/PiEventExceptionCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/events/PiEventExceptionCodec.scala
@@ -6,39 +6,39 @@ import com.workflowfm.pew.mongodb.bson.auto.ClassCodec
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
import org.bson.{BsonReader, BsonWriter}
-class PiEventExceptionCodec[T]( tCodec: Codec[T], metaCodec: Codec[PiMetadataMap] )
- extends ClassCodec[PiFailureExceptions[T]] {
+class PiEventExceptionCodec[T](tCodec: Codec[T], metaCodec: Codec[PiMetadataMap])
+ extends ClassCodec[PiFailureExceptions[T]] {
import com.workflowfm.pew.mongodb.bson.BsonUtil._
- val tIdN: String = "tId"
- val messageN: String = "msg"
+ val tIdN: String = "tId"
+ val messageN: String = "msg"
val stackTraceN: String = "trace"
- val timeN: String = "timestamp"
+ val timeN: String = "timestamp"
override def encodeBody(writer: BsonWriter, value: PiFailureExceptions[T], ctx: EncoderContext): Unit = {
- writer.writeName( tIdN )
- ctx.encodeWithChildContext( tCodec, writer, value.id )
+ writer.writeName(tIdN)
+ ctx.encodeWithChildContext(tCodec, writer, value.id)
- writer.writeString( messageN, value.message )
- writeObjectSeq( writer, stackTraceN, value.trace.toSeq )
+ writer.writeString(messageN, value.message)
+ writeObjectSeq(writer, stackTraceN, value.trace.toSeq)
- writer.writeName( timeN )
- ctx.encodeWithChildContext( metaCodec, writer, value.metadata )
+ writer.writeName(timeN)
+ ctx.encodeWithChildContext(metaCodec, writer, value.metadata)
}
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiFailureExceptions[T] = {
- reader.readName( tIdN )
- val tId: T = ctx.decodeWithChildContext( tCodec, reader )
+ reader.readName(tIdN)
+ val tId: T = ctx.decodeWithChildContext(tCodec, reader)
- val msg: String = reader.readString( messageN )
- val trace: Array[StackTraceElement] = readObjectSeq( reader, stackTraceN )
+ val msg: String = reader.readString(messageN)
+ val trace: Array[StackTraceElement] = readObjectSeq(reader, stackTraceN)
- reader.readName( timeN )
- val data: PiMetadataMap = ctx.decodeWithChildContext( metaCodec, reader )
-
- PiFailureExceptions( tId, msg, trace, data )
+ reader.readName(timeN)
+ val data: PiMetadataMap = ctx.decodeWithChildContext(metaCodec, reader)
+
+ PiFailureExceptions(tId, msg, trace, data)
}
-}
\ No newline at end of file
+}
diff --git a/src/com/workflowfm/pew/mongodb/bson/events/PiEventProcessExceptionCodec.scala b/src/com/workflowfm/pew/mongodb/bson/events/PiEventProcessExceptionCodec.scala
index 43d3f02..95683ec 100644
--- a/src/com/workflowfm/pew/mongodb/bson/events/PiEventProcessExceptionCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/events/PiEventProcessExceptionCodec.scala
@@ -7,40 +7,40 @@ import com.workflowfm.pew.mongodb.bson.auto.ClassCodec
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
import org.bson.{BsonReader, BsonWriter}
-class PiEventProcessExceptionCodec[T]( tCodec: Codec[T], metaCodec: Codec[PiMetadataMap] )
- extends ClassCodec[PiFailureAtomicProcessException[T]] {
+class PiEventProcessExceptionCodec[T](tCodec: Codec[T], metaCodec: Codec[PiMetadataMap])
+ extends ClassCodec[PiFailureAtomicProcessException[T]] {
- val tIdN: String = "tId"
- val callRefN: String = "ref"
- val messageN: String = "msg"
+ val tIdN: String = "tId"
+ val callRefN: String = "ref"
+ val messageN: String = "msg"
val stackTraceN: String = "trace"
- val timeN: String = "timestamp"
+ val timeN: String = "timestamp"
override def encodeBody(writer: BsonWriter, value: PiFailureAtomicProcessException[T], ctx: EncoderContext): Unit = {
- writer.writeName( tIdN )
- ctx.encodeWithChildContext( tCodec, writer, value.id )
+ writer.writeName(tIdN)
+ ctx.encodeWithChildContext(tCodec, writer, value.id)
- writer.writeInt32( callRefN, value.ref )
- writer.writeString( messageN, value.message )
- writeObjectSeq( writer, stackTraceN, value.trace.toSeq )
+ writer.writeInt32(callRefN, value.ref)
+ writer.writeString(messageN, value.message)
+ writeObjectSeq(writer, stackTraceN, value.trace.toSeq)
- writer.writeName( timeN )
- ctx.encodeWithChildContext( metaCodec, writer, value.metadata )
+ writer.writeName(timeN)
+ ctx.encodeWithChildContext(metaCodec, writer, value.metadata)
}
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiFailureAtomicProcessException[T] = {
- reader.readName( tIdN )
- val tId: T = ctx.decodeWithChildContext( tCodec, reader )
+ reader.readName(tIdN)
+ val tId: T = ctx.decodeWithChildContext(tCodec, reader)
- val ref: Int = reader.readInt32( callRefN )
- val msg: String = reader.readString( messageN )
- val trace: Array[StackTraceElement] = readObjectSeq( reader, stackTraceN )
+ val ref: Int = reader.readInt32(callRefN)
+ val msg: String = reader.readString(messageN)
+ val trace: Array[StackTraceElement] = readObjectSeq(reader, stackTraceN)
- reader.readName( timeN )
- val data: PiMetadataMap = ctx.decodeWithChildContext( metaCodec, reader )
-
- PiFailureAtomicProcessException( tId, ref, msg, trace, data )
+ reader.readName(timeN)
+ val data: PiMetadataMap = ctx.decodeWithChildContext(metaCodec, reader)
+
+ PiFailureAtomicProcessException(tId, ref, msg, trace, data)
}
}
diff --git a/src/com/workflowfm/pew/mongodb/bson/events/PiEventResultCodec.scala b/src/com/workflowfm/pew/mongodb/bson/events/PiEventResultCodec.scala
index 3cd770b..1c780ef 100644
--- a/src/com/workflowfm/pew/mongodb/bson/events/PiEventResultCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/events/PiEventResultCodec.scala
@@ -6,36 +6,36 @@ import com.workflowfm.pew.{PiEventResult, PiInstance}
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
import org.bson.{BsonReader, BsonWriter}
-class PiEventResultCodec[T]( piiCodec: Codec[PiInstance[T]], anyCodec: Codec[Any], metaCodec: Codec[PiMetadataMap] )
- extends ClassCodec[PiEventResult[T]]{
+class PiEventResultCodec[T](piiCodec: Codec[PiInstance[T]], anyCodec: Codec[Any], metaCodec: Codec[PiMetadataMap])
+ extends ClassCodec[PiEventResult[T]] {
- val piiN: String = "pii"
+ val piiN: String = "pii"
val resultN: String = "result"
- val timeN: String = "timestamp"
+ val timeN: String = "timestamp"
override def encodeBody(writer: BsonWriter, value: PiEventResult[T], ctx: EncoderContext): Unit = {
- writer.writeName( piiN )
- ctx.encodeWithChildContext( piiCodec, writer, value.i )
+ writer.writeName(piiN)
+ ctx.encodeWithChildContext(piiCodec, writer, value.i)
- writer.writeName( resultN )
- ctx.encodeWithChildContext( anyCodec, writer, value.res )
+ writer.writeName(resultN)
+ ctx.encodeWithChildContext(anyCodec, writer, value.res)
- writer.writeName( timeN )
- ctx.encodeWithChildContext( metaCodec, writer, value.metadata )
+ writer.writeName(timeN)
+ ctx.encodeWithChildContext(metaCodec, writer, value.metadata)
}
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiEventResult[T] = {
- reader.readName( piiN )
- val pii: PiInstance[T] = ctx.decodeWithChildContext( piiCodec, reader )
+ reader.readName(piiN)
+ val pii: PiInstance[T] = ctx.decodeWithChildContext(piiCodec, reader)
- reader.readName( resultN )
- val result: Any = ctx.decodeWithChildContext( anyCodec, reader )
+ reader.readName(resultN)
+ val result: Any = ctx.decodeWithChildContext(anyCodec, reader)
- reader.readName( timeN )
- val data: PiMetadataMap = ctx.decodeWithChildContext( metaCodec, reader )
-
- PiEventResult( pii, result, data )
+ reader.readName(timeN)
+ val data: PiMetadataMap = ctx.decodeWithChildContext(metaCodec, reader)
+
+ PiEventResult(pii, result, data)
}
}
diff --git a/src/com/workflowfm/pew/mongodb/bson/events/PiEventReturnCodec.scala b/src/com/workflowfm/pew/mongodb/bson/events/PiEventReturnCodec.scala
index 0721389..eecb083 100644
--- a/src/com/workflowfm/pew/mongodb/bson/events/PiEventReturnCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/events/PiEventReturnCodec.scala
@@ -6,41 +6,41 @@ import com.workflowfm.pew.mongodb.bson.auto.ClassCodec
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
import org.bson.{BsonReader, BsonWriter}
-class PiEventReturnCodec[T]( tCodec: Codec[T], anyCodec: Codec[Any], metaCodec: Codec[PiMetadataMap] )
- extends ClassCodec[PiEventReturn[T]] {
+class PiEventReturnCodec[T](tCodec: Codec[T], anyCodec: Codec[Any], metaCodec: Codec[PiMetadataMap])
+ extends ClassCodec[PiEventReturn[T]] {
- val piiN: String = "pii"
- val refN: String = "ref"
- val resN: String = "res"
+ val piiN: String = "pii"
+ val refN: String = "ref"
+ val resN: String = "res"
val timeN: String = "timestamp"
override def encodeBody(writer: BsonWriter, value: PiEventReturn[T], ctx: EncoderContext): Unit = {
- writer.writeName( piiN )
- ctx.encodeWithChildContext( tCodec, writer, value.id )
+ writer.writeName(piiN)
+ ctx.encodeWithChildContext(tCodec, writer, value.id)
- writer.writeInt32( refN, value.ref )
+ writer.writeInt32(refN, value.ref)
- writer.writeName( resN )
- ctx.encodeWithChildContext( anyCodec, writer, value.result )
+ writer.writeName(resN)
+ ctx.encodeWithChildContext(anyCodec, writer, value.result)
- writer.writeName( timeN )
- ctx.encodeWithChildContext( metaCodec, writer, value.metadata )
+ writer.writeName(timeN)
+ ctx.encodeWithChildContext(metaCodec, writer, value.metadata)
}
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiEventReturn[T] = {
- reader.readName( piiN )
- val id: T = ctx.decodeWithChildContext( tCodec, reader )
+ reader.readName(piiN)
+ val id: T = ctx.decodeWithChildContext(tCodec, reader)
- val ref: Int = reader.readInt32( refN )
+ val ref: Int = reader.readInt32(refN)
- reader.readName( resN )
- val result: Any = ctx.decodeWithChildContext( anyCodec, reader )
+ reader.readName(resN)
+ val result: Any = ctx.decodeWithChildContext(anyCodec, reader)
- reader.readName( timeN )
- val data: PiMetadataMap = ctx.decodeWithChildContext( metaCodec, reader )
-
- PiEventReturn( id, ref, result, data )
+ reader.readName(timeN)
+ val data: PiMetadataMap = ctx.decodeWithChildContext(metaCodec, reader)
+
+ PiEventReturn(id, ref, result, data)
}
}
diff --git a/src/com/workflowfm/pew/mongodb/bson/events/PiEventStartCodec.scala b/src/com/workflowfm/pew/mongodb/bson/events/PiEventStartCodec.scala
index ffee828..f036c1c 100644
--- a/src/com/workflowfm/pew/mongodb/bson/events/PiEventStartCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/events/PiEventStartCodec.scala
@@ -6,29 +6,29 @@ import com.workflowfm.pew.{PiEventStart, PiInstance}
import org.bson.{BsonReader, BsonWriter}
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
-class PiEventStartCodec[T]( piiCodec: Codec[PiInstance[T]], metaCodec: Codec[PiMetadataMap] )
- extends ClassCodec[PiEventStart[T]] {
+class PiEventStartCodec[T](piiCodec: Codec[PiInstance[T]], metaCodec: Codec[PiMetadataMap])
+ extends ClassCodec[PiEventStart[T]] {
- val piiN: String = "pii"
+ val piiN: String = "pii"
val timeN: String = "timestamp"
override def encodeBody(writer: BsonWriter, value: PiEventStart[T], ctx: EncoderContext): Unit = {
- writer.writeName( piiN )
- ctx.encodeWithChildContext( piiCodec, writer, value.i )
+ writer.writeName(piiN)
+ ctx.encodeWithChildContext(piiCodec, writer, value.i)
- writer.writeName( timeN )
- ctx.encodeWithChildContext( metaCodec, writer, value.metadata )
+ writer.writeName(timeN)
+ ctx.encodeWithChildContext(metaCodec, writer, value.metadata)
}
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiEventStart[T] = {
- reader.readName( piiN )
- val pii: PiInstance[T] = ctx.decodeWithChildContext( piiCodec, reader )
+ reader.readName(piiN)
+ val pii: PiInstance[T] = ctx.decodeWithChildContext(piiCodec, reader)
- reader.readName( timeN )
- val data: PiMetadataMap = ctx.decodeWithChildContext( metaCodec, reader )
-
- PiEventStart( pii, data )
+ reader.readName(timeN)
+ val data: PiMetadataMap = ctx.decodeWithChildContext(metaCodec, reader)
+
+ PiEventStart(pii, data)
}
}
diff --git a/src/com/workflowfm/pew/mongodb/bson/events/PiFailureAtomicProcessIsCompositeCodec.scala b/src/com/workflowfm/pew/mongodb/bson/events/PiFailureAtomicProcessIsCompositeCodec.scala
index ba0d49e..bbfca74 100644
--- a/src/com/workflowfm/pew/mongodb/bson/events/PiFailureAtomicProcessIsCompositeCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/events/PiFailureAtomicProcessIsCompositeCodec.scala
@@ -6,34 +6,38 @@ import com.workflowfm.pew.{PiFailureAtomicProcessIsComposite, PiInstance}
import org.bson.{BsonReader, BsonWriter}
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
-class PiFailureAtomicProcessIsCompositeCodec[T]( piiCodec: Codec[PiInstance[T]], metaCodec: Codec[PiMetadataMap] )
- extends ClassCodec[PiFailureAtomicProcessIsComposite[T]] {
+class PiFailureAtomicProcessIsCompositeCodec[T](piiCodec: Codec[PiInstance[T]], metaCodec: Codec[PiMetadataMap])
+ extends ClassCodec[PiFailureAtomicProcessIsComposite[T]] {
val piiIdN: String = "piiId"
- val procN: String = "proc"
- val timeN: String = "timestamp"
+ val procN: String = "proc"
+ val timeN: String = "timestamp"
- override def encodeBody(writer: BsonWriter, value: PiFailureAtomicProcessIsComposite[T], ctx: EncoderContext): Unit = {
+ override def encodeBody(
+ writer: BsonWriter,
+ value: PiFailureAtomicProcessIsComposite[T],
+ ctx: EncoderContext
+ ): Unit = {
- writer.writeName( piiIdN )
- ctx.encodeWithChildContext( piiCodec, writer, value.i )
+ writer.writeName(piiIdN)
+ ctx.encodeWithChildContext(piiCodec, writer, value.i)
- writer.writeString( procN, value.process )
+ writer.writeString(procN, value.process)
- writer.writeName( timeN )
- ctx.encodeWithChildContext( metaCodec, writer, value.metadata )
+ writer.writeName(timeN)
+ ctx.encodeWithChildContext(metaCodec, writer, value.metadata)
}
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiFailureAtomicProcessIsComposite[T] = {
- reader.readName( piiIdN )
- val pii: PiInstance[T] = ctx.decodeWithChildContext( piiCodec, reader )
+ reader.readName(piiIdN)
+ val pii: PiInstance[T] = ctx.decodeWithChildContext(piiCodec, reader)
- val proc: String = reader.readString( procN )
+ val proc: String = reader.readString(procN)
- reader.readName( timeN )
- val data: PiMetadataMap = ctx.decodeWithChildContext( metaCodec, reader )
-
- PiFailureAtomicProcessIsComposite( pii, proc, data )
+ reader.readName(timeN)
+ val data: PiMetadataMap = ctx.decodeWithChildContext(metaCodec, reader)
+
+ PiFailureAtomicProcessIsComposite(pii, proc, data)
}
-}
\ No newline at end of file
+}
diff --git a/src/com/workflowfm/pew/mongodb/bson/events/PiFailureNoResultCodec.scala b/src/com/workflowfm/pew/mongodb/bson/events/PiFailureNoResultCodec.scala
index ab4de73..1b36ada 100644
--- a/src/com/workflowfm/pew/mongodb/bson/events/PiFailureNoResultCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/events/PiFailureNoResultCodec.scala
@@ -6,28 +6,28 @@ import com.workflowfm.pew.{PiFailureNoResult, PiInstance}
import org.bson.{BsonReader, BsonWriter}
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
-class PiFailureNoResultCodec[T]( piiCodec: Codec[PiInstance[T]], metaCodec: Codec[PiMetadataMap] )
- extends ClassCodec[PiFailureNoResult[T]] {
+class PiFailureNoResultCodec[T](piiCodec: Codec[PiInstance[T]], metaCodec: Codec[PiMetadataMap])
+ extends ClassCodec[PiFailureNoResult[T]] {
- val piiN: String = "pii"
+ val piiN: String = "pii"
val timeN: String = "timestamp"
override def encodeBody(writer: BsonWriter, value: PiFailureNoResult[T], ctx: EncoderContext): Unit = {
- writer.writeName( piiN )
- ctx.encodeWithChildContext( piiCodec, writer, value.i )
+ writer.writeName(piiN)
+ ctx.encodeWithChildContext(piiCodec, writer, value.i)
- writer.writeName( timeN )
- ctx.encodeWithChildContext( metaCodec, writer, value.metadata )
+ writer.writeName(timeN)
+ ctx.encodeWithChildContext(metaCodec, writer, value.metadata)
}
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiFailureNoResult[T] = {
- reader.readName( piiN )
- val pii: PiInstance[T] = ctx.decodeWithChildContext( piiCodec, reader )
+ reader.readName(piiN)
+ val pii: PiInstance[T] = ctx.decodeWithChildContext(piiCodec, reader)
- reader.readName( timeN )
- val data: PiMetadataMap = ctx.decodeWithChildContext( metaCodec, reader )
-
- PiFailureNoResult( pii, data )
+ reader.readName(timeN)
+ val data: PiMetadataMap = ctx.decodeWithChildContext(metaCodec, reader)
+
+ PiFailureNoResult(pii, data)
}
}
diff --git a/src/com/workflowfm/pew/mongodb/bson/events/PiFailureNoSuchInstanceCodec.scala b/src/com/workflowfm/pew/mongodb/bson/events/PiFailureNoSuchInstanceCodec.scala
index a48ff93..1cf3157 100644
--- a/src/com/workflowfm/pew/mongodb/bson/events/PiFailureNoSuchInstanceCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/events/PiFailureNoSuchInstanceCodec.scala
@@ -6,29 +6,29 @@ import com.workflowfm.pew.mongodb.bson.auto.ClassCodec
import org.bson.{BsonReader, BsonWriter}
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
-class PiFailureNoSuchInstanceCodec[T]( tCodec: Codec[T], metaCodec: Codec[PiMetadataMap] )
- extends ClassCodec[PiFailureNoSuchInstance[T]] {
+class PiFailureNoSuchInstanceCodec[T](tCodec: Codec[T], metaCodec: Codec[PiMetadataMap])
+ extends ClassCodec[PiFailureNoSuchInstance[T]] {
- val tIdN: String = "tId"
+ val tIdN: String = "tId"
val timeN: String = "timestamp"
override def encodeBody(writer: BsonWriter, value: PiFailureNoSuchInstance[T], ctx: EncoderContext): Unit = {
- writer.writeName( tIdN )
- ctx.encodeWithChildContext( tCodec, writer, value.id )
+ writer.writeName(tIdN)
+ ctx.encodeWithChildContext(tCodec, writer, value.id)
- writer.writeName( timeN )
- ctx.encodeWithChildContext( metaCodec, writer, value.metadata )
+ writer.writeName(timeN)
+ ctx.encodeWithChildContext(metaCodec, writer, value.metadata)
}
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiFailureNoSuchInstance[T] = {
- reader.readName( tIdN )
- val tId: T = ctx.decodeWithChildContext( tCodec, reader )
+ reader.readName(tIdN)
+ val tId: T = ctx.decodeWithChildContext(tCodec, reader)
- reader.readName( timeN )
- val data: PiMetadataMap = ctx.decodeWithChildContext( metaCodec, reader )
-
- PiFailureNoSuchInstance( tId, data )
+ reader.readName(timeN)
+ val data: PiMetadataMap = ctx.decodeWithChildContext(metaCodec, reader)
+
+ PiFailureNoSuchInstance(tId, data)
}
}
diff --git a/src/com/workflowfm/pew/mongodb/bson/events/PiFailureUnknownProcessCodec.scala b/src/com/workflowfm/pew/mongodb/bson/events/PiFailureUnknownProcessCodec.scala
index 00384a9..7e1c744 100644
--- a/src/com/workflowfm/pew/mongodb/bson/events/PiFailureUnknownProcessCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/events/PiFailureUnknownProcessCodec.scala
@@ -6,34 +6,34 @@ import com.workflowfm.pew.{PiFailureUnknownProcess, PiInstance}
import org.bson.{BsonReader, BsonWriter}
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
-class PiFailureUnknownProcessCodec[T]( piiCodec: Codec[PiInstance[T]], metaCodec: Codec[PiMetadataMap] )
- extends ClassCodec[PiFailureUnknownProcess[T]]{
+class PiFailureUnknownProcessCodec[T](piiCodec: Codec[PiInstance[T]], metaCodec: Codec[PiMetadataMap])
+ extends ClassCodec[PiFailureUnknownProcess[T]] {
- val piiN: String = "pii"
+ val piiN: String = "pii"
val atomicProcN: String = "proc"
- val timeN: String = "timestamp"
+ val timeN: String = "timestamp"
override def encodeBody(writer: BsonWriter, value: PiFailureUnknownProcess[T], ctx: EncoderContext): Unit = {
- writer.writeName( piiN )
- ctx.encodeWithChildContext( piiCodec, writer, value.i )
+ writer.writeName(piiN)
+ ctx.encodeWithChildContext(piiCodec, writer, value.i)
- writer.writeString( atomicProcN, value.process )
+ writer.writeString(atomicProcN, value.process)
- writer.writeName( timeN )
- ctx.encodeWithChildContext( metaCodec, writer, value.metadata )
+ writer.writeName(timeN)
+ ctx.encodeWithChildContext(metaCodec, writer, value.metadata)
}
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiFailureUnknownProcess[T] = {
- reader.readName( piiN )
- val pii: PiInstance[T] = ctx.decodeWithChildContext( piiCodec, reader )
+ reader.readName(piiN)
+ val pii: PiInstance[T] = ctx.decodeWithChildContext(piiCodec, reader)
- val proc: String = reader.readString( atomicProcN )
+ val proc: String = reader.readString(atomicProcN)
- reader.readName( timeN )
- val data: PiMetadataMap = ctx.decodeWithChildContext( metaCodec, reader )
-
- PiFailureUnknownProcess( pii, proc, data )
+ reader.readName(timeN)
+ val data: PiMetadataMap = ctx.decodeWithChildContext(metaCodec, reader)
+
+ PiFailureUnknownProcess(pii, proc, data)
}
}
diff --git a/src/com/workflowfm/pew/mongodb/bson/helper/BoxedUnitCodec.scala b/src/com/workflowfm/pew/mongodb/bson/helper/BoxedUnitCodec.scala
index f1bfb7f..999df19 100644
--- a/src/com/workflowfm/pew/mongodb/bson/helper/BoxedUnitCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/helper/BoxedUnitCodec.scala
@@ -7,6 +7,6 @@ import org.bson.{BsonReader, BsonWriter}
import scala.runtime.BoxedUnit
class BoxedUnitCodec extends ClassCodec[BoxedUnit] {
- override def decodeBody(reader: BsonReader, ctx: DecoderContext): BoxedUnit = BoxedUnit.UNIT
+ override def decodeBody(reader: BsonReader, ctx: DecoderContext): BoxedUnit = BoxedUnit.UNIT
override def encodeBody(writer: BsonWriter, value: BoxedUnit, ctx: EncoderContext): Unit = {}
}
diff --git a/src/com/workflowfm/pew/mongodb/bson/helper/EitherCodec.scala b/src/com/workflowfm/pew/mongodb/bson/helper/EitherCodec.scala
index f565de2..efefd44 100644
--- a/src/com/workflowfm/pew/mongodb/bson/helper/EitherCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/helper/EitherCodec.scala
@@ -4,43 +4,41 @@ import com.workflowfm.pew.mongodb.bson.auto.{ClassCodec, SuperclassCodec}
import org.bson.{BsonReader, BsonWriter}
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
-class LeftCodec( anyCodec: Codec[Any] )
- extends ClassCodec[Left[Any, Any]] {
+class LeftCodec(anyCodec: Codec[Any]) extends ClassCodec[Left[Any, Any]] {
val valueN: String = "value"
- override def decodeBody(reader: BsonReader, ctx: DecoderContext): Left[Any,Any] = {
- reader.readName( valueN )
- Left( ctx.decodeWithChildContext( anyCodec, reader ) )
+ override def decodeBody(reader: BsonReader, ctx: DecoderContext): Left[Any, Any] = {
+ reader.readName(valueN)
+ Left(ctx.decodeWithChildContext(anyCodec, reader))
}
- override def encodeBody(writer: BsonWriter, value: Left[Any,Any], ctx: EncoderContext): Unit = {
- writer.writeName( valueN )
- ctx.encodeWithChildContext( anyCodec, writer, value.value )
+ override def encodeBody(writer: BsonWriter, value: Left[Any, Any], ctx: EncoderContext): Unit = {
+ writer.writeName(valueN)
+ ctx.encodeWithChildContext(anyCodec, writer, value.value)
}
}
-class RightCodec( anyCodec: Codec[Any] )
- extends ClassCodec[Right[Any, Any]] {
+class RightCodec(anyCodec: Codec[Any]) extends ClassCodec[Right[Any, Any]] {
val valueN: String = "value"
- override def decodeBody(reader: BsonReader, ctx: DecoderContext): Right[Any,Any] = {
- reader.readName( valueN )
- Right( ctx.decodeWithChildContext( anyCodec, reader ) )
+ override def decodeBody(reader: BsonReader, ctx: DecoderContext): Right[Any, Any] = {
+ reader.readName(valueN)
+ Right(ctx.decodeWithChildContext(anyCodec, reader))
}
- override def encodeBody(writer: BsonWriter, value: Right[Any,Any], ctx: EncoderContext): Unit = {
- writer.writeName( valueN )
- ctx.encodeWithChildContext( anyCodec, writer, value.value )
+ override def encodeBody(writer: BsonWriter, value: Right[Any, Any], ctx: EncoderContext): Unit = {
+ writer.writeName(valueN)
+ ctx.encodeWithChildContext(anyCodec, writer, value.value)
}
}
object EitherCodec {
- def apply( anyCodec: Codec[Any] ): Codec[Either[Any, Any]] = {
+ def apply(anyCodec: Codec[Any]): Codec[Either[Any, Any]] = {
val codec = new SuperclassCodec[Either[Any, Any]]
- codec.updateWith( new LeftCodec( anyCodec ) )
- codec.updateWith( new RightCodec( anyCodec ) )
+ codec.updateWith(new LeftCodec(anyCodec))
+ codec.updateWith(new RightCodec(anyCodec))
codec
}
-}
\ No newline at end of file
+}
diff --git a/src/com/workflowfm/pew/mongodb/bson/helper/ObjectIdCodec.scala b/src/com/workflowfm/pew/mongodb/bson/helper/ObjectIdCodec.scala
index 3061f5a..24947f2 100644
--- a/src/com/workflowfm/pew/mongodb/bson/helper/ObjectIdCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/helper/ObjectIdCodec.scala
@@ -4,15 +4,11 @@ import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
import org.bson.types.ObjectId
import org.bson.{BsonReader, BsonWriter}
-class ObjectIdCodec
- extends Codec[ObjectId]{
+class ObjectIdCodec extends Codec[ObjectId] {
- override def encode(writer: BsonWriter, value: ObjectId, ctx: EncoderContext): Unit
- = writer.writeObjectId( value )
+ override def encode(writer: BsonWriter, value: ObjectId, ctx: EncoderContext): Unit = writer.writeObjectId(value)
- override def decode(reader: BsonReader, ctx: DecoderContext): ObjectId
- = reader.readObjectId()
+ override def decode(reader: BsonReader, ctx: DecoderContext): ObjectId = reader.readObjectId()
- override def getEncoderClass: Class[ObjectId]
- = classOf[ObjectId]
+ override def getEncoderClass: Class[ObjectId] = classOf[ObjectId]
}
diff --git a/src/com/workflowfm/pew/mongodb/bson/helper/OptionCodec.scala b/src/com/workflowfm/pew/mongodb/bson/helper/OptionCodec.scala
index cb9493d..9be9a9b 100644
--- a/src/com/workflowfm/pew/mongodb/bson/helper/OptionCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/helper/OptionCodec.scala
@@ -5,31 +5,30 @@ import org.bson.{BsonReader, BsonWriter}
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
class NoneCodec extends ClassCodec[None.type] {
- override def decodeBody(reader: BsonReader, ctx: DecoderContext): None.type = None
+ override def decodeBody(reader: BsonReader, ctx: DecoderContext): None.type = None
override def encodeBody(writer: BsonWriter, value: None.type, ctx: EncoderContext): Unit = {}
}
-class SomeCodec( anyCodec: Codec[Any] )
- extends ClassCodec[Some[Any]] {
+class SomeCodec(anyCodec: Codec[Any]) extends ClassCodec[Some[Any]] {
val valueN: String = "value"
override def decodeBody(reader: BsonReader, ctx: DecoderContext): Some[Any] = {
- reader.readName( valueN )
- Some( ctx.decodeWithChildContext( anyCodec, reader ) )
+ reader.readName(valueN)
+ Some(ctx.decodeWithChildContext(anyCodec, reader))
}
override def encodeBody(writer: BsonWriter, value: Some[Any], ctx: EncoderContext): Unit = {
- writer.writeName( valueN )
- ctx.encodeWithChildContext( anyCodec, writer, value.value)
+ writer.writeName(valueN)
+ ctx.encodeWithChildContext(anyCodec, writer, value.value)
}
}
object OptionCodec {
- def apply( anyCodec: Codec[Any] ): Codec[Option[Any]] = {
+ def apply(anyCodec: Codec[Any]): Codec[Option[Any]] = {
val codec = new SuperclassCodec[Option[Any]]
- codec.updateWith( new NoneCodec )
- codec.updateWith( new SomeCodec( anyCodec ) )
+ codec.updateWith(new NoneCodec)
+ codec.updateWith(new SomeCodec(anyCodec))
codec
}
-}
\ No newline at end of file
+}
diff --git a/src/com/workflowfm/pew/mongodb/bson/helper/Tuple2Codec.scala b/src/com/workflowfm/pew/mongodb/bson/helper/Tuple2Codec.scala
index 0ff6f15..b3947c0 100644
--- a/src/com/workflowfm/pew/mongodb/bson/helper/Tuple2Codec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/helper/Tuple2Codec.scala
@@ -4,29 +4,28 @@ import com.workflowfm.pew.mongodb.bson.auto.ClassCodec
import org.bson.{BsonReader, BsonWriter}
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
-class Tuple2Codec( anyCodec: Codec[Any] )
- extends ClassCodec[(Any,Any)] {
+class Tuple2Codec(anyCodec: Codec[Any]) extends ClassCodec[(Any, Any)] {
val _1N: String = "_1"
val _2N: String = "_2"
override def decodeBody(reader: BsonReader, ctx: DecoderContext): (Any, Any) = {
- reader.readName( _1N )
- val _1 = ctx.decodeWithChildContext( anyCodec, reader )
+ reader.readName(_1N)
+ val _1 = ctx.decodeWithChildContext(anyCodec, reader)
- reader.readName( _2N )
- val _2 = ctx.decodeWithChildContext( anyCodec, reader )
+ reader.readName(_2N)
+ val _2 = ctx.decodeWithChildContext(anyCodec, reader)
(_1, _2)
}
override def encodeBody(writer: BsonWriter, value: (Any, Any), ctx: EncoderContext): Unit = {
- writer.writeName( _1N )
- ctx.encodeWithChildContext( anyCodec, writer, value._1 )
+ writer.writeName(_1N)
+ ctx.encodeWithChildContext(anyCodec, writer, value._1)
- writer.writeName( _2N )
- ctx.encodeWithChildContext( anyCodec, writer, value._2 )
+ writer.writeName(_2N)
+ ctx.encodeWithChildContext(anyCodec, writer, value._2)
}
}
diff --git a/src/com/workflowfm/pew/mongodb/bson/pitypes/PiInstanceCodec.scala b/src/com/workflowfm/pew/mongodb/bson/pitypes/PiInstanceCodec.scala
index 2f60625..2bbe9ff 100644
--- a/src/com/workflowfm/pew/mongodb/bson/pitypes/PiInstanceCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/pitypes/PiInstanceCodec.scala
@@ -7,49 +7,48 @@ import org.bson.codecs._
import org.bson.codecs.configuration.{CodecConfigurationException, CodecRegistry}
import org.bson.types._
-class PiInstanceCodec(registry:CodecRegistry,processes:PiProcessStore) extends Codec[PiInstance[ObjectId]] {
- val stateCodec:Codec[PiState] = registry.get(classOf[PiState])
+class PiInstanceCodec(registry: CodecRegistry, processes: PiProcessStore) extends Codec[PiInstance[ObjectId]] {
+ val stateCodec: Codec[PiState] = registry.get(classOf[PiState])
import BsonUtil._
- override def encode(writer: BsonWriter, value: PiInstance[ObjectId], encoderContext: EncoderContext): Unit = {
- writer.writeStartDocument()
+ override def encode(writer: BsonWriter, value: PiInstance[ObjectId], encoderContext: EncoderContext): Unit = {
+ writer.writeStartDocument()
writer.writeName("_id")
writer.writeObjectId(value.id)
- writeArray( writer, "calls", value.called ) {
+ writeArray(writer, "calls", value.called) {
writer.writeInt32
}
- writer.writeName("process")
- writer.writeString(value.process.iname)
-
- writer.writeName("state")
- encoderContext.encodeWithChildContext(stateCodec,writer,value.state)
+ writer.writeName("process")
+ writer.writeString(value.process.iname)
+
+ writer.writeName("state")
+ encoderContext.encodeWithChildContext(stateCodec, writer, value.state)
writer.writeEndDocument()
}
-
+
override def getEncoderClass: Class[PiInstance[ObjectId]] = classOf[PiInstance[ObjectId]]
override def decode(reader: BsonReader, decoderContext: DecoderContext): PiInstance[ObjectId] = {
- reader.readStartDocument()
- reader.readName("_id")
- val id = reader.readObjectId()
+ reader.readStartDocument()
+ reader.readName("_id")
+ val id = reader.readObjectId()
- val calls: List[Int]
- = readArray( reader, "calls" ) {
- () => reader.readInt32()
- }
+ val calls: List[Int] = readArray(reader, "calls") { () =>
+ reader.readInt32()
+ }
reader.readName("process")
val iname = reader.readString()
- val p = processes.getOrElse(iname,throw new CodecConfigurationException("Unknown PiProcess: " + iname))
-
+ val p = processes.getOrElse(iname, throw new CodecConfigurationException("Unknown PiProcess: " + iname))
+
reader.readName("state")
- val state = decoderContext.decodeWithChildContext(stateCodec,reader)
-
- reader.readEndDocument()
- PiInstance( id, calls, p, state )
+ val state = decoderContext.decodeWithChildContext(stateCodec, reader)
+
+ reader.readEndDocument()
+ PiInstance(id, calls, p, state)
}
-}
\ No newline at end of file
+}
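
PiInstanceCodec.decode resolves the stored process name against the PiProcessStore
and throws a CodecConfigurationException when the name is unknown. A hedged
defensive-wrapper sketch (safeDecode and its Either result are illustrative, not
repository API; project imports omitted):

    import scala.util.Try
    import org.bson.{BsonDocument, BsonDocumentReader}
    import org.bson.codecs.DecoderContext
    import org.bson.codecs.configuration.CodecConfigurationException
    import org.bson.types.ObjectId

    // `codec` is a PiInstanceCodec built with a registry that can decode PiState.
    def safeDecode(doc: BsonDocument): Either[String, PiInstance[ObjectId]] =
      Try(codec.decode(new BsonDocumentReader(doc), DecoderContext.builder().build()))
        .toEither
        .left.map {
          case e: CodecConfigurationException => s"unresolvable instance: ${e.getMessage}"
          case e                              => e.getMessage
        }
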
diff --git a/src/com/workflowfm/pew/mongodb/bson/pitypes/PiMetadataMapCodec.scala b/src/com/workflowfm/pew/mongodb/bson/pitypes/PiMetadataMapCodec.scala
index cc7d5d7..3b4dec7 100644
--- a/src/com/workflowfm/pew/mongodb/bson/pitypes/PiMetadataMapCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/pitypes/PiMetadataMapCodec.scala
@@ -5,37 +5,35 @@ import com.workflowfm.pew.mongodb.bson.auto.ClassCodec
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
import org.bson.{BsonReader, BsonWriter}
-class PiMetadataMapCodec( anyCodec: Codec[Any] )
- extends ClassCodec[PiMetadataMap] {
+class PiMetadataMapCodec(anyCodec: Codec[Any]) extends ClassCodec[PiMetadataMap] {
import com.workflowfm.pew.mongodb.bson.BsonUtil._
val fieldsN: String = "fields"
- val typeN: String = "datatype"
- val valueN: String = "value"
+ val typeN: String = "datatype"
+ val valueN: String = "value"
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiMetadataMap = {
- readArray( reader, fieldsN ) {
- () =>
- reader.readStartDocument()
- val datatype: String = reader.readString( typeN )
+ readArray(reader, fieldsN) { () =>
+ reader.readStartDocument()
+ val datatype: String = reader.readString(typeN)
- reader.readName( valueN )
- val value: Any = ctx.decodeWithChildContext( anyCodec, reader )
- reader.readEndDocument()
+ reader.readName(valueN)
+ val value: Any = ctx.decodeWithChildContext(anyCodec, reader)
+ reader.readEndDocument()
- datatype -> value
+ datatype -> value
}
}
override def encodeBody(writer: BsonWriter, value: PiMetadataMap, ctx: EncoderContext): Unit = {
- writeArray( writer, fieldsN, value.to ) {
+ writeArray(writer, fieldsN, value.to) {
case (key, data) =>
writer.writeStartDocument()
- writer.writeString( typeN, key )
+ writer.writeString(typeN, key)
- writer.writeName( valueN )
- ctx.encodeWithChildContext( anyCodec, writer, data )
+ writer.writeName(valueN)
+ ctx.encodeWithChildContext(anyCodec, writer, data)
writer.writeEndDocument()
}
}
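
Metadata is stored as an array of { datatype, value } documents rather than as a
native BSON object, which keeps arbitrary value types routable through anyCodec.
Reusing the roundTrip helper sketched earlier (the key below is illustrative, and
PiMetadataMap is assumed to be a Map[String, Any], as its use suggests):

    val meta: PiMetadataMap = Map("SystemTime" -> (42L: Any))
    roundTrip(new PiMetadataMapCodec(anyCodec), meta)
    // stored as { "fields": [ { "datatype": "SystemTime", "value": <42L via anyCodec> } ] }
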
diff --git a/src/com/workflowfm/pew/mongodb/bson/pitypes/PiObjectCodec.scala b/src/com/workflowfm/pew/mongodb/bson/pitypes/PiObjectCodec.scala
index 2337c24..8f41bd7 100644
--- a/src/com/workflowfm/pew/mongodb/bson/pitypes/PiObjectCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/pitypes/PiObjectCodec.scala
@@ -10,20 +10,19 @@ import org.mongodb.scala.bson.codecs.DEFAULT_CODEC_REGISTRY
import scala.collection.mutable.Queue
-class PiItemCodec( anyCodec: Codec[Any] )
- extends ClassCodec[PiItem[Any]] {
+class PiItemCodec(anyCodec: Codec[Any]) extends ClassCodec[PiItem[Any]] {
val iN: String = "i"
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiItem[Any] = {
- reader.readName( iN )
- val i: Any = ctx.decodeWithChildContext( anyCodec, reader )
- PiItem( i )
+ reader.readName(iN)
+ val i: Any = ctx.decodeWithChildContext(anyCodec, reader)
+ PiItem(i)
}
override def encodeBody(writer: BsonWriter, value: PiItem[Any], ctx: EncoderContext): Unit = {
- writer.writeName( iN )
- ctx.encodeWithChildContext( anyCodec, writer, value.i )
+ writer.writeName(iN)
+ ctx.encodeWithChildContext(anyCodec, writer, value.i)
}
}
@@ -31,206 +30,197 @@ class ChanCodec extends ClassCodec[Chan] {
val sN: String = "s"
- override def decodeBody(reader: BsonReader, ctx: DecoderContext): Chan
- = Chan( reader.readString( sN ) )
+ override def decodeBody(reader: BsonReader, ctx: DecoderContext): Chan = Chan(reader.readString(sN))
- override def encodeBody(writer: BsonWriter, value: Chan, ctx: EncoderContext): Unit
- = writer.writeString( sN, value.s )
+ override def encodeBody(writer: BsonWriter, value: Chan, ctx: EncoderContext): Unit = writer.writeString(sN, value.s)
}
-class PiPairCodec( objCodec: Codec[PiObject] )
- extends ClassCodec[PiPair] {
+class PiPairCodec(objCodec: Codec[PiObject]) extends ClassCodec[PiPair] {
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiPair = {
reader.readName("l")
- val l = ctx.decodeWithChildContext( objCodec, reader )
+ val l = ctx.decodeWithChildContext(objCodec, reader)
reader.readName("r")
- val r = ctx.decodeWithChildContext( objCodec, reader )
- PiPair(l,r)
+ val r = ctx.decodeWithChildContext(objCodec, reader)
+ PiPair(l, r)
}
override def encodeBody(writer: BsonWriter, value: PiPair, ctx: EncoderContext): Unit = {
writer.writeName("l")
- ctx.encodeWithChildContext( objCodec, writer, value.l )
+ ctx.encodeWithChildContext(objCodec, writer, value.l)
writer.writeName("r")
- ctx.encodeWithChildContext( objCodec, writer, value.r )
+ ctx.encodeWithChildContext(objCodec, writer, value.r)
}
}
-class PiOptCodec( objCodec: Codec[PiObject] )
- extends ClassCodec[PiOpt] {
+class PiOptCodec(objCodec: Codec[PiObject]) extends ClassCodec[PiOpt] {
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiOpt = {
reader.readName("l")
- val l = ctx.decodeWithChildContext( objCodec, reader )
+ val l = ctx.decodeWithChildContext(objCodec, reader)
reader.readName("r")
- val r = ctx.decodeWithChildContext( objCodec, reader )
- PiOpt(l,r)
+ val r = ctx.decodeWithChildContext(objCodec, reader)
+ PiOpt(l, r)
}
override def encodeBody(writer: BsonWriter, value: PiOpt, ctx: EncoderContext): Unit = {
writer.writeName("l")
- ctx.encodeWithChildContext( objCodec, writer, value.l )
+ ctx.encodeWithChildContext(objCodec, writer, value.l)
writer.writeName("r")
- ctx.encodeWithChildContext( objCodec, writer, value.r )
+ ctx.encodeWithChildContext(objCodec, writer, value.r)
}
}
-class PiLeftCodec( objCodec: Codec[PiObject] )
- extends ClassCodec[PiLeft] {
+class PiLeftCodec(objCodec: Codec[PiObject]) extends ClassCodec[PiLeft] {
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiLeft = {
reader.readName("l")
- val l = ctx.decodeWithChildContext( objCodec, reader )
+ val l = ctx.decodeWithChildContext(objCodec, reader)
PiLeft(l)
}
override def encodeBody(writer: BsonWriter, value: PiLeft, ctx: EncoderContext): Unit = {
writer.writeName("l")
- ctx.encodeWithChildContext( objCodec, writer, value.l )
+ ctx.encodeWithChildContext(objCodec, writer, value.l)
}
}
-class PiRightCodec( objCodec: Codec[PiObject] )
- extends ClassCodec[PiRight] {
+class PiRightCodec(objCodec: Codec[PiObject]) extends ClassCodec[PiRight] {
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiRight = {
reader.readName("r")
- val r = ctx.decodeWithChildContext( objCodec, reader )
+ val r = ctx.decodeWithChildContext(objCodec, reader)
PiRight(r)
}
override def encodeBody(writer: BsonWriter, value: PiRight, ctx: EncoderContext): Unit = {
writer.writeName("r")
- ctx.encodeWithChildContext( objCodec, writer, value.r )
+ ctx.encodeWithChildContext(objCodec, writer, value.r)
}
}
-class PiObjectCodec( registry: CodecRegistry = DEFAULT_CODEC_REGISTRY )
- extends SuperclassCodec[PiObject] {
+class PiObjectCodec(registry: CodecRegistry = DEFAULT_CODEC_REGISTRY) extends SuperclassCodec[PiObject] {
- lazy val anyCodec: Codec[Any] = new AnyCodec( registry )
+ lazy val anyCodec: Codec[Any] = new AnyCodec(registry)
- updateWith( new PiItemCodec( anyCodec ) )
- updateWith( new ChanCodec() )
- updateWith( new PiPairCodec( this ) )
- updateWith( new PiOptCodec( this ) )
- updateWith( new PiLeftCodec( this ) )
- updateWith( new PiRightCodec( this ) )
+ updateWith(new PiItemCodec(anyCodec))
+ updateWith(new ChanCodec())
+ updateWith(new PiPairCodec(this))
+ updateWith(new PiOptCodec(this))
+ updateWith(new PiLeftCodec(this))
+ updateWith(new PiRightCodec(this))
}
-class ChanMapCodec(registry:CodecRegistry) extends Codec[ChanMap] {
- val chanCodec:Codec[Chan] = registry.get(classOf[Chan])
- val piobjCodec:Codec[PiObject] = registry.get(classOf[PiObject])
-
- override def encode(writer: BsonWriter, value: ChanMap, encoderContext: EncoderContext): Unit = {
- writer.writeStartDocument()
- writer.writeName("ChanMap")
+class ChanMapCodec(registry: CodecRegistry) extends Codec[ChanMap] {
+ val chanCodec: Codec[Chan] = registry.get(classOf[Chan])
+ val piobjCodec: Codec[PiObject] = registry.get(classOf[PiObject])
+
+ override def encode(writer: BsonWriter, value: ChanMap, encoderContext: EncoderContext): Unit = {
+ writer.writeStartDocument()
+ writer.writeName("ChanMap")
writer.writeStartArray()
- for ((k,v) <- value.map) {
- writer.writeStartDocument()
- writer.writeName("k")
- encoderContext.encodeWithChildContext(chanCodec,writer,k)
- writer.writeName("v")
- encoderContext.encodeWithChildContext(piobjCodec,writer,v)
- writer.writeEndDocument()
- }
- writer.writeEndArray()
- writer.writeEndDocument()
- }
-
+ for ((k, v) <- value.map) {
+ writer.writeStartDocument()
+ writer.writeName("k")
+ encoderContext.encodeWithChildContext(chanCodec, writer, k)
+ writer.writeName("v")
+ encoderContext.encodeWithChildContext(piobjCodec, writer, v)
+ writer.writeEndDocument()
+ }
+ writer.writeEndArray()
+ writer.writeEndDocument()
+ }
+
override def getEncoderClass: Class[ChanMap] = classOf[ChanMap]
override def decode(reader: BsonReader, decoderContext: DecoderContext): ChanMap = {
- reader.readStartDocument()
- reader.readName("ChanMap")
+ reader.readStartDocument()
+ reader.readName("ChanMap")
reader.readStartArray()
- var args:Queue[(Chan,PiObject)] = Queue()
- while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) {
- reader.readStartDocument()
- reader.readName("k")
- val k = decoderContext.decodeWithChildContext(chanCodec,reader)
- reader.readName("v")
- val v = decoderContext.decodeWithChildContext(piobjCodec,reader)
- reader.readEndDocument()
- args+= ((k,v))
- }
- reader.readEndArray()
- reader.readEndDocument()
- ChanMap(args:_*)
+ var args: Queue[(Chan, PiObject)] = Queue()
+ while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) {
+ reader.readStartDocument()
+ reader.readName("k")
+ val k = decoderContext.decodeWithChildContext(chanCodec, reader)
+ reader.readName("v")
+ val v = decoderContext.decodeWithChildContext(piobjCodec, reader)
+ reader.readEndDocument()
+ args += ((k, v))
+ }
+ reader.readEndArray()
+ reader.readEndDocument()
+ ChanMap(args: _*)
}
}
-class PiResourceCodec(registry:CodecRegistry) extends Codec[PiResource] {
- val chanCodec:Codec[Chan] = registry.get(classOf[Chan])
- val piobjCodec:Codec[PiObject] = registry.get(classOf[PiObject])
-
- override def encode(writer: BsonWriter, value: PiResource, encoderContext: EncoderContext): Unit = {
- writer.writeStartDocument()
+class PiResourceCodec(registry: CodecRegistry) extends Codec[PiResource] {
+ val chanCodec: Codec[Chan] = registry.get(classOf[Chan])
+ val piobjCodec: Codec[PiObject] = registry.get(classOf[PiObject])
+
+ override def encode(writer: BsonWriter, value: PiResource, encoderContext: EncoderContext): Unit = {
+ writer.writeStartDocument()
// writer.writeName("_t")
// writer.writeString("PiResource")
writer.writeName("obj")
- encoderContext.encodeWithChildContext(piobjCodec,writer,value.obj)
- writer.writeName("c")
- encoderContext.encodeWithChildContext(chanCodec,writer,value.c)
- writer.writeEndDocument()
+ encoderContext.encodeWithChildContext(piobjCodec, writer, value.obj)
+ writer.writeName("c")
+ encoderContext.encodeWithChildContext(chanCodec, writer, value.c)
+ writer.writeEndDocument()
}
-
+
override def getEncoderClass: Class[PiResource] = classOf[PiResource]
override def decode(reader: BsonReader, decoderContext: DecoderContext): PiResource = {
- reader.readStartDocument()
+ reader.readStartDocument()
// reader.readName("_t")
// reader.readString("PiResource")
reader.readName("obj")
- val obj = decoderContext.decodeWithChildContext(piobjCodec,reader)
- reader.readName("c")
- val c = decoderContext.decodeWithChildContext(chanCodec,reader)
- reader.readEndDocument()
- PiResource(obj,c)
+ val obj = decoderContext.decodeWithChildContext(piobjCodec, reader)
+ reader.readName("c")
+ val c = decoderContext.decodeWithChildContext(chanCodec, reader)
+ reader.readEndDocument()
+ PiResource(obj, c)
}
}
-class PiFutureCodec(registry:CodecRegistry) extends Codec[PiFuture] {
- val chanCodec:Codec[Chan] = registry.get(classOf[Chan])
- val resourceCodec:Codec[PiResource] = registry.get(classOf[PiResource])
+class PiFutureCodec(registry: CodecRegistry) extends Codec[PiFuture] {
+ val chanCodec: Codec[Chan] = registry.get(classOf[Chan])
+ val resourceCodec: Codec[PiResource] = registry.get(classOf[PiResource])
import BsonUtil._
- override def encode(writer: BsonWriter, value: PiFuture, encoderContext: EncoderContext): Unit = {
- writer.writeStartDocument()
+ override def encode(writer: BsonWriter, value: PiFuture, encoderContext: EncoderContext): Unit = {
+ writer.writeStartDocument()
// writer.writeName("_t")
// writer.writeString("PiFuture")
writer.writeName("fun")
writer.writeString(value.fun)
writer.writeName("oc")
- encoderContext.encodeWithChildContext(chanCodec,writer,value.outChan)
+ encoderContext.encodeWithChildContext(chanCodec, writer, value.outChan)
- writeArray( writer, "args", value.args ) {
- encoderContext.encodeWithChildContext( resourceCodec, writer, _ )
+ writeArray(writer, "args", value.args) {
+ encoderContext.encodeWithChildContext(resourceCodec, writer, _)
}
- writer.writeEndDocument()
+ writer.writeEndDocument()
}
-
+
override def getEncoderClass: Class[PiFuture] = classOf[PiFuture]
override def decode(reader: BsonReader, decoderContext: DecoderContext): PiFuture = {
- reader.readStartDocument()
+ reader.readStartDocument()
// reader.readName("_t")
// reader.readString("PiFuture")
- reader.readName("fun")
- val f = reader.readString()
- reader.readName("oc")
- val oc = decoderContext.decodeWithChildContext(chanCodec,reader)
+ reader.readName("fun")
+ val f = reader.readString()
+ reader.readName("oc")
+ val oc = decoderContext.decodeWithChildContext(chanCodec, reader)
- val args: List[PiResource]
- = readArray( reader, "args" ) { () =>
- decoderContext.decodeWithChildContext( resourceCodec, reader )
- }
+ val args: List[PiResource] = readArray(reader, "args") { () =>
+ decoderContext.decodeWithChildContext(resourceCodec, reader)
+ }
- reader.readEndDocument()
- PiFuture( f, oc, args )
+ reader.readEndDocument()
+ PiFuture(f, oc, args)
}
}
-
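
PiObjectCodec registers one ClassCodec per PiObject constructor and passes itself
to the recursive ones, so nested values round-trip with no extra wiring. A quick
sketch with the earlier helper (structural equality assumed from the case-class
constructors used in this diff):

    val piCodec = new PiObjectCodec()
    val nested: PiObject = PiPair(PiItem("x"), PiOpt(Chan("a"), PiItem(1)))
    assert(roundTrip(piCodec, nested) == nested)
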
diff --git a/src/com/workflowfm/pew/mongodb/bson/pitypes/PiProcessCodec.scala b/src/com/workflowfm/pew/mongodb/bson/pitypes/PiProcessCodec.scala
index 1e25bf5..b08c392 100644
--- a/src/com/workflowfm/pew/mongodb/bson/pitypes/PiProcessCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/pitypes/PiProcessCodec.scala
@@ -5,35 +5,33 @@ import org.bson.codecs.configuration.CodecConfigurationException
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
import org.bson.{BsonReader, BsonWriter}
-class PiProcessCodec( store: PiProcessStore )
- extends Codec[PiProcess] {
+class PiProcessCodec(store: PiProcessStore) extends Codec[PiProcess] {
val processNameN = "name"
- def getPiProcess( name: String ): PiProcess = {
- store.get( name ) match {
- case None => throw new CodecConfigurationException(s"Unknown PiProcess: $name")
- case Some( proc ) => proc
+ def getPiProcess(name: String): PiProcess = {
+ store.get(name) match {
+ case None => throw new CodecConfigurationException(s"Unknown PiProcess: $name")
+ case Some(proc) => proc
}
}
override def encode(writer: BsonWriter, value: PiProcess, ctx: EncoderContext): Unit = {
val processName: String = value.iname
- require( value == getPiProcess( processName ), s"PiProcess '$processName' couldn't be recovered." )
+ require(value == getPiProcess(processName), s"PiProcess '$processName' couldn't be recovered.")
writer.writeStartDocument()
- writer.writeString( processNameN, processName )
+ writer.writeString(processNameN, processName)
writer.writeEndDocument()
}
override def decode(reader: BsonReader, ctx: DecoderContext): PiProcess = {
reader.readStartDocument()
- val processName: String = reader.readString( processNameN )
+ val processName: String = reader.readString(processNameN)
reader.readEndDocument()
- getPiProcess( processName )
+ getPiProcess(processName)
}
- override def getEncoderClass: Class[PiProcess]
- = classOf[PiProcess]
+ override def getEncoderClass: Class[PiProcess] = classOf[PiProcess]
}
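
Serializing a PiProcess as just its name keeps the codec tiny but moves the burden
to deployment: the writer's and reader's PiProcessStore must resolve the same
names, and the require above fails fast rather than persisting a document that
could never be decoded. Sketch (store and proc are assumed to exist):

    val processCodec = new PiProcessCodec(store)
    // encode: { "name": proc.iname }; require() rejects unregistered processes.
    // decode: getPiProcess(name), throwing CodecConfigurationException if unknown.
    roundTrip(processCodec, proc)
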
diff --git a/src/com/workflowfm/pew/mongodb/bson/pitypes/PiStateCodec.scala b/src/com/workflowfm/pew/mongodb/bson/pitypes/PiStateCodec.scala
index 3b788c2..429c5c0 100644
--- a/src/com/workflowfm/pew/mongodb/bson/pitypes/PiStateCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/pitypes/PiStateCodec.scala
@@ -6,149 +6,145 @@ import org.bson._
import org.bson.codecs._
import org.bson.codecs.configuration.{CodecConfigurationException, CodecRegistry}
-class PiStateCodec(registry:CodecRegistry,processes:PiProcessStore) extends Codec[PiState] {
- val chanCodec:Codec[Chan] = registry.get(classOf[Chan])
- val termCodec:Codec[Term] = registry.get(classOf[Term])
- val futureCodec:Codec[PiFuture] = registry.get(classOf[PiFuture])
- val cmapCodec:Codec[ChanMap] = registry.get(classOf[ChanMap])
+class PiStateCodec(registry: CodecRegistry, processes: PiProcessStore) extends Codec[PiState] {
+ val chanCodec: Codec[Chan] = registry.get(classOf[Chan])
+ val termCodec: Codec[Term] = registry.get(classOf[Term])
+ val futureCodec: Codec[PiFuture] = registry.get(classOf[PiFuture])
+ val cmapCodec: Codec[ChanMap] = registry.get(classOf[ChanMap])
import BsonUtil._
- override def encode(writer: BsonWriter, value: PiState, encoderContext: EncoderContext): Unit = {
- writer.writeStartDocument()
+ override def encode(writer: BsonWriter, value: PiState, encoderContext: EncoderContext): Unit = {
+ writer.writeStartDocument()
// writer.writeName("_t")
// writer.writeString("PiState")
-
+
writer.writeName("inputs")
writer.writeStartArray()
- for ((c,i) <- value.inputs) {
+ for ((c, i) <- value.inputs) {
writer.writeStartDocument()
writer.writeName("c")
- encoderContext.encodeWithChildContext(chanCodec,writer,c)
+ encoderContext.encodeWithChildContext(chanCodec, writer, c)
writer.writeName("i")
- encoderContext.encodeWithChildContext(termCodec,writer,i)
+ encoderContext.encodeWithChildContext(termCodec, writer, i)
writer.writeEndDocument()
}
- writer.writeEndArray()
+ writer.writeEndArray()
- writer.writeName("outputs")
+ writer.writeName("outputs")
writer.writeStartArray()
- for ((c,o) <- value.outputs) {
+ for ((c, o) <- value.outputs) {
writer.writeStartDocument()
writer.writeName("c")
- encoderContext.encodeWithChildContext(chanCodec,writer,c)
+ encoderContext.encodeWithChildContext(chanCodec, writer, c)
writer.writeName("o")
- encoderContext.encodeWithChildContext(termCodec,writer,o)
+ encoderContext.encodeWithChildContext(termCodec, writer, o)
writer.writeEndDocument()
}
- writer.writeEndArray()
-
- writer.writeName("calls")
+ writer.writeEndArray()
+
+ writer.writeName("calls")
writer.writeStartArray()
- for (f <- value.calls) encoderContext.encodeWithChildContext(futureCodec,writer,f)
- writer.writeEndArray()
-
- writer.writeName("threads")
+ for (f <- value.calls) encoderContext.encodeWithChildContext(futureCodec, writer, f)
+ writer.writeEndArray()
+
+ writer.writeName("threads")
writer.writeStartArray()
- for ((i,t) <- value.threads) {
+ for ((i, t) <- value.threads) {
writer.writeStartDocument()
writer.writeName("i")
writer.writeInt32(i)
writer.writeName("t")
- encoderContext.encodeWithChildContext(futureCodec,writer,t)
+ encoderContext.encodeWithChildContext(futureCodec, writer, t)
writer.writeEndDocument()
}
- writer.writeEndArray()
-
- writer.writeName("tCtr")
- writer.writeInt32(value.threadCtr)
-
- writer.writeName("fCtr")
- writer.writeInt32(value.freshCtr)
-
- writer.writeName("procs")
+ writer.writeEndArray()
+
+ writer.writeName("tCtr")
+ writer.writeInt32(value.threadCtr)
+
+ writer.writeName("fCtr")
+ writer.writeInt32(value.freshCtr)
+
+ writer.writeName("procs")
writer.writeStartArray()
- for ((_,p) <- value.processes) writer.writeString(p.iname)
- writer.writeEndArray()
-
- writer.writeName("map")
- encoderContext.encodeWithChildContext(cmapCodec,writer,value.resources)
-
- writer.writeEndDocument()
+ for ((_, p) <- value.processes) writer.writeString(p.iname)
+ writer.writeEndArray()
+
+ writer.writeName("map")
+ encoderContext.encodeWithChildContext(cmapCodec, writer, value.resources)
+
+ writer.writeEndDocument()
}
-
+
override def getEncoderClass: Class[PiState] = classOf[PiState]
override def decode(reader: BsonReader, decoderContext: DecoderContext): PiState = {
- reader.readStartDocument()
+ reader.readStartDocument()
// reader.readName("_t")
// reader.readString("PiState")
- val inputs: List[(Chan, Input)]
- = readArray( reader, "inputs" ) { () =>
- reader.readStartDocument()
- reader.readName("c")
- val c = decoderContext.decodeWithChildContext(chanCodec,reader)
- reader.readName("i")
- val i = decoderContext.decodeWithChildContext(termCodec,reader).asInstanceOf[Input] // TODO handle exception
- reader.readEndDocument()
- (c,i)
- }
+ val inputs: List[(Chan, Input)] = readArray(reader, "inputs") { () =>
+ reader.readStartDocument()
+ reader.readName("c")
+ val c = decoderContext.decodeWithChildContext(chanCodec, reader)
+ reader.readName("i")
+ val i = decoderContext.decodeWithChildContext(termCodec, reader).asInstanceOf[Input] // TODO handle exception
+ reader.readEndDocument()
+ (c, i)
+ }
- val outputs: List[(Chan, Output)]
- = readArray( reader, "outputs" ) { () =>
- reader.readStartDocument()
- reader.readName("c")
- val c = decoderContext.decodeWithChildContext(chanCodec,reader)
- reader.readName("o")
- val o = decoderContext.decodeWithChildContext(termCodec,reader).asInstanceOf[Output] // TODO handle exception
- reader.readEndDocument()
- (c,o)
- }
+ val outputs: List[(Chan, Output)] = readArray(reader, "outputs") { () =>
+ reader.readStartDocument()
+ reader.readName("c")
+ val c = decoderContext.decodeWithChildContext(chanCodec, reader)
+ reader.readName("o")
+ val o = decoderContext.decodeWithChildContext(termCodec, reader).asInstanceOf[Output] // TODO handle exception
+ reader.readEndDocument()
+ (c, o)
+ }
- val calls: List[PiFuture]
- = readArray( reader, "calls" ) { () =>
- decoderContext.decodeWithChildContext(futureCodec,reader)
- }
+ val calls: List[PiFuture] = readArray(reader, "calls") { () =>
+ decoderContext.decodeWithChildContext(futureCodec, reader)
+ }
+
+ val threads: List[(Int, PiFuture)] = readArray(reader, "threads") { () =>
+ reader.readStartDocument()
+ reader.readName("i")
+ val i = reader.readInt32()
+ reader.readName("t")
+ val t = decoderContext.decodeWithChildContext(futureCodec, reader)
+ reader.readEndDocument()
+ (i, t)
+ }
- val threads: List[(Int, PiFuture)]
- = readArray( reader, "threads" ) { () =>
- reader.readStartDocument()
- reader.readName("i")
- val i = reader.readInt32()
- reader.readName("t")
- val t = decoderContext.decodeWithChildContext(futureCodec,reader)
- reader.readEndDocument()
- (i,t)
- }
-
reader.readName("tCtr")
val tCtr = reader.readInt32()
-
+
reader.readName("fCtr")
val fCtr = reader.readInt32()
- val procs: List[PiProcess]
- = readArray( reader, "procs" ) { () =>
- val processName: String = reader.readString()
- processes.get( processName ) match {
- case None => throw new CodecConfigurationException("Unknown PiProcess: " + processName)
- case Some( process ) => process
- }
+ val procs: List[PiProcess] = readArray(reader, "procs") { () =>
+ val processName: String = reader.readString()
+ processes.get(processName) match {
+ case None => throw new CodecConfigurationException("Unknown PiProcess: " + processName)
+ case Some(process) => process
}
-
+ }
+
reader.readName("map")
- val cMap = decoderContext.decodeWithChildContext(cmapCodec,reader)
-
- reader.readEndDocument()
- PiState(
- Map(inputs:_*),
- Map(outputs:_*),
+ val cMap = decoderContext.decodeWithChildContext(cmapCodec, reader)
+
+ reader.readEndDocument()
+ PiState(
+ Map(inputs: _*),
+ Map(outputs: _*),
calls,
- Map(threads:_*),
- tCtr,fCtr,
- PiProcessStore.mapOf(procs:_*),
+ Map(threads: _*),
+ tCtr,
+ fCtr,
+ PiProcessStore.mapOf(procs: _*),
cMap
)
}
-}
\ No newline at end of file
+}
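
The asInstanceOf casts flagged with TODOs above will surface corrupt data as a bare
ClassCastException. One hedged alternative (not repository code) is to make the
expectation explicit inside decode:

    import org.bson.codecs.configuration.CodecConfigurationException

    // Drop-in replacement for `.asInstanceOf[Input]` in the "inputs" loop:
    val i: Input = decoderContext.decodeWithChildContext(termCodec, reader) match {
      case in: Input => in
      case other     => throw new CodecConfigurationException(s"Expected Input term, got: $other")
    }
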
diff --git a/src/com/workflowfm/pew/mongodb/bson/pitypes/TermCodec.scala b/src/com/workflowfm/pew/mongodb/bson/pitypes/TermCodec.scala
index bb13f5e..0c18aff 100644
--- a/src/com/workflowfm/pew/mongodb/bson/pitypes/TermCodec.scala
+++ b/src/com/workflowfm/pew/mongodb/bson/pitypes/TermCodec.scala
@@ -9,35 +9,36 @@ import org.mongodb.scala.bson.codecs.DEFAULT_CODEC_REGISTRY
import scala.collection.mutable.ArrayBuffer
-class TermCodec(registry: CodecRegistry) extends Codec[Term] {
- def this() = this(CodecRegistries.fromRegistries(CodecRegistries.fromCodecs(new PiObjectCodec),DEFAULT_CODEC_REGISTRY))
+class TermCodec(registry: CodecRegistry) extends Codec[Term] {
+ def this() =
+ this(CodecRegistries.fromRegistries(CodecRegistries.fromCodecs(new PiObjectCodec), DEFAULT_CODEC_REGISTRY))
import BsonUtil._
- val piobjCodec:Codec[PiObject] = registry.get(classOf[PiObject])
-
+ val piobjCodec: Codec[PiObject] = registry.get(classOf[PiObject])
+
override def encode(writer: BsonWriter, value: Term, encoderContext: EncoderContext): Unit = {
writer.writeStartDocument()
writer.writeName("_t")
value match {
- case Devour(c,v) => {
+ case Devour(c, v) => {
writer.writeString("Devour")
writer.writeName("c")
writer.writeString(c)
writer.writeName("v")
writer.writeString(v)
}
- case In(c,v,cont) => {
- writer.writeString("In")
+ case In(c, v, cont) => {
+ writer.writeString("In")
writer.writeName("c")
writer.writeString(c)
writer.writeName("v")
writer.writeString(v)
writer.writeName("t")
- encoderContext.encodeWithChildContext(this,writer,cont)
+ encoderContext.encodeWithChildContext(this, writer, cont)
}
- case ParIn(c,lv,rv,left,right) => {
- writer.writeString("ParIn")
+ case ParIn(c, lv, rv, left, right) => {
+ writer.writeString("ParIn")
writer.writeName("c")
writer.writeString(c)
writer.writeName("cl")
@@ -45,12 +46,12 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
writer.writeName("cr")
writer.writeString(rv)
writer.writeName("l")
- encoderContext.encodeWithChildContext(this,writer,left)
+ encoderContext.encodeWithChildContext(this, writer, left)
writer.writeName("r")
- encoderContext.encodeWithChildContext(this,writer,right)
+ encoderContext.encodeWithChildContext(this, writer, right)
}
- case ParInI(c,lv,rv,cont) => {
- writer.writeString("ParInI")
+ case ParInI(c, lv, rv, cont) => {
+ writer.writeString("ParInI")
writer.writeName("c")
writer.writeString(c)
writer.writeName("cl")
@@ -58,10 +59,10 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
writer.writeName("cr")
writer.writeString(rv)
writer.writeName("t")
- encoderContext.encodeWithChildContext(this,writer,cont)
- }
- case WithIn(c,lv,rv,left,right) => {
- writer.writeString("WithIn")
+ encoderContext.encodeWithChildContext(this, writer, cont)
+ }
+ case WithIn(c, lv, rv, left, right) => {
+ writer.writeString("WithIn")
writer.writeName("c")
writer.writeString(c)
writer.writeName("cl")
@@ -69,19 +70,19 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
writer.writeName("cr")
writer.writeString(rv)
writer.writeName("l")
- encoderContext.encodeWithChildContext(this,writer,left)
+ encoderContext.encodeWithChildContext(this, writer, left)
writer.writeName("r")
- encoderContext.encodeWithChildContext(this,writer,right)
- }
- case Out(c,obj) => {
- writer.writeString("Out")
+ encoderContext.encodeWithChildContext(this, writer, right)
+ }
+ case Out(c, obj) => {
+ writer.writeString("Out")
writer.writeName("c")
writer.writeString(c)
writer.writeName("v")
- encoderContext.encodeWithChildContext(piobjCodec,writer,obj)
+ encoderContext.encodeWithChildContext(piobjCodec, writer, obj)
}
- case ParOut(c,lv,rv,left,right) => {
- writer.writeString("ParOut")
+ case ParOut(c, lv, rv, left, right) => {
+ writer.writeString("ParOut")
writer.writeName("c")
writer.writeString(c)
writer.writeName("cl")
@@ -89,30 +90,30 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
writer.writeName("cr")
writer.writeString(rv)
writer.writeName("l")
- encoderContext.encodeWithChildContext(this,writer,left)
+ encoderContext.encodeWithChildContext(this, writer, left)
writer.writeName("r")
- encoderContext.encodeWithChildContext(this,writer,right)
+ encoderContext.encodeWithChildContext(this, writer, right)
}
- case LeftOut(c,lc,cont) => {
- writer.writeString("LeftOut")
+ case LeftOut(c, lc, cont) => {
+ writer.writeString("LeftOut")
writer.writeName("c")
writer.writeString(c)
writer.writeName("cl")
writer.writeString(lc)
writer.writeName("t")
- encoderContext.encodeWithChildContext(this,writer,cont)
+ encoderContext.encodeWithChildContext(this, writer, cont)
}
- case RightOut(c,rc,cont) => {
- writer.writeString("RightOut")
+ case RightOut(c, rc, cont) => {
+ writer.writeString("RightOut")
writer.writeName("c")
writer.writeString(c)
writer.writeName("cr")
writer.writeString(rc)
writer.writeName("t")
- encoderContext.encodeWithChildContext(this,writer,cont)
+ encoderContext.encodeWithChildContext(this, writer, cont)
}
- case PiCut(z,cl,cr,left,right) => {
- writer.writeString("PiCut")
+ case PiCut(z, cl, cr, left, right) => {
+ writer.writeString("PiCut")
writer.writeName("z")
writer.writeString(z)
writer.writeName("cl")
@@ -120,23 +121,23 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
writer.writeName("cr")
writer.writeString(cr)
writer.writeName("l")
- encoderContext.encodeWithChildContext(this,writer,left)
+ encoderContext.encodeWithChildContext(this, writer, left)
writer.writeName("r")
- encoderContext.encodeWithChildContext(this,writer,right)
+ encoderContext.encodeWithChildContext(this, writer, right)
}
case PiCall(name, args) => { // TODO we might want to have an independent PiCall codec for PiState
- writer.writeString("PiCall")
+ writer.writeString("PiCall")
writer.writeName("name")
writer.writeString(name)
- writeArray( writer, "args", args ) {
- encoderContext.encodeWithChildContext( piobjCodec, writer, _ )
+ writeArray(writer, "args", args) {
+ encoderContext.encodeWithChildContext(piobjCodec, writer, _)
}
}
//encoderContext.encodeWithChildContext(this,writer,r)
- }
- writer.writeEndDocument()
+ }
+ writer.writeEndDocument()
}
override def getEncoderClass: Class[Term] = classOf[Term]
@@ -144,13 +145,13 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
override def decode(reader: BsonReader, decoderContext: DecoderContext): Term = {
reader.readStartDocument()
reader.readName("_t")
- val ret:Term = reader.readString() match {
+ val ret: Term = reader.readString() match {
case "Devour" => {
reader.readName("c")
val c = reader.readString()
reader.readName("v")
val v = reader.readString()
- Devour(c,v)
+ Devour(c, v)
}
case "In" => {
reader.readName("c")
@@ -158,8 +159,8 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
reader.readName("v")
val v = reader.readString()
reader.readName("t")
- val cont = decoderContext.decodeWithChildContext(this,reader)
- In(c,v,cont)
+ val cont = decoderContext.decodeWithChildContext(this, reader)
+ In(c, v, cont)
}
case "ParIn" => {
reader.readName("c")
@@ -169,10 +170,10 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
reader.readName("cr")
val cr = reader.readString()
reader.readName("l")
- val l = decoderContext.decodeWithChildContext(this,reader)
+ val l = decoderContext.decodeWithChildContext(this, reader)
reader.readName("r")
- val r = decoderContext.decodeWithChildContext(this,reader)
- ParIn(c,cl,cr,l,r)
+ val r = decoderContext.decodeWithChildContext(this, reader)
+ ParIn(c, cl, cr, l, r)
}
case "ParInI" => {
reader.readName("c")
@@ -182,8 +183,8 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
reader.readName("cr")
val cr = reader.readString()
reader.readName("t")
- val t = decoderContext.decodeWithChildContext(this,reader)
- ParInI(c,cl,cr,t)
+ val t = decoderContext.decodeWithChildContext(this, reader)
+ ParInI(c, cl, cr, t)
}
case "WithIn" => {
reader.readName("c")
@@ -193,17 +194,17 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
reader.readName("cr")
val cr = reader.readString()
reader.readName("l")
- val l = decoderContext.decodeWithChildContext(this,reader)
+ val l = decoderContext.decodeWithChildContext(this, reader)
reader.readName("r")
- val r = decoderContext.decodeWithChildContext(this,reader)
- WithIn(c,cl,cr,l,r)
+ val r = decoderContext.decodeWithChildContext(this, reader)
+ WithIn(c, cl, cr, l, r)
}
case "Out" => {
reader.readName("c")
val c = reader.readString()
reader.readName("v")
- val v = decoderContext.decodeWithChildContext(piobjCodec,reader)
- Out(c,v)
+ val v = decoderContext.decodeWithChildContext(piobjCodec, reader)
+ Out(c, v)
}
case "ParOut" => {
reader.readName("c")
@@ -213,10 +214,10 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
reader.readName("cr")
val cr = reader.readString()
reader.readName("l")
- val l = decoderContext.decodeWithChildContext(this,reader)
+ val l = decoderContext.decodeWithChildContext(this, reader)
reader.readName("r")
- val r = decoderContext.decodeWithChildContext(this,reader)
- ParOut(c,cl,cr,l,r)
+ val r = decoderContext.decodeWithChildContext(this, reader)
+ ParOut(c, cl, cr, l, r)
}
case "LeftOut" => {
reader.readName("c")
@@ -224,17 +225,17 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
reader.readName("cl")
val cl = reader.readString()
reader.readName("t")
- val t = decoderContext.decodeWithChildContext(this,reader)
- LeftOut(c,cl,t)
- }
+ val t = decoderContext.decodeWithChildContext(this, reader)
+ LeftOut(c, cl, t)
+ }
case "RightOut" => {
reader.readName("c")
val c = reader.readString()
reader.readName("cr")
val cr = reader.readString()
reader.readName("t")
- val t = decoderContext.decodeWithChildContext(this,reader)
- RightOut(c,cr,t)
+ val t = decoderContext.decodeWithChildContext(this, reader)
+ RightOut(c, cr, t)
}
case "PiCut" => {
reader.readName("z")
@@ -244,24 +245,23 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
reader.readName("cr")
val cr = reader.readString()
reader.readName("l")
- val l = decoderContext.decodeWithChildContext(this,reader)
+ val l = decoderContext.decodeWithChildContext(this, reader)
reader.readName("r")
- val r = decoderContext.decodeWithChildContext(this,reader)
- PiCut(c,cl,cr,l,r)
+ val r = decoderContext.decodeWithChildContext(this, reader)
+ PiCut(c, cl, cr, l, r)
}
case "PiCall" => {
reader.readName("name")
val n = reader.readString()
- val args: ArrayBuffer[Chan]
- = readArray( reader, "args" ) { () =>
- decoderContext
- .decodeWithChildContext( piobjCodec, reader )
- .asInstanceOf[Chan]
- }
+ val args: ArrayBuffer[Chan] = readArray(reader, "args") { () =>
+ decoderContext
+ .decodeWithChildContext(piobjCodec, reader)
+ .asInstanceOf[Chan]
+ }
- PiCall( n, args )
- }
+ PiCall(n, args)
+ }
// val l = decoderContext.decodeWithChildContext(this,reader)
}
reader.readEndDocument()
@@ -270,16 +270,16 @@ class TermCodec(registry: CodecRegistry) extends Codec[Term] {
}
/**
- * CodecProvider for PiObjectCodec
- *
- * Created using this as an example:
- * https://github.com/mongodb/mongo-scala-driver/blob/master/bson/src/main/scala/org/mongodb/scala/bson/codecs/DocumentCodecProvider.scala
- */
-class TermCodecProvider(bsonTypeClassMap:BsonTypeClassMap) extends CodecProvider {
- val PROVIDEDCLASS:Class[Term] = classOf[Term]
-
- override def get[T](clazz:Class[T], registry:CodecRegistry):Codec[T] = clazz match {
- case PROVIDEDCLASS => new TermCodec(registry).asInstanceOf[Codec[T]]
- case _ => null
- }
-}
\ No newline at end of file
+ * CodecProvider for PiObjectCodec
+ *
+ * Created using this as an example:
+ * https://github.com/mongodb/mongo-scala-driver/blob/master/bson/src/main/scala/org/mongodb/scala/bson/codecs/DocumentCodecProvider.scala
+ */
+class TermCodecProvider(bsonTypeClassMap: BsonTypeClassMap) extends CodecProvider {
+ val PROVIDEDCLASS: Class[Term] = classOf[Term]
+
+ override def get[T](clazz: Class[T], registry: CodecRegistry): Codec[T] = clazz match {
+ case PROVIDEDCLASS => new TermCodec(registry).asInstanceOf[Codec[T]]
+ case _ => null
+ }
+}
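
TermCodec is a hand-rolled tagged union: every case writes a "_t" discriminator
first and decode branches on it, recursing through `this` for continuations. A
one-line smoke test with the earlier roundTrip helper:

    // Out(c, obj) is the simplest leaf: { "_t": "Out", "c": <chan>, "v": <PiObject> }
    roundTrip(new TermCodec(), Out("c", PiItem(1)))
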
diff --git a/src/com/workflowfm/pew/simulation/Coordinator.scala b/src/com/workflowfm/pew/simulation/Coordinator.scala
index 198065a..8691d7d 100644
--- a/src/com/workflowfm/pew/simulation/Coordinator.scala
+++ b/src/com/workflowfm/pew/simulation/Coordinator.scala
@@ -7,29 +7,27 @@ import scala.concurrent.duration._
import com.workflowfm.pew.simulation.metrics._
import com.workflowfm.pew.execution._
import scala.collection.mutable.PriorityQueue
-import scala.concurrent.{ Promise, Await, ExecutionContext }
-import scala.util.{ Success, Failure }
-
-
+import scala.concurrent.{Promise, Await, ExecutionContext}
+import scala.util.{Success, Failure}
object Coordinator {
case object Start
//TODO case object Stop
- case class Done(time:Long,metrics:SimMetricsAggregator)
+ case class Done(time: Long, metrics: SimMetricsAggregator)
case object Ping
- case class Time(time:Long)
-
- case class AddSim(t:Long,sim:Simulation,exe:SimulatorExecutor[_])
- case class AddSims(l:Seq[(Long,Simulation)],exe:SimulatorExecutor[_])
- case class AddSimNow(sim:Simulation,exe:SimulatorExecutor[_])
- case class AddSimsNow(l:Seq[Simulation],exe:SimulatorExecutor[_])
-
- case class AddResource(r:TaskResource)
- case class AddResources(l:Seq[TaskResource])
-
- case class SimDone(name:String,result:String)
-
- case class AddTask(t:TaskGenerator, promise:Promise[TaskMetrics], resources:Seq[String])
+ case class Time(time: Long)
+
+ case class AddSim(t: Long, sim: Simulation, exe: SimulatorExecutor[_])
+ case class AddSims(l: Seq[(Long, Simulation)], exe: SimulatorExecutor[_])
+ case class AddSimNow(sim: Simulation, exe: SimulatorExecutor[_])
+ case class AddSimsNow(l: Seq[Simulation], exe: SimulatorExecutor[_])
+
+ case class AddResource(r: TaskResource)
+ case class AddResources(l: Seq[TaskResource])
+
+ case class SimDone(name: String, result: String)
+
+ case class AddTask(t: TaskGenerator, promise: Promise[TaskMetrics], resources: Seq[String])
case object AckTask
private case object Tick
@@ -37,57 +35,58 @@ object Coordinator {
private case object Tock
def props(
- scheduler: Scheduler,
- startingTime: Long = 0L,
- timeoutMillis: Int = 50
- )(implicit system: ActorSystem, executionContext:ExecutionContext
- ): Props = Props(new Coordinator(scheduler,startingTime,timeoutMillis)(system,executionContext))
+ scheduler: Scheduler,
+ startingTime: Long = 0L,
+ timeoutMillis: Int = 50
+ )(implicit system: ActorSystem, executionContext: ExecutionContext): Props =
+ Props(new Coordinator(scheduler, startingTime, timeoutMillis)(system, executionContext))
}
class Coordinator(
- scheduler :Scheduler,
- startingTime:Long,
- timeoutMillis:Int
+ scheduler: Scheduler,
+ startingTime: Long,
+ timeoutMillis: Int
)(
- implicit system: ActorSystem,
- implicit val executionContext:ExecutionContext
+ implicit system: ActorSystem,
+ implicit val executionContext: ExecutionContext
) extends Actor {
import scala.collection.mutable.Queue
-
+
sealed trait Event extends Ordered[Event] {
- def time:Long
- def compare(that:Event) = {
+ def time: Long
+ def compare(that: Event) = {
that.time.compare(time)
}
}
- case class FinishingTask(override val time:Long,task:Task) extends Event
- case class StartingSim(override val time:Long,simulation:Simulation,executor:SimulatorExecutor[_]) extends Event
-
- var resourceMap :Map[String,TaskResource] = Map[String,TaskResource]() ///: resources){ case (m,r) => m + (r.name -> r)}
- var simulations :Map[String,SimulatorExecutor[_]] = Map[String,SimulatorExecutor[_]]()
- val tasks :Queue[Task] = Queue()
-
+ case class FinishingTask(override val time: Long, task: Task) extends Event
+ case class StartingSim(override val time: Long, simulation: Simulation, executor: SimulatorExecutor[_]) extends Event
+
+ var resourceMap
+ : Map[String, TaskResource] = Map[String, TaskResource]() ///: resources){ case (m,r) => m + (r.name -> r)}
+ var simulations: Map[String, SimulatorExecutor[_]] = Map[String, SimulatorExecutor[_]]()
+ val tasks: Queue[Task] = Queue()
+
val events = new PriorityQueue[Event]()
-
- var starter:Option[ActorRef] = None
- var time = startingTime
- var taskID = 0L
-
+
+ var starter: Option[ActorRef] = None
+ var time = startingTime
+ var taskID = 0L
+
val metrics = new SimMetricsAggregator()
-
+
//implicit val timeout = Timeout(timeoutMillis.millis)
-
- def addResource(r:TaskResource) = if (!resourceMap.contains(r.name)) {
+
+ def addResource(r: TaskResource) = if (!resourceMap.contains(r.name)) {
println(s"[$time] Adding resource ${r.name}")
resourceMap += r.name -> r
metrics += r
}
- protected def handleEvent(event:Event) = event match {
+ protected def handleEvent(event: Event) = event match {
//println("["+time+"] ========= Event! ========= ")
// A task is finished
- case FinishingTask(t,task) if (t == time) => {
+ case FinishingTask(t, task) if (t == time) => {
// Unbind the resources
//resourceMap map { case (n,r) => (n,resourceUpdate(r)) } TODO why was that better originally?
task.taskResources(resourceMap).foreach(_.finishTask(time))
@@ -96,79 +95,80 @@ class Coordinator(
task.complete(metrics.taskMap.getOrElse(task.id, TaskMetrics(task).start(time - task.duration)))
}
// A simulation (workflow) is starting now
- case StartingSim(t,sim,exec) if (t == time)=> startSimulation(time,sim,exec)
- case _ => println(s"[$time] <*> <*> <*> Failed to handle event: $event")
+ case StartingSim(t, sim, exec) if (t == time) => startSimulation(time, sim, exec)
+ case _ => println(s"[$time] <*> <*> <*> Failed to handle event: $event")
}
- protected def startSimulation(t:Long, s:Simulation, e:SimulatorExecutor[_]) :Boolean = {
+ protected def startSimulation(t: Long, s: Simulation, e: SimulatorExecutor[_]): Boolean = {
if (t == time) {
- println("["+time+"] Starting simulation: \"" + s.name +"\".")
- metrics += (s,t)
- s.run(e).onComplete({
- case Success(res) => {
- stopSimulation(s.name, res.toString)
- println("*** Result of " + s.name + ": " + res)
- }
- case Failure(ex) => {
- stopSimulation(s.name, ex.getLocalizedMessage)
- println("*** Exception in " + s.name + ": ")
- ex.printStackTrace()
- }
- })
+ println("[" + time + "] Starting simulation: \"" + s.name + "\".")
+ metrics += (s, t)
+ s.run(e)
+ .onComplete({
+ case Success(res) => {
+ stopSimulation(s.name, res.toString)
+ println("*** Result of " + s.name + ": " + res)
+ }
+ case Failure(ex) => {
+ stopSimulation(s.name, ex.getLocalizedMessage)
+ println("*** Exception in " + s.name + ": ")
+ ex.printStackTrace()
+ }
+ })
simulations += (s.name -> e)
true
} else false
}
- protected def stopSimulation(name:String, result:String) = {
+ protected def stopSimulation(name: String, result: String) = {
simulations -= name
- (metrics^name) (_.done(result,time))
+ (metrics ^ name)(_.done(result, time))
println(s"[$time] Simulation $name reported done.")
}
-
+
// protected def updateSimulation(s:String, f:WorkflowMetricTracker=>WorkflowMetricTracker) = simulations = simulations map {
// case (n,m,e) if n.equals(s) => (n,f(m),e)
// case x => x
// }
-
- protected def resourceIdle(r:TaskResource) = if (r.isIdle) {
- (metrics^r)(_.idle(time-r.lastUpdate))
+
+ protected def resourceIdle(r: TaskResource) = if (r.isIdle) {
+ (metrics ^ r)(_.idle(time - r.lastUpdate))
r.update(time)
}
- protected def startTask(task:Task) {
- tasks.dequeueFirst (_.id == task.id)
+ protected def startTask(task: Task) {
+ tasks.dequeueFirst(_.id == task.id)
// Mark the start of the task in the metrics
- (metrics^task.id)(_.start(time))
+ (metrics ^ task.id)(_.start(time))
task.taskResources(resourceMap) map { r =>
// Update idle time if resource has been idle
- if (r.isIdle) (metrics^r)(_.idle(time-r.lastUpdate))
+ if (r.isIdle) (metrics ^ r)(_.idle(time - r.lastUpdate))
// Bind each resource to this task
r.startTask(task, time)
// Add the task and resource cost to the resource metrics
- (metrics^r)(_.task(task, r.costPerTick))
+ (metrics ^ r)(_.task(task, r.costPerTick))
}
// Add the task to the simulation metrics
- (metrics^task.simulation)(_.task(task).addDelay(time - task.created))
+ (metrics ^ task.simulation)(_.task(task).addDelay(time - task.created))
    // Generate a FinishingTask event to be triggered when the task completes
- events += FinishingTask(time+task.duration,task)
+ events += FinishingTask(time + task.duration, task)
}
-
+
/**
* Runs all tasks that require no resources
*/
protected def runNoResourceTasks() = tasks.dequeueAll(_.resources.isEmpty).map(startTask)
-
+
protected def workflowsReady = simulations forall (_._2.simulationReady)
-
+
/**
* First half of a clock tick
* We need two halves because we want to give the workflows a chance to reduce
* and register new tasks to the Coordinator. The Coordinator must receive those
- * through messages between now and the Tack message.
+ * through messages between now and the Tack message.
*/
- protected def tick :Unit = {
+ protected def tick: Unit = {
// Are events pending?
if (!events.isEmpty) {
@@ -179,12 +179,11 @@ class Coordinator(
println(s"[$time] *** Unable to handle past event for time: [${event.time}]")
} else {
// Jump ahead to the event time. This is a priority queue so we shouldn't skip any events
- time = event.time
+ time = event.time
handleEvent(event)
// Handle all events that are supposed to happen now, so that we free resources for the Scheduler
- while (events.headOption.map(_.time == time).getOrElse(false))
- handleEvent(events.dequeue)
+ while (events.headOption.map(_.time == time).getOrElse(false)) handleEvent(events.dequeue)
}
}
// val startedActors = res filter (_._1 == true) map (_._2.executor)
@@ -194,8 +193,7 @@ class Coordinator(
self ! Coordinator.Tack
}
-
- protected def tack :Unit = {
+ protected def tack: Unit = {
println(s"[$time] Waiting for workflow progress...")
Thread.sleep(timeoutMillis)
@@ -210,9 +208,8 @@ class Coordinator(
self ! Coordinator.Tock
}
}
-
- protected def tock :Unit = {
+ protected def tock: Unit = {
// Make sure we run tasks that need no resources
runNoResourceTasks()
@@ -221,62 +218,66 @@ class Coordinator(
// Update idle resources
resourceMap.values.foreach(resourceIdle)
-
// We finish if there are no events, no tasks, and all workflows have reduced
// Actually all workflows must have reduced at this stage, but we check anyway
//println(s"!TOCK! Events:${events.size} Tasks:${tasks.size} Workflows:$workflowsReady Simulations:${simulations.size} ")
- if (events.isEmpty && tasks.isEmpty && workflowsReady) { // && simulations.isEmpty) { //&& resources.forall(_.isIdle)
- println("["+time+"] All events done. All tasks done. All workflows idle.") // All simulations done..")
+ if (events.isEmpty && tasks.isEmpty && workflowsReady) { // && simulations.isEmpty) { //&& resources.forall(_.isIdle)
+ println("[" + time + "] All events done. All tasks done. All workflows idle.") // All simulations done..")
metrics.ended
// Tell whoever started us that we are done
- starter map { a => a ! Coordinator.Done(time,metrics) }
+ starter map { a =>
+ a ! Coordinator.Done(time, metrics)
+ }
} else {
self ! Coordinator.Tick
}
}
- def start(a:ActorRef) = if (starter.isEmpty) {
+ def start(a: ActorRef) = if (starter.isEmpty) {
starter = Some(a)
metrics.started
tick
}
def receive = {
- case Coordinator.AddSim(t,s,e) => events += StartingSim(t,s,e)
- case Coordinator.AddSims(l,e) => events ++= l map { case (t,s) => StartingSim(t,s,e) }
- case Coordinator.AddSimNow(s,e) => events += StartingSim(time,s,e)
- case Coordinator.AddSimsNow(l,e) => events ++= l map { s => StartingSim(time,s,e) }
-
- case Coordinator.AddResource(r) => addResource(r)
+ case Coordinator.AddSim(t, s, e) => events += StartingSim(t, s, e)
+ case Coordinator.AddSims(l, e) => events ++= l map { case (t, s) => StartingSim(t, s, e) }
+ case Coordinator.AddSimNow(s, e) => events += StartingSim(time, s, e)
+ case Coordinator.AddSimsNow(l, e) =>
+ events ++= l map { s =>
+ StartingSim(time, s, e)
+ }
+
+ case Coordinator.AddResource(r) => addResource(r)
case Coordinator.AddResources(r) => r foreach addResource
-
- case Coordinator.AddTask(gen,promise,resources) => {
+
+ case Coordinator.AddTask(gen, promise, resources) => {
val creation = if (gen.createTime >= 0) gen.createTime else time
// Create the task
- val t = gen.create(taskID,creation,resources:_*)
+ val t = gen.create(taskID, creation, resources: _*)
// This is ok only if the simulation is running in memory
// Promises cannot be sent over messages otherwise as they are not serializable
promise.completeWith(t.promise.future)
// Make sure the next taskID will be fresh
taskID = taskID + 1L
println(s"[$time] Adding task [$taskID] created at [$creation]: ${t.name} (${t.simulation}).")
-
+
// Calculate the cost of all resource usage. We only know this now!
- val resourceCost = (0L /: t.taskResources(resourceMap)) { case (c,r) => c + r.costPerTick * t.duration }
+ val resourceCost = (0L /: t.taskResources(resourceMap)) { case (c, r) => c + r.costPerTick * t.duration }
t.addCost(resourceCost)
-
+
metrics += t
tasks += t
-
+
//sender() ! Coordinator.AckTask(t) //uncomment this to acknowledge AddTask
}
case Coordinator.Start => start(sender)
- case Coordinator.Tick => tick
- case Coordinator.Tack => tack
- case Coordinator.Tock => tock
- case Coordinator.Ping => sender() ! Coordinator.Time(time)
+ case Coordinator.Tick => tick
+ case Coordinator.Tack => tack
+ case Coordinator.Tock => tock
+ case Coordinator.Ping => sender() ! Coordinator.Time(time)
}
}
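
The Coordinator advances virtual time with a three-phase loop: Tick pops due
events, Tack sleeps timeoutMillis so running workflows can reduce and send
AddTask messages, and Tock starts ready tasks or schedules the next Tick. A
hypothetical driver (mySimulation and myExecutor are placeholders; in practice
Start is sent from an actor or via the ask pattern so Done can be received):

    import akka.actor.ActorSystem

    implicit val system: ActorSystem = ActorSystem("sim")
    import system.dispatcher // supplies the implicit ExecutionContext for props

    val coordinator = system.actorOf(Coordinator.props(DefaultScheduler))
    coordinator ! Coordinator.AddSimNow(mySimulation, myExecutor) // placeholders
    coordinator ! Coordinator.Start
    // The sender of Start eventually receives Coordinator.Done(time, metrics).
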
diff --git a/src/com/workflowfm/pew/simulation/Generators.scala b/src/com/workflowfm/pew/simulation/Generators.scala
index a52b4bd..c91478c 100644
--- a/src/com/workflowfm/pew/simulation/Generators.scala
+++ b/src/com/workflowfm/pew/simulation/Generators.scala
@@ -1,16 +1,16 @@
package com.workflowfm.pew.simulation
trait ValueGenerator[T] {
- def get :T
- def estimate :T
+ def get: T
+ def estimate: T
}
-case class ConstantGenerator[T](value :T) extends ValueGenerator[T] {
- def get = value
+case class ConstantGenerator[T](value: T) extends ValueGenerator[T] {
+ def get = value
def estimate = value
}
-case class UniformGenerator(min :Int, max:Int) extends ValueGenerator[Int] {
- def get = new util.Random().nextInt(max-min) + min
+case class UniformGenerator(min: Int, max: Int) extends ValueGenerator[Int] {
+ def get = new util.Random().nextInt(max - min) + min
def estimate = (max + min) / 2
-}
\ No newline at end of file
+}
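
ValueGenerator separates what a task actually takes (get) from what the scheduler
may assume it takes (estimate); the Schedule logic below plans with estimated
durations. For example:

    val d = UniformGenerator(5, 15)
    d.get      // a fresh sample in [5, 15)
    d.estimate // always (15 + 5) / 2 = 10
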
diff --git a/src/com/workflowfm/pew/simulation/Scheduler.scala b/src/com/workflowfm/pew/simulation/Scheduler.scala
index 1c80818..6a00010 100644
--- a/src/com/workflowfm/pew/simulation/Scheduler.scala
+++ b/src/com/workflowfm/pew/simulation/Scheduler.scala
@@ -3,62 +3,61 @@ package com.workflowfm.pew.simulation
import scala.annotation.tailrec
trait Scheduler {
- def getNextTasks(tasks:Seq[Task], currentTime:Long, resourceMap:Map[String,TaskResource]) :Seq[Task]
+ def getNextTasks(tasks: Seq[Task], currentTime: Long, resourceMap: Map[String, TaskResource]): Seq[Task]
+
+ def isIdleResource(r: String, resourceMap: Map[String, TaskResource]) = resourceMap.get(r) match {
+ case None => false
+ case Some(s) => s.isIdle
+ }
- def isIdleResource(r:String, resourceMap:Map[String,TaskResource]) = resourceMap.get(r) match {
- case None => false
- case Some(s) => s.isIdle
- }
-
}
object DefaultScheduler extends Scheduler {
import scala.collection.immutable.Queue
- override def getNextTasks(tasks:Seq[Task], currentTime:Long, resourceMap:Map[String,TaskResource]) :Seq[Task] =
+ override def getNextTasks(tasks: Seq[Task], currentTime: Long, resourceMap: Map[String, TaskResource]): Seq[Task] =
findNextTasks(currentTime, resourceMap, resourceMap.mapValues(Schedule(_)), tasks.toList, Queue())
@tailrec
def findNextTasks(
- currentTime:Long,
- resourceMap:Map[String,TaskResource],
- schedules:Map[String,Schedule],
- tasks:List[Task],
- result:Queue[Task]
- ):Seq[Task] = tasks match {
+ currentTime: Long,
+ resourceMap: Map[String, TaskResource],
+ schedules: Map[String, Schedule],
+ tasks: List[Task],
+ result: Queue[Task]
+ ): Seq[Task] = tasks match {
case Nil => result
case t :: rest => {
- val start = Schedule.merge(t.resources.flatMap(schedules.get(_))) ? (currentTime,t)
+ val start = Schedule.merge(t.resources.flatMap(schedules.get(_))) ? (currentTime, t)
val schedules2 = (schedules /: t.resources) {
- case (s,r) => s + (r -> (s.getOrElse(r,Schedule(Nil)) +> (start,t)))
+ case (s, r) => s + (r -> (s.getOrElse(r, Schedule(Nil)) +> (start, t)))
}
val result2 = if (start == currentTime && t.taskResources(resourceMap).forall(_.isIdle)) result :+ t else result
findNextTasks(currentTime, resourceMap, schedules2, rest, result2)
}
- }
+ }
}
-case class Schedule(tasks:List[(Long,Long)]) {
+case class Schedule(tasks: List[(Long, Long)]) {
- def +(start:Long,end:Long):Option[Schedule] = Schedule.add(start,end,tasks) match {
- case None => None
- case Some(l) => Some(copy(tasks=l))
+ def +(start: Long, end: Long): Option[Schedule] = Schedule.add(start, end, tasks) match {
+ case None => None
+ case Some(l) => Some(copy(tasks = l))
}
- def +>(start:Long,end:Long):Schedule = Schedule.add(start,end,tasks) match {
+ def +>(start: Long, end: Long): Schedule = Schedule.add(start, end, tasks) match {
case None => {
System.err.println(s"*** Unable to add ($start,$end) to Schedule: $tasks")
this
}
- case Some(l) => copy(tasks=l)
+ case Some(l) => copy(tasks = l)
}
- def +>(startTime:Long, t:Task):Schedule = this +> (startTime,startTime+t.estimatedDuration)
+ def +>(startTime: Long, t: Task): Schedule = this +> (startTime, startTime + t.estimatedDuration)
- def ?(currentTime:Long, t:Task):Long = Schedule.fit(currentTime,t.estimatedDuration,tasks)
+ def ?(currentTime: Long, t: Task): Long = Schedule.fit(currentTime, t.estimatedDuration, tasks)
-
- def ++(s:Schedule):Schedule = Schedule(Schedule.merge(tasks,s.tasks))
+ def ++(s: Schedule): Schedule = Schedule(Schedule.merge(tasks, s.tasks))
def isValid = Schedule.isValid(tasks)
}
@@ -66,102 +65,109 @@ case class Schedule(tasks:List[(Long,Long)]) {
object Schedule {
import scala.collection.immutable.Queue
- def apply(r:TaskResource):Schedule = r.currentTask match {
- case None => Schedule(List())
- case Some((s,t)) => Schedule((s,s + t.estimatedDuration) :: Nil)
+ def apply(r: TaskResource): Schedule = r.currentTask match {
+ case None => Schedule(List())
+ case Some((s, t)) => Schedule((s, s + t.estimatedDuration) :: Nil)
}
@tailrec
- def add (
- start:Long, end:Long,
- tasks:List[(Long,Long)],
- result:Queue[(Long,Long)] = Queue[(Long,Long)]()
- ):Option[List[(Long,Long)]] = tasks match {
- case Nil => Some(result :+ (start,end) toList)
- case (l:Long,r:Long) :: t =>
- if (l > end) Some(result ++ ((start,end) :: (l,r) :: t) toList)
- else if (l == end) Some(result ++ ((start,r) :: t) toList)
- else if (r < start) add(start,end,t,result :+ ((l,r)))
- else if (r == start) add(l,end,t,result)
+ def add(
+ start: Long,
+ end: Long,
+ tasks: List[(Long, Long)],
+ result: Queue[(Long, Long)] = Queue[(Long, Long)]()
+ ): Option[List[(Long, Long)]] = tasks match {
+ case Nil => Some(result :+ (start, end) toList)
+ case (l: Long, r: Long) :: t =>
+ if (l > end) Some(result ++ ((start, end) :: (l, r) :: t) toList)
+ else if (l == end) Some(result ++ ((start, r) :: t) toList)
+ else if (r < start) add(start, end, t, result :+ ((l, r)))
+ else if (r == start) add(l, end, t, result)
else /* if (r >= end) */ None
- //else None
+ //else None
}
@tailrec
- def fit (
- start:Long,
- duration:Long,
- tasks:List[(Long,Long)]
- ):Long = tasks match {
- case Nil => start
- case (l,_) :: _ if (l >= start + duration) => start
- case (_,r) :: t => fit(r,duration,t)
+ def fit(
+ start: Long,
+ duration: Long,
+ tasks: List[(Long, Long)]
+ ): Long = tasks match {
+ case Nil => start
+ case (l, _) :: _ if (l >= start + duration) => start
+ case (_, r) :: t => fit(r, duration, t)
}
@tailrec
def merge(
- g1:List[(Long,Long)],
- g2:List[(Long,Long)],
- result:Queue[(Long,Long)] = Queue[(Long,Long)]()
- ):List[(Long,Long)] = g1 match {
+ g1: List[(Long, Long)],
+ g2: List[(Long, Long)],
+ result: Queue[(Long, Long)] = Queue[(Long, Long)]()
+ ): List[(Long, Long)] = g1 match {
case Nil => result ++ g2 toList
- case (l1,r1) :: t1 => g2 match {
- case Nil => result ++ g1 toList
- case (l2,r2) :: t2 => {
- if (r2 < l1) merge(g1,t2,result :+ (l2,r2))
- else if (r1 < l2) merge(t1,g2,result :+ (l1,r1))
- else if (r1 == r2) merge(t1,t2,result :+ (math.min(l1,l2),r1))
- else if (r2 == l1) merge((l2,r1)::t1,t2,result)
- else if (r1 == l2) merge(t1,(l1,r2)::t2,result)
- else if (r1 < r2) merge(t1,(math.min(l1,l2),r2)::t2,result)
- else /* if (r1 > r2)*/ merge((math.min(l1,l2),r1)::t1,t2,result)
+ case (l1, r1) :: t1 =>
+ g2 match {
+ case Nil => result ++ g1 toList
+ case (l2, r2) :: t2 => {
+ if (r2 < l1) merge(g1, t2, result :+ (l2, r2))
+ else if (r1 < l2) merge(t1, g2, result :+ (l1, r1))
+ else if (r1 == r2) merge(t1, t2, result :+ (math.min(l1, l2), r1))
+ else if (r2 == l1) merge((l2, r1) :: t1, t2, result)
+ else if (r1 == l2) merge(t1, (l1, r2) :: t2, result)
+ else if (r1 < r2) merge(t1, (math.min(l1, l2), r2) :: t2, result)
+ else /* if (r1 > r2)*/ merge((math.min(l1, l2), r1) :: t1, t2, result)
+ }
}
- }
}
- def merge(schedules:Seq[Schedule]):Schedule = {
+ def merge(schedules: Seq[Schedule]): Schedule = {
(Schedule(List()) /: schedules)(_ ++ _)
}
- @deprecated("No longer using gaps in Schedule","1.2.0")
- def fitInGaps (
- start:Long, end:Long,
- gaps:List[(Long,Long)],
- result:Queue[(Long,Long)] = Queue[(Long,Long)]()
- ):Option[List[(Long,Long)]] = gaps match {
+ @deprecated("No longer using gaps in Schedule", "1.2.0")
+ def fitInGaps(
+ start: Long,
+ end: Long,
+ gaps: List[(Long, Long)],
+ result: Queue[(Long, Long)] = Queue[(Long, Long)]()
+ ): Option[List[(Long, Long)]] = gaps match {
case Nil => Some(result.toList)
- case (l:Long,r:Long) :: t =>
- if (l == start && end == r) fitInGaps(start,end,t,result) // event fits exactly
- else if (l == start && end <= r) fitInGaps(start,end,t,result :+ ((end,r)) )// add an event at the beginning of the gap
- else if (l <= start && end == r) fitInGaps(start,end,t,result :+ ((l,start)) ) // add an event at the end of the gaps
- else if (l < start && end < r) fitInGaps(start,end,t,result :+ ((l,start)) :+ ((end,r)) ) // add an event within a gap
- else if (start > r || end < l) fitInGaps(start,end,t,result :+ ((l,r)) )
+ case (l: Long, r: Long) :: t =>
+ if (l == start && end == r) fitInGaps(start, end, t, result) // event fits exactly
+ else if (l == start && end <= r)
+ fitInGaps(start, end, t, result :+ ((end, r))) // add an event at the beginning of the gap
+ else if (l <= start && end == r)
+ fitInGaps(start, end, t, result :+ ((l, start))) // add an event at the end of the gaps
+ else if (l < start && end < r)
+ fitInGaps(start, end, t, result :+ ((l, start)) :+ ((end, r))) // add an event within a gap
+ else if (start > r || end < l) fitInGaps(start, end, t, result :+ ((l, r)))
else None
}
- @deprecated("No longer using gaps in Schedule","1.2.0")
+ @deprecated("No longer using gaps in Schedule", "1.2.0")
// we assume all gap lists finish with a (t,Long.MaxValue) gap
- def mergeGaps (
- g1:List[(Long,Long)],
- g2:List[(Long,Long)],
- result:Queue[(Long,Long)] = Queue[(Long,Long)]()
- ):List[(Long,Long)] = g1 match {
+ def mergeGaps(
+ g1: List[(Long, Long)],
+ g2: List[(Long, Long)],
+ result: Queue[(Long, Long)] = Queue[(Long, Long)]()
+ ): List[(Long, Long)] = g1 match {
case Nil => result toList
- case (l1,r1) :: t1 => g2 match {
- case Nil => result toList
- case (l2,r2) :: t2 => {
- if (r2 <= l1) mergeGaps(g1,t2,result)
- else if (r1 <= l2) mergeGaps (t1,g2,result)
- else if (r1 == Long.MaxValue && r1 == r2) result :+ (math.max(l1,l2),r1) toList
- else if (r2 <= r1) mergeGaps(g1,t2,result :+ (math.max(l1,l2),r2))
- else /* if (r1 < r2) */ mergeGaps(t1,g2,result :+ (math.max(l1,l2),r1))
+ case (l1, r1) :: t1 =>
+ g2 match {
+ case Nil => result toList
+ case (l2, r2) :: t2 => {
+ if (r2 <= l1) mergeGaps(g1, t2, result)
+ else if (r1 <= l2) mergeGaps(t1, g2, result)
+ else if (r1 == Long.MaxValue && r1 == r2) result :+ (math.max(l1, l2), r1) toList
+ else if (r2 <= r1) mergeGaps(g1, t2, result :+ (math.max(l1, l2), r2))
+ else /* if (r1 < r2) */ mergeGaps(t1, g2, result :+ (math.max(l1, l2), r1))
+ }
}
- }
}
- def isValid(gaps:List[(Long,Long)], end:Long = Long.MinValue):Boolean = gaps match {
- case Nil => true
- case (l,r) :: t if end < l && l < r => isValid(t, r)
- case _ => false
+ def isValid(gaps: List[(Long, Long)], end: Long = Long.MinValue): Boolean = gaps match {
+ case Nil => true
+ case (l, r) :: t if end < l && l < r => isValid(t, r)
+ case _ => false
}
}
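To make the interval logic above concrete: a minimal sketch of the Schedule operations as a hypothetical REPL session. The import path is assumed from the surrounding files, and the values are illustrative only.

    import com.workflowfm.pew.simulation.Schedule // assumed package of the class above

    val s = Schedule(List((1L, 3L)))
    s + (4L, 6L) // Some(Schedule(List((1,3), (4,6)))): disjoint intervals insert in order
    s + (2L, 5L) // None: overlapping intervals are rejected
    s ++ Schedule(List((3L, 5L)))        // Schedule(List((1,5))): adjacent intervals coalesce
    Schedule.fit(0L, 2L, List((1L, 5L))) // 5: earliest start for duration 2 given busy (1,5)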
diff --git a/src/com/workflowfm/pew/simulation/Simulation.scala b/src/com/workflowfm/pew/simulation/Simulation.scala
index a939f4b..c279649 100644
--- a/src/com/workflowfm/pew/simulation/Simulation.scala
+++ b/src/com/workflowfm/pew/simulation/Simulation.scala
@@ -11,16 +11,25 @@ import scala.concurrent.ExecutionContext
import com.workflowfm.pew.PiProcess
import com.workflowfm.pew.execution.SimulatorExecutor
-
-abstract class Simulation(val name:String) { //extends SimulationMetricTracker
- def run(executor:SimulatorExecutor[_]):Future[Any]
- def getProcesses():Seq[PiProcess]
+abstract class Simulation(val name: String) { //extends SimulationMetricTracker
+ def run(executor: SimulatorExecutor[_]): Future[Any]
+ def getProcesses(): Seq[PiProcess]
}
-class TaskSimulation(simulationName:String, coordinator:ActorRef, resources:Seq[String], duration:ValueGenerator[Long], val cost:ValueGenerator[Long]=new ConstantGenerator(0L), interrupt:Int=(-1), priority:Task.Priority=Task.Medium)(implicit system: ActorSystem) extends Simulation(simulationName) {
- def run(executor:SimulatorExecutor[_]) = {
- TaskGenerator(simulationName + "Task", simulationName, duration, cost, interrupt, priority).addTo(coordinator,resources :_*)
- }
+class TaskSimulation(
+ simulationName: String,
+ coordinator: ActorRef,
+ resources: Seq[String],
+ duration: ValueGenerator[Long],
+ val cost: ValueGenerator[Long] = new ConstantGenerator(0L),
+ interrupt: Int = (-1),
+ priority: Task.Priority = Task.Medium
+)(implicit system: ActorSystem)
+ extends Simulation(simulationName) {
+ def run(executor: SimulatorExecutor[_]) = {
+ TaskGenerator(simulationName + "Task", simulationName, duration, cost, interrupt, priority)
+ .addTo(coordinator, resources: _*)
+ }
override def getProcesses() = Seq()
}
@@ -30,11 +39,14 @@ class MockExecutor extends Actor {
}
}
-trait SimulatedProcess { this:PiProcess =>
- def simulationName:String
- override def isSimulatedProcess = true
-
- def simulate[T](gen:TaskGenerator, coordinator:ActorRef, result:T, resources:String*)(implicit system: ActorSystem, context: ExecutionContext = ExecutionContext.global):Future[T] ={
- gen.addTo(coordinator, resources:_*).map(_ => result)
- }
+trait SimulatedProcess { this: PiProcess =>
+ def simulationName: String
+ override def isSimulatedProcess = true
+
+ def simulate[T](gen: TaskGenerator, coordinator: ActorRef, result: T, resources: String*)(
+ implicit system: ActorSystem,
+ context: ExecutionContext = ExecutionContext.global
+ ): Future[T] = {
+ gen.addTo(coordinator, resources: _*).map(_ => result)
+ }
}
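A hedged usage sketch for the reformatted TaskSimulation constructor; `coordinator`, `executor` and the implicit ActorSystem are assumed to exist.

    // Illustrative only: duration of 5 ticks; cost, interrupt and priority use their defaults.
    val sim = new TaskSimulation("MySim", coordinator, Seq("R1", "R2"), new ConstantGenerator(5L))
    sim.run(executor) // Future[TaskMetrics], completed when the generated task finishes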
diff --git a/src/com/workflowfm/pew/simulation/Task.scala b/src/com/workflowfm/pew/simulation/Task.scala
index 8d64909..3dbef48 100644
--- a/src/com/workflowfm/pew/simulation/Task.scala
+++ b/src/com/workflowfm/pew/simulation/Task.scala
@@ -9,56 +9,58 @@ import com.workflowfm.pew.simulation.metrics._
object Task {
sealed trait Priority extends Ordered[Priority] {
- def value:Int
- def compare(that:Priority) = this.value - that.value
+ def value: Int
+ def compare(that: Priority) = this.value - that.value
}
case object Highest extends Priority { val value = 5 }
- case object High extends Priority { val value = 4 }
- case object Medium extends Priority { val value = 3 }
- case object Low extends Priority { val value = 2 }
+ case object High extends Priority { val value = 4 }
+ case object Medium extends Priority { val value = 3 }
+ case object Low extends Priority { val value = 2 }
case object VeryLow extends Priority { val value = 1 }
}
-class Task (
- val id:Long,
- val name:String,
- val simulation:String,
- val created:Long,
- val resources:Seq[String],
- val duration:Long,
- val estimatedDuration:Long,
- val initialCost:Long,
- val interrupt:Int=Int.MaxValue,
- val priority:Task.Priority=Task.Medium
- ) extends Ordered[Task] {
-
- val promise:Promise[TaskMetrics] = Promise()
-
- var cost:Long = initialCost
-
- // execute will be called once by each associated TaskResource
- def complete(metrics:TaskMetrics) = if (!promise.isCompleted) promise.success(metrics)
-
- def addCost(extra:Long) = cost += extra
-
- def nextPossibleStart(currentTime:Long, resourceMap:Map[String,TaskResource]) = {
- (currentTime /: resources){ case (i,rN) => resourceMap.get(rN) match {
- case None => throw new RuntimeException(s"Resource $rN not found!")
- case Some(r) => Math.max(i,r.nextAvailableTimestamp(currentTime))
- }}
+class Task(
+ val id: Long,
+ val name: String,
+ val simulation: String,
+ val created: Long,
+ val resources: Seq[String],
+ val duration: Long,
+ val estimatedDuration: Long,
+ val initialCost: Long,
+ val interrupt: Int = Int.MaxValue,
+ val priority: Task.Priority = Task.Medium
+) extends Ordered[Task] {
+
+ val promise: Promise[TaskMetrics] = Promise()
+
+ var cost: Long = initialCost
+
+ // complete will be called once by each associated TaskResource
+ def complete(metrics: TaskMetrics) = if (!promise.isCompleted) promise.success(metrics)
+
+ def addCost(extra: Long) = cost += extra
+
+ def nextPossibleStart(currentTime: Long, resourceMap: Map[String, TaskResource]) = {
+ (currentTime /: resources) {
+ case (i, rN) =>
+ resourceMap.get(rN) match {
+ case None => throw new RuntimeException(s"Resource $rN not found!")
+ case Some(r) => Math.max(i, r.nextAvailableTimestamp(currentTime))
+ }
+ }
}
- def taskResources(resourceMap:Map[String,TaskResource]) = resources flatMap (resourceMap.get(_))
+ def taskResources(resourceMap: Map[String, TaskResource]) = resources flatMap (resourceMap.get(_))
-
- def compare(that:Task) = {
- lazy val cPriority = that.priority.compare(this.priority)
+ def compare(that: Task) = {
+ lazy val cPriority = that.priority.compare(this.priority)
lazy val cResources = that.resources.size.compare(this.resources.size)
- lazy val cAge = this.created.compare(that.created)
- lazy val cDuration = that.estimatedDuration.compare(this.estimatedDuration)
+ lazy val cAge = this.created.compare(that.created)
+ lazy val cDuration = that.estimatedDuration.compare(this.estimatedDuration)
lazy val cInterrupt = this.interrupt.compare(that.interrupt)
- lazy val cID = this.id.compare(that.id)
-
+ lazy val cID = this.id.compare(that.id)
+
if (cPriority != 0) cPriority
else if (cAge != 0) cAge
else if (cResources != 0) cResources
@@ -66,35 +68,36 @@ class Task (
else if (cInterrupt != 0) cInterrupt
else cID
}
-
+
override def toString = {
val res = resources.mkString(",")
s"Task($name)($res)"
}
}
-case class TaskGenerator (
- name :String,
- simulation:String,
- duration:ValueGenerator[Long],
- cost:ValueGenerator[Long],
- interrupt:Int=(-1),
- priority:Task.Priority=Task.Medium,
- createTime:Long=(-1)
+case class TaskGenerator(
+ name: String,
+ simulation: String,
+ duration: ValueGenerator[Long],
+ cost: ValueGenerator[Long],
+ interrupt: Int = (-1),
+ priority: Task.Priority = Task.Medium,
+ createTime: Long = (-1)
) {
- def create(id:Long, time:Long, resources:String*) = new Task(id,name,simulation,time,resources,duration.get,duration.estimate,cost.get,interrupt,priority)
- def withPriority(p:Task.Priority) = copy(priority = p)
- def withInterrupt(int:Int) = copy(interrupt = int)
- def withDuration(dur:ValueGenerator[Long]) = copy(duration = dur)
- def withName(n:String) = copy(name = n)
- def withSimulation(s:String) = copy(simulation=s)
- def withCreationTime(t:Long) = copy(createTime=t)
-
- def addTo(coordinator:ActorRef, resources:String*)(implicit system: ActorSystem) = {
+ def create(id: Long, time: Long, resources: String*) =
+ new Task(id, name, simulation, time, resources, duration.get, duration.estimate, cost.get, interrupt, priority)
+ def withPriority(p: Task.Priority) = copy(priority = p)
+ def withInterrupt(int: Int) = copy(interrupt = int)
+ def withDuration(dur: ValueGenerator[Long]) = copy(duration = dur)
+ def withName(n: String) = copy(name = n)
+ def withSimulation(s: String) = copy(simulation = s)
+ def withCreationTime(t: Long) = copy(createTime = t)
+
+ def addTo(coordinator: ActorRef, resources: String*)(implicit system: ActorSystem) = {
//implicit val timeout = Timeout(1.second)
// change this to ? to require an acknowledgement
val promise = Promise[TaskMetrics]()
- coordinator ! Coordinator.AddTask(this,promise,resources)
+ coordinator ! Coordinator.AddTask(this, promise, resources)
promise.future
}
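The copy-based `withX` builders above compose as in this sketch; `coordinator` and the implicit ActorSystem are assumed.

    val gen = TaskGenerator("Measure", "MySim", new ConstantGenerator(3L), new ConstantGenerator(1L))
      .withPriority(Task.Highest)
      .withInterrupt(2)
    val metrics: Future[TaskMetrics] = gen.addTo(coordinator, "R1", "R2")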
diff --git a/src/com/workflowfm/pew/simulation/TaskResource.scala b/src/com/workflowfm/pew/simulation/TaskResource.scala
index 5ea38c5..eeaac16 100644
--- a/src/com/workflowfm/pew/simulation/TaskResource.scala
+++ b/src/com/workflowfm/pew/simulation/TaskResource.scala
@@ -5,58 +5,61 @@ import com.workflowfm.pew.simulation.metrics._
object TaskResource {
sealed trait State
- case object Busy extends State
- case class Finished(t:Task) extends State
- case object Idle extends State
+ case object Busy extends State
+ case class Finished(t: Task) extends State
+ case object Idle extends State
}
-class TaskResource(val name:String,val costPerTick:Int) {
- var currentTask :Option[(Long,Task)] = None
- var lastUpdate :Long = 1
-
- def isIdle :Boolean = currentTask == None
-
- def finishTask(currentTime:Long) :Option[Task] = currentTask match {
+class TaskResource(val name: String, val costPerTick: Int) {
+ var currentTask: Option[(Long, Task)] = None
+ var lastUpdate: Long = 1
+
+ def isIdle: Boolean = currentTask == None
+
+ def finishTask(currentTime: Long): Option[Task] = currentTask match {
case None => {
- //println("["+currentTime+"] \"" + name + "\" is idle.")
- None
+ //println("["+currentTime+"] \"" + name + "\" is idle.")
+ None
}
- case Some((startTime,task)) =>
+ case Some((startTime, task)) =>
if (currentTime >= startTime + task.duration) {
- println("["+currentTime+"] \"" + name + "\" detached from task \"" + task.name + " (" + task.simulation +")\".")
+ println(
+ "[" + currentTime + "] \"" + name + "\" detached from task \"" + task.name + " (" + task.simulation + ")\"."
+ )
currentTask = None
lastUpdate = currentTime
Some(task)
- }
- else {
+ } else {
//println("["+currentTime+"] \"" + name + "\" is attached to task \"" + task.name + " (" + task.simulation +")\" - " + (startTime + duration - currentTime) + " ticks remaining.")
None
}
}
-
- def startTask(task:Task,currentTime:Long) = {
+
+ def startTask(task: Task, currentTime: Long) = {
currentTask match {
case None => {
- println("["+currentTime+"] \"" + name + "\" is NOW attached to task \"" + task.name + " (" + task.simulation +")\" - " + task.duration + " ticks remaining.")
- currentTask = Some(currentTime,task)
+ println(
+ "[" + currentTime + "] \"" + name + "\" is NOW attached to task \"" + task.name + " (" + task.simulation + ")\" - " + task.duration + " ticks remaining."
+ )
+ currentTask = Some(currentTime, task)
lastUpdate = currentTime
true
}
- case Some((_,currentTask)) => {
- println("["+currentTime+"] <*> <*> <*> ERROR <*> <*> <*> \"" + name + "\" tried to attach to \"" + task.name + " (" + task.simulation +")\" but is already attached to \"" + currentTask.name + "\"!")
+ case Some((_, currentTask)) => {
+ println(
+ "[" + currentTime + "] <*> <*> <*> ERROR <*> <*> <*> \"" + name + "\" tried to attach to \"" + task.name + " (" + task.simulation + ")\" but is already attached to \"" + currentTask.name + "\"!"
+ )
false
}
}
}
-
-
- def nextAvailableTimestamp(currentTime:Long) :Long = currentTask match {
+
+ def nextAvailableTimestamp(currentTime: Long): Long = currentTask match {
case None => currentTime
- case Some((startTime,t)) => {
+ case Some((startTime, t)) => {
startTime + t.estimatedDuration
}
}
-
-
- def update(time:Long) = lastUpdate = time
-}
\ No newline at end of file
+
+ def update(time: Long) = lastUpdate = time
+}
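A minimal sketch of the resource state cycle driven by the Coordinator, assuming some `task: Task` is in scope.

    val r = new TaskResource("R1", costPerTick = 1)
    r.isIdle                     // true: no current task
    r.startTask(task, 0L)        // true: attaches the task, lastUpdate = 0
    r.nextAvailableTimestamp(2L) // 0 + task.estimatedDuration while busy
    r.finishTask(task.duration)  // Some(task) once the full duration has elapsed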
diff --git a/src/com/workflowfm/pew/simulation/metrics/Actor.scala b/src/com/workflowfm/pew/simulation/metrics/Actor.scala
index cba860f..98a802a 100644
--- a/src/com/workflowfm/pew/simulation/metrics/Actor.scala
+++ b/src/com/workflowfm/pew/simulation/metrics/Actor.scala
@@ -8,44 +8,45 @@ import com.workflowfm.pew.execution.SimulatorExecutor
import com.workflowfm.pew.simulation.Simulation
import com.workflowfm.pew.simulation.Coordinator
-
object SimMetricsActor {
- case class Start(coordinator:ActorRef)
- case class StartSims(coordinator:ActorRef,sims:Seq[(Long,Simulation)],executor:SimulatorExecutor[_])
- case class StartSimsNow(coordinator:ActorRef,sims:Seq[Simulation],executor:SimulatorExecutor[_])
-
- def props(m:SimMetricsOutput, callbackActor:Option[ActorRef]=None)(implicit system: ActorSystem): Props = Props(new SimMetricsActor(m,callbackActor)(system))
+ case class Start(coordinator: ActorRef)
+ case class StartSims(coordinator: ActorRef, sims: Seq[(Long, Simulation)], executor: SimulatorExecutor[_])
+ case class StartSimsNow(coordinator: ActorRef, sims: Seq[Simulation], executor: SimulatorExecutor[_])
+
+ def props(m: SimMetricsOutput, callbackActor: Option[ActorRef] = None)(implicit system: ActorSystem): Props =
+ Props(new SimMetricsActor(m, callbackActor)(system))
}
-// Provide a callbackActor to get a response when we are done. Otherwise we'll shutdown the ActorSystem
+// Provide a callbackActor to get a response when we are done. Otherwise we'll shut down the ActorSystem.
+
+class SimMetricsActor(m: SimMetricsOutput, callbackActor: Option[ActorRef])(implicit system: ActorSystem)
+ extends Actor {
+ var coordinator: Option[ActorRef] = None
-class SimMetricsActor(m:SimMetricsOutput, callbackActor:Option[ActorRef])(implicit system: ActorSystem) extends Actor {
- var coordinator:Option[ActorRef] = None
-
def receive = {
case SimMetricsActor.Start(coordinator) if this.coordinator.isEmpty => {
this.coordinator = Some(coordinator)
coordinator ! Coordinator.Start
}
-
- case SimMetricsActor.StartSims(coordinator,sims,executor) if this.coordinator.isEmpty => {
+
+ case SimMetricsActor.StartSims(coordinator, sims, executor) if this.coordinator.isEmpty => {
this.coordinator = Some(coordinator)
- coordinator ! Coordinator.AddSims(sims,executor)
+ coordinator ! Coordinator.AddSims(sims, executor)
coordinator ! Coordinator.Start
}
-
- case SimMetricsActor.StartSimsNow(coordinator,sims,executor) if this.coordinator.isEmpty => {
+
+ case SimMetricsActor.StartSimsNow(coordinator, sims, executor) if this.coordinator.isEmpty => {
this.coordinator = Some(coordinator)
- coordinator ! Coordinator.AddSimsNow(sims,executor)
+ coordinator ! Coordinator.AddSimsNow(sims, executor)
coordinator ! Coordinator.Start
}
-
- case Coordinator.Done(t:Long,ma:SimMetricsAggregator) if this.coordinator == Some(sender) => {
+
+ case Coordinator.Done(t: Long, ma: SimMetricsAggregator) if this.coordinator == Some(sender) => {
this.coordinator = None
- m(t,ma)
+ m(t, ma)
callbackActor match {
- case None => system.terminate()
- case Some(actor) => actor ! Coordinator.Done(t,ma)
+ case None => system.terminate()
+ case Some(actor) => actor ! Coordinator.Done(t, ma)
}
}
}
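A sketch of driving a run through SimMetricsActor, assuming `system`, `coordinator`, `sim` and `executor` exist. With no callbackActor, the ActorSystem is terminated when Coordinator.Done arrives.

    val metricsActor = system.actorOf(SimMetricsActor.props(new SimMetricsPrinter()))
    metricsActor ! SimMetricsActor.StartSimsNow(coordinator, Seq(sim), executor)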
diff --git a/src/com/workflowfm/pew/simulation/metrics/Measure.scala b/src/com/workflowfm/pew/simulation/metrics/Measure.scala
index 7f2a6e3..25f0767 100644
--- a/src/com/workflowfm/pew/simulation/metrics/Measure.scala
+++ b/src/com/workflowfm/pew/simulation/metrics/Measure.scala
@@ -2,7 +2,6 @@ package com.workflowfm.pew.simulation.metrics
import com.workflowfm.pew.simulation._
-
//trait Metrics {
// def stringValues :List[String]
// def values(sep:String=",") = stringValues.mkString(sep)
@@ -16,23 +15,23 @@ import com.workflowfm.pew.simulation._
//object TaskMetrics {
// def header(sep:String) = List("Task","Start","Delay","Duration","Cost","Workflow","Resources").mkString(sep)
//}
-case class TaskMetrics (
- id:Long,
- task:String,
- simulation:String,
- created:Long,
- started:Option[Long],
- duration:Long,
- cost:Long,
- resources:Seq[String]
- ) {
+case class TaskMetrics(
+ id: Long,
+ task: String,
+ simulation: String,
+ created: Long,
+ started: Option[Long],
+ duration: Long,
+ cost: Long,
+ resources: Seq[String]
+) {
//override def stringValues = List(task,start,delay,duration,cost,workflow,"\"" + resources.mkString(",") + "\"") map (_.toString)
// def addDelay(d :Long) = copy(delay = delay + d)
// def addDuration(d :Long) = copy(duration = duration + d)
// def addCost(c:Long) = copy(cost = cost + c)
- def start(st:Long) = copy(started=Some(st))
+ def start(st: Long) = copy(started = Some(st))
def delay = started match {
- case None => 0L
+ case None => 0L
case Some(s) => s - created
}
def fullName = s"$task($simulation)"
@@ -45,100 +44,109 @@ case class TaskMetrics (
// }
}
object TaskMetrics {
- def apply(task:Task):TaskMetrics = TaskMetrics(task.id, task.name, task.simulation, task.created, None, task.duration, task.cost, task.resources)
+ def apply(task: Task): TaskMetrics =
+ TaskMetrics(task.id, task.name, task.simulation, task.created, None, task.duration, task.cost, task.resources)
}
case class SimulationMetrics(
- name:String,
- started:Long,
- duration:Long,
- delay:Long,
- tasks:Int,
- cost:Long,
- result:Option[String]
- ) {
- def addDuration(d:Long) = copy(duration = duration + d)
- def addCost(c:Long) = copy(cost = cost + c)
- def addDelay(d:Long) = copy(delay = delay + d)
- def task(task:Task) = copy(tasks = tasks + 1, cost = cost + task.cost)
- def done(res:String, time:Long) = copy( result = Some(res), duration = duration + time - started ) // TODO should this and result be one?
+ name: String,
+ started: Long,
+ duration: Long,
+ delay: Long,
+ tasks: Int,
+ cost: Long,
+ result: Option[String]
+) {
+ def addDuration(d: Long) = copy(duration = duration + d)
+ def addCost(c: Long) = copy(cost = cost + c)
+ def addDelay(d: Long) = copy(delay = delay + d)
+ def task(task: Task) = copy(tasks = tasks + 1, cost = cost + task.cost)
+ def done(res: String, time: Long) =
+ copy(result = Some(res), duration = duration + time - started) // TODO should this and result be one?
}
object SimulationMetrics {
- def apply(name:String, t:Long):SimulationMetrics = SimulationMetrics(name,t,0L,0L,0,0L,None)
+ def apply(name: String, t: Long): SimulationMetrics = SimulationMetrics(name, t, 0L, 0L, 0, 0L, None)
}
-case class ResourceMetrics (
- name:String,
- busyTime:Long,
- idleTime:Long,
- tasks:Int,
- cost:Long
- ) {
- def idle(i:Long) = copy(idleTime = idleTime + i)
- def task(task:Task, costPerTick:Long) = copy(
- tasks = tasks + 1,
- cost = cost + task.duration * costPerTick,
- busyTime = busyTime + task.duration
- )
+case class ResourceMetrics(
+ name: String,
+ busyTime: Long,
+ idleTime: Long,
+ tasks: Int,
+ cost: Long
+) {
+ def idle(i: Long) = copy(idleTime = idleTime + i)
+ def task(task: Task, costPerTick: Long) = copy(
+ tasks = tasks + 1,
+ cost = cost + task.duration * costPerTick,
+ busyTime = busyTime + task.duration
+ )
}
object ResourceMetrics {
- def apply(name:String):ResourceMetrics = ResourceMetrics(name,0L,0L,0,0L)
- def apply(r:TaskResource):ResourceMetrics = ResourceMetrics(r.name,0L,0L,0,0L)
+ def apply(name: String): ResourceMetrics = ResourceMetrics(name, 0L, 0L, 0, 0L)
+ def apply(r: TaskResource): ResourceMetrics = ResourceMetrics(r.name, 0L, 0L, 0, 0L)
}
class SimMetricsAggregator {
import scala.collection.immutable.Map
- var start:Option[Long] = None
- var end:Option[Long] = None
+ var start: Option[Long] = None
+ var end: Option[Long] = None
def started = start match {
case None => start = Some(System.currentTimeMillis())
- case _ => ()
+ case _ => ()
}
def ended = end = Some(System.currentTimeMillis())
- val taskMap = scala.collection.mutable.Map[Long,TaskMetrics]()
- val simMap = scala.collection.mutable.Map[String,SimulationMetrics]()
- val resourceMap = scala.collection.mutable.Map[String,ResourceMetrics]()
-
-
- // Set
-
- def +=(m:TaskMetrics):TaskMetrics = { taskMap += (m.id->m) ; m }
- def +=(m:SimulationMetrics):SimulationMetrics = { simMap += (m.name->m) ; m }
- def +=(m:ResourceMetrics):ResourceMetrics = { resourceMap += (m.name->m) ; m }
-
- def +=(task:Task):TaskMetrics = this += TaskMetrics(task)
- def +=(s:Simulation,t:Long):SimulationMetrics = this += SimulationMetrics(s.name,t)
- def +=(r:TaskResource):ResourceMetrics = this += ResourceMetrics(r)
-
-
+ val taskMap = scala.collection.mutable.Map[Long, TaskMetrics]()
+ val simMap = scala.collection.mutable.Map[String, SimulationMetrics]()
+ val resourceMap = scala.collection.mutable.Map[String, ResourceMetrics]()
+
+ // Set
+
+ def +=(m: TaskMetrics): TaskMetrics = { taskMap += (m.id -> m); m }
+ def +=(m: SimulationMetrics): SimulationMetrics = { simMap += (m.name -> m); m }
+ def +=(m: ResourceMetrics): ResourceMetrics = { resourceMap += (m.name -> m); m }
+
+ def +=(task: Task): TaskMetrics = this += TaskMetrics(task)
+ def +=(s: Simulation, t: Long): SimulationMetrics = this += SimulationMetrics(s.name, t)
+ def +=(r: TaskResource): ResourceMetrics = this += ResourceMetrics(r)
+
// Update
-
- def ^(taskID:Long)(u:TaskMetrics=>TaskMetrics):Option[TaskMetrics] =
- taskMap.get(taskID).map { m => this += u(m) }
- def ^(task:Task)(u:TaskMetrics=>TaskMetrics):Option[TaskMetrics] =
- taskMap.get(task.id).map { m => this += u(m) }
- def ^(simulation:Simulation)(u:SimulationMetrics=>SimulationMetrics):Option[SimulationMetrics] =
- simMap.get(simulation.name).map { m => this += u(m) }
- def ^(simulationName:String)(u:SimulationMetrics=>SimulationMetrics):Option[SimulationMetrics] =
- simMap.get(simulationName).map { m => this += u(m) }
-// def ^(resource:String)(u:ResourceMetrics=>ResourceMetrics):Option[ResourceMetrics] =
+
+ def ^(taskID: Long)(u: TaskMetrics => TaskMetrics): Option[TaskMetrics] =
+ taskMap.get(taskID).map { m =>
+ this += u(m)
+ }
+ def ^(task: Task)(u: TaskMetrics => TaskMetrics): Option[TaskMetrics] =
+ taskMap.get(task.id).map { m =>
+ this += u(m)
+ }
+ def ^(simulation: Simulation)(u: SimulationMetrics => SimulationMetrics): Option[SimulationMetrics] =
+ simMap.get(simulation.name).map { m =>
+ this += u(m)
+ }
+ def ^(simulationName: String)(u: SimulationMetrics => SimulationMetrics): Option[SimulationMetrics] =
+ simMap.get(simulationName).map { m =>
+ this += u(m)
+ }
+// def ^(resource:String)(u:ResourceMetrics=>ResourceMetrics):Option[ResourceMetrics] =
// resourceMap.get(resource).map { m => this += u(m) }
- def ^(resource:TaskResource)(u:ResourceMetrics=>ResourceMetrics):Option[ResourceMetrics] =
- resourceMap.get(resource.name).map { m => this += u(m) }
-
-
+ def ^(resource: TaskResource)(u: ResourceMetrics => ResourceMetrics): Option[ResourceMetrics] =
+ resourceMap.get(resource.name).map { m =>
+ this += u(m)
+ }
+
// Getters
-
- def taskMetrics = taskMap.values.toSeq.sortBy(_.started)
+
+ def taskMetrics = taskMap.values.toSeq.sortBy(_.started)
def simulationMetrics = simMap.values.toSeq.sortBy(_.name)
- def resourceMetrics = resourceMap.values.toSeq.sortBy(_.name)
- def taskSet = taskMap.values.map(_.task).toSet[String]
-
+ def resourceMetrics = resourceMap.values.toSeq.sortBy(_.name)
+ def taskSet = taskMap.values.map(_.task).toSet[String]
+
// TODO: we used to have 2 levels of sorting!
- def taskMetricsOf(r:ResourceMetrics) = taskMap.values.toSeq.filter(_.resources.contains(r.name)).sortBy(_.started)
- def taskMetricsOf(s:SimulationMetrics) = taskMap.values.toSeq.filter(_.simulation.equals(s.name)).sortBy(_.started)
+ def taskMetricsOf(r: ResourceMetrics) = taskMap.values.toSeq.filter(_.resources.contains(r.name)).sortBy(_.started)
+ def taskMetricsOf(s: SimulationMetrics) = taskMap.values.toSeq.filter(_.simulation.equals(s.name)).sortBy(_.started)
}
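The `+=` (set) and `^` (update) operators above combine as in this sketch; a `task: Task` is assumed, and updates are no-ops when the key is absent.

    val agg = new SimMetricsAggregator()
    agg += task                       // record TaskMetrics(task)
    agg.^(task) { _.start(10L) }      // mark the task as started at t = 10
    agg.^("MySim") { _.addDelay(5L) } // update a simulation's metrics by name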
diff --git a/src/com/workflowfm/pew/simulation/metrics/Output.scala b/src/com/workflowfm/pew/simulation/metrics/Output.scala
index 07b28ed..83f884b 100644
--- a/src/com/workflowfm/pew/simulation/metrics/Output.scala
+++ b/src/com/workflowfm/pew/simulation/metrics/Output.scala
@@ -1,115 +1,114 @@
package com.workflowfm.pew.simulation.metrics
import scala.collection.immutable.Queue
-import com.workflowfm.pew.metrics.{ FileOutput, MetricsOutput }
+import com.workflowfm.pew.metrics.{FileOutput, MetricsOutput}
-trait SimMetricsOutput extends ((Long,SimMetricsAggregator) => Unit) {
- def and(h:SimMetricsOutput) = SimMetricsOutputs(this,h)
+trait SimMetricsOutput extends ((Long, SimMetricsAggregator) => Unit) {
+ def and(h: SimMetricsOutput) = SimMetricsOutputs(this, h)
}
-case class SimMetricsOutputs(handlers:Queue[SimMetricsOutput]) extends SimMetricsOutput {
- override def apply(time:Long,aggregator:SimMetricsAggregator) = handlers map (_.apply(time,aggregator))
- override def and(h:SimMetricsOutput) = SimMetricsOutputs(handlers :+ h)
+case class SimMetricsOutputs(handlers: Queue[SimMetricsOutput]) extends SimMetricsOutput {
+ override def apply(time: Long, aggregator: SimMetricsAggregator) = handlers map (_.apply(time, aggregator))
+ override def and(h: SimMetricsOutput) = SimMetricsOutputs(handlers :+ h)
}
object SimMetricsOutputs {
- def apply(handlers:SimMetricsOutput*):SimMetricsOutputs = SimMetricsOutputs(Queue[SimMetricsOutput]() ++ handlers)
- def none = SimMetricsOutputs()
+ def apply(handlers: SimMetricsOutput*): SimMetricsOutputs = SimMetricsOutputs(Queue[SimMetricsOutput]() ++ handlers)
+ def none = SimMetricsOutputs()
}
-
trait SimMetricsStringOutput extends SimMetricsOutput {
val nullValue = "NULL"
-
- def taskHeader(separator:String) = Seq("ID","Task","Simulation","Created","Start","Delay","Duration","Cost","Resources").mkString(separator)
- def taskCSV(separator:String, resSeparator:String)(m:TaskMetrics) = m match {
- case TaskMetrics(id,task,sim,ct,st,dur,cost,res) =>
- Seq(id,task,sim,ct,MetricsOutput.formatOption(st,nullValue),m.delay,dur,cost,res.mkString(resSeparator)).mkString(separator)
+
+ def taskHeader(separator: String) =
+ Seq("ID", "Task", "Simulation", "Created", "Start", "Delay", "Duration", "Cost", "Resources").mkString(separator)
+ def taskCSV(separator: String, resSeparator: String)(m: TaskMetrics) = m match {
+ case TaskMetrics(id, task, sim, ct, st, dur, cost, res) =>
+ Seq(id, task, sim, ct, MetricsOutput.formatOption(st, nullValue), m.delay, dur, cost, res.mkString(resSeparator))
+ .mkString(separator)
}
-
- def simHeader(separator:String) = Seq("Name","Start","Duration","Delay","Tasks","Cost","Result").mkString(separator)
- def simCSV(separator:String)(m:SimulationMetrics) = m match {
- case SimulationMetrics(name,st,dur,delay,ts,c,res) =>
- Seq(name,st,dur,delay,ts,c,res).mkString(separator)
+
+ def simHeader(separator: String) =
+ Seq("Name", "Start", "Duration", "Delay", "Tasks", "Cost", "Result").mkString(separator)
+ def simCSV(separator: String)(m: SimulationMetrics) = m match {
+ case SimulationMetrics(name, st, dur, delay, ts, c, res) =>
+ Seq(name, st, dur, delay, ts, c, res).mkString(separator)
}
- def resHeader(separator:String) = Seq("Name","Busy","Idle","Tasks","Cost").mkString(separator)
- def resCSV(separator:String)(m:ResourceMetrics) = m match {
- case ResourceMetrics(name,b,i,ts,c) =>
- Seq(name,b,i,ts,c).mkString(separator)
+ def resHeader(separator: String) = Seq("Name", "Busy", "Idle", "Tasks", "Cost").mkString(separator)
+ def resCSV(separator: String)(m: ResourceMetrics) = m match {
+ case ResourceMetrics(name, b, i, ts, c) =>
+ Seq(name, b, i, ts, c).mkString(separator)
}
-
- def tasks(aggregator:SimMetricsAggregator,separator:String,lineSep:String="\n",resSeparator:String=";") =
- aggregator.taskMetrics.map(taskCSV(separator,resSeparator)).mkString(lineSep)
- def simulations(aggregator:SimMetricsAggregator,separator:String, lineSep:String="\n") =
+ def tasks(aggregator: SimMetricsAggregator, separator: String, lineSep: String = "\n", resSeparator: String = ";") =
+ aggregator.taskMetrics.map(taskCSV(separator, resSeparator)).mkString(lineSep)
+ def simulations(aggregator: SimMetricsAggregator, separator: String, lineSep: String = "\n") =
aggregator.simulationMetrics.map(simCSV(separator)).mkString(lineSep)
- def resources(aggregator:SimMetricsAggregator,separator:String, lineSep:String="\n") =
+ def resources(aggregator: SimMetricsAggregator, separator: String, lineSep: String = "\n") =
aggregator.resourceMetrics.map(resCSV(separator)).mkString(lineSep)
}
-
-class SimMetricsPrinter extends SimMetricsStringOutput {
- def apply(totalTicks:Long,aggregator:SimMetricsAggregator) = {
- val sep = "\t| "
- val lineSep = "\n"
+class SimMetricsPrinter extends SimMetricsStringOutput {
+ def apply(totalTicks: Long, aggregator: SimMetricsAggregator) = {
+ val sep = "\t| "
+ val lineSep = "\n"
val timeFormat = "YYYY-MM-dd HH:mm:ss.SSS"
- val durFormat = "HH:mm:ss.SSS"
- val nullTime = "NONE"
+ val durFormat = "HH:mm:ss.SSS"
+ val nullTime = "NONE"
println(
-s"""
+ s"""
Tasks
-----
${taskHeader(sep)}
-${tasks(aggregator,sep,lineSep)}
+${tasks(aggregator, sep, lineSep)}
Simulations
-----------
${simHeader(sep)}
-${simulations(aggregator,sep,lineSep)}
+${simulations(aggregator, sep, lineSep)}
Resources
---------
${resHeader(sep)}
-${resources(aggregator,sep,lineSep)}
+${resources(aggregator, sep, lineSep)}
---------
Started: ${MetricsOutput.formatTimeOption(aggregator.start, timeFormat, nullTime)}
Ended: ${MetricsOutput.formatTimeOption(aggregator.end, timeFormat, nullTime)}
Duration: ${MetricsOutput.formatDuration(aggregator.start, aggregator.end, durFormat, nullTime)}
"""
- )
+ )
}
}
-class SimCSVFileOutput(path:String,name:String) extends SimMetricsStringOutput with FileOutput {
+class SimCSVFileOutput(path: String, name: String) extends SimMetricsStringOutput with FileOutput {
import java.io._
val separator = ","
- val lineSep = "\n"
-
- def apply(totalTicks:Long,aggregator:SimMetricsAggregator) = {
- val taskFile = s"$path$name-tasks.csv"
+ val lineSep = "\n"
+
+ def apply(totalTicks: Long, aggregator: SimMetricsAggregator) = {
+ val taskFile = s"$path$name-tasks.csv"
val simulationFile = s"$path$name-simulations.csv"
- val resourceFile = s"$path$name-resources.csv"
- writeToFile(taskFile, taskHeader(separator) + "\n" + tasks(aggregator,separator,lineSep))
- writeToFile(simulationFile, simHeader(separator) + "\n" + simulations(aggregator,separator,lineSep))
- writeToFile(resourceFile, resHeader(separator) + "\n" + resources(aggregator,separator,lineSep))
+ val resourceFile = s"$path$name-resources.csv"
+ writeToFile(taskFile, taskHeader(separator) + "\n" + tasks(aggregator, separator, lineSep))
+ writeToFile(simulationFile, simHeader(separator) + "\n" + simulations(aggregator, separator, lineSep))
+ writeToFile(resourceFile, resHeader(separator) + "\n" + resources(aggregator, separator, lineSep))
}
}
-
-class SimD3Timeline(path:String,file:String,tick:Int=1) extends SimMetricsOutput with FileOutput {
+class SimD3Timeline(path: String, file: String, tick: Int = 1) extends SimMetricsOutput with FileOutput {
import java.io._
-
- override def apply(totalTicks:Long, aggregator:SimMetricsAggregator) = {
- val result = build(aggregator,System.currentTimeMillis())
+
+ override def apply(totalTicks: Long, aggregator: SimMetricsAggregator) = {
+ val result = build(aggregator, System.currentTimeMillis())
println(result)
val dataFile = s"$path$file-simdata.js"
writeToFile(dataFile, result)
}
-
- def build(aggregator:SimMetricsAggregator, now:Long) = {
- var buf:StringBuilder = StringBuilder.newBuilder
+
+ def build(aggregator: SimMetricsAggregator, now: Long) = {
+ var buf: StringBuilder = StringBuilder.newBuilder
buf.append("var tasks = [\n")
for (p <- aggregator.taskSet) buf.append(s"""\t"$p",\n""")
buf.append("];\n\n")
@@ -121,27 +120,26 @@ class SimD3Timeline(path:String,file:String,tick:Int=1) extends SimMetricsOutput
buf.append("];\n")
buf.toString
}
-
- def simulationEntry(s:SimulationMetrics,agg:SimMetricsAggregator) = {
+
+ def simulationEntry(s: SimulationMetrics, agg: SimMetricsAggregator) = {
val times = agg.taskMetricsOf(s).map(taskEntry).mkString(",\n")
-s"""{label: "${s.name}", times: [
+ s"""{label: "${s.name}", times: [
$times
]},"""
}
-
- def resourceEntry(res:ResourceMetrics,agg:SimMetricsAggregator) = {
+
+ def resourceEntry(res: ResourceMetrics, agg: SimMetricsAggregator) = {
val times = agg.taskMetricsOf(res).map(taskEntry).mkString(",\n")
-s"""{label: "${res.name}", times: [
+ s"""{label: "${res.name}", times: [
$times
]},"""
}
-
- def taskEntry(m:TaskMetrics) = {
+ def taskEntry(m: TaskMetrics) = {
// we don't want it to be 0 because D3 doesn't deal with it well
- val start = m.started.getOrElse(1L) * tick
+ val start = m.started.getOrElse(1L) * tick
val finish = (m.started.getOrElse(1L) + m.duration) * tick
- val delay = m.delay * tick
+ val delay = m.delay * tick
s"""\t{"label":"${m.fullName}", task: "${m.task}", "id":${m.id}, "starting_time": $start, "ending_time": $finish, delay: $delay, cost: ${m.cost}}"""
}
}
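Outputs compose with `and`; this sketch fans results out to the console and to CSV files (the path and name, and the `totalTicks`/`aggregator` values, are illustrative).

    val out: SimMetricsOutput = new SimMetricsPrinter() and new SimCSVFileOutput("output/", "run1")
    out(totalTicks, aggregator) // applies every handler in turn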
diff --git a/src/com/workflowfm/pew/stateless/StatelessExecutor.scala b/src/com/workflowfm/pew/stateless/StatelessExecutor.scala
index 412eabf..94d0840 100644
--- a/src/com/workflowfm/pew/stateless/StatelessExecutor.scala
+++ b/src/com/workflowfm/pew/stateless/StatelessExecutor.scala
@@ -11,24 +11,21 @@ import scala.concurrent.{Future, _}
*
* @param message Message indicating cause of failure.
*/
-class ShutdownExecutorException( message: String ) extends Exception( message )
+class ShutdownExecutorException(message: String) extends Exception(message)
/** Boots up necessary Workflow actors for a "stateless" (no RAM) workflow execution.
*
*/
-abstract class StatelessExecutor[KeyT]
- extends ProcessExecutor[KeyT] { this: PiObservable[KeyT] =>
+abstract class StatelessExecutor[KeyT] extends ProcessExecutor[KeyT] { this: PiObservable[KeyT] =>
def shutdown: Future[Done]
def forceShutdown: Future[Done] = shutdown
- final def syncShutdown( timeout: Duration = Duration.Inf ): Done
- = Await.result( shutdown, timeout )
+ final def syncShutdown(timeout: Duration = Duration.Inf): Done = Await.result(shutdown, timeout)
}
// TODO: Should really be (PiInstance, CallRefID: Int)
-case class CallRef( id: Int )
+case class CallRef(id: Int)
object CallRef {
- val IDLE: CallRef = CallRef( 0 )
+ val IDLE: CallRef = CallRef(0)
}
-
diff --git a/src/com/workflowfm/pew/stateless/StatelessMessages.scala b/src/com/workflowfm/pew/stateless/StatelessMessages.scala
index 8ff6d38..b1e5f6b 100644
--- a/src/com/workflowfm/pew/stateless/StatelessMessages.scala
+++ b/src/com/workflowfm/pew/stateless/StatelessMessages.scala
@@ -38,13 +38,12 @@ object StatelessMessages {
* @param args A collection of result objects for open threads that need to be posted.
*/
case class ReduceRequest(
- pii: PiInstance[ObjectId],
- args: Seq[CallResult]
+ pii: PiInstance[ObjectId],
+ args: Seq[CallResult]
+ ) extends AnyMsg
+ with HasPii {
- ) extends AnyMsg with HasPii {
-
- override def toString: String
- = s"ReduceRequest($piiId, $args)"
+ override def toString: String = s"ReduceRequest($piiId, $args)"
}
/** Emitted by AtomicProcess executors to sequence their results into a common timeline.
@@ -56,9 +55,8 @@ object StatelessMessages {
* PiObject representing the result.
*/
case class SequenceRequest(
- piiId: ObjectId,
- request: CallResult
-
+ piiId: ObjectId,
+ request: CallResult
) extends PiiHistory
/** A PiiHistory message which helps collect outstanding SequenceRequests after a failure.
@@ -72,20 +70,17 @@ object StatelessMessages {
* @param errors All the known exceptions encountered during the execution of this PiInstance.
*/
case class SequenceFailure(
- pii: Either[ObjectId, PiInstance[ObjectId]],
- returns: Seq[CallResult],
- errors: Seq[PiFailure[ObjectId]]
-
+ pii: Either[ObjectId, PiInstance[ObjectId]],
+ returns: Seq[CallResult],
+ errors: Seq[PiFailure[ObjectId]]
) extends PiiHistory {
- override def piiId: ObjectId
- = pii match {
- case Left( _piiId ) => _piiId
- case Right( _pii ) => _pii.id
- }
+ override def piiId: ObjectId = pii match {
+ case Left(_piiId) => _piiId
+ case Right(_pii) => _pii.id
+ }
- override def toString: String
- = s"SequenceFailure($piiId, $returns, $errors)"
+ override def toString: String = s"SequenceFailure($piiId, $returns, $errors)"
}
object SequenceFailure {
@@ -97,8 +92,8 @@ object StatelessMessages {
* @param piEx The exception which was encountered.
* @return A Sequence failure object containing a single CallRef and ExceptionEvent.
*/
- def apply( id: ObjectId, ref: CallRef, piEx: PiException[ObjectId] ): SequenceFailure
- = SequenceFailure( Left(id), Seq((ref, null)), Seq( piEx.event ) )
+ def apply(id: ObjectId, ref: CallRef, piEx: PiException[ObjectId]): SequenceFailure =
+ SequenceFailure(Left(id), Seq((ref, null)), Seq(piEx.event))
}
/** Emitted by the Reducer component to log the latest PiInstance state to the PiHistory.
@@ -107,9 +102,9 @@ object StatelessMessages {
* @param pii The latest state of the PiInstance.
*/
case class PiiUpdate(
- pii: PiInstance[ObjectId]
-
- ) extends PiiHistory with HasPii
+ pii: PiInstance[ObjectId]
+ ) extends PiiHistory
+ with HasPii
/** Emitted by the Reducer to the AtomicProcess executors, assigns responsibility for
* executing an AtomicProcess to an AtomicProcessExecutor. Uniquely identified by the
@@ -121,15 +116,14 @@ object StatelessMessages {
* @param args The value of each of the arguments to the AtomicProcess.
*/
case class Assignment(
- pii: PiInstance[ObjectId],
- callRef: CallRef,
- process: String,
- args: Seq[PiResource]
-
- ) extends AnyMsg with HasPii {
-
- override def toString: String
- = s"Assignment($piiId, call ${callRef.id}, AP '$process', args $args)"
+ pii: PiInstance[ObjectId],
+ callRef: CallRef,
+ process: String,
+ args: Seq[PiResource]
+ ) extends AnyMsg
+ with HasPii {
+
+ override def toString: String = s"Assignment($piiId, call ${callRef.id}, AP '$process', args $args)"
}
/** An output message sent to the Results topic. Has no impact on PiInstance execution,
@@ -137,7 +131,7 @@ object StatelessMessages {
*
* @param event The `PiEvent` being logged.
*/
- case class PiiLog( event: PiEvent[ObjectId] ) extends AnyMsg {
+ case class PiiLog(event: PiEvent[ObjectId]) extends AnyMsg {
override def piiId: ObjectId = event.id
}
-}
\ No newline at end of file
+}
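For orientation, a sketch of how these messages chain for one atomic call; `pii`, `args` and `res` are assumed values, the process name is hypothetical, and a CallResult pairs a CallRef with its result.

    ReduceRequest(pii, Seq())                   // seeds a reduction for a fresh instance
    Assignment(pii, CallRef(1), "MyProc", args) // reducer hands a call to an executor
    SequenceRequest(pii.id, (CallRef(1), res))  // executor posts the call's result
    PiiLog(PiEventStart(pii))                   // observable events go to the Results topic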
diff --git a/src/com/workflowfm/pew/stateless/components/AtomicExecutor.scala b/src/com/workflowfm/pew/stateless/components/AtomicExecutor.scala
index d77255d..c4e67f6 100644
--- a/src/com/workflowfm/pew/stateless/components/AtomicExecutor.scala
+++ b/src/com/workflowfm/pew/stateless/components/AtomicExecutor.scala
@@ -9,28 +9,27 @@ import scala.concurrent._
/** Runs AtomicProcesses and handles all necessary communication with the various stateless workflow actors.
*
*/
-class AtomicExecutor( implicit exec: ExecutionContext )
- extends StatelessComponent[Assignment, Future[Seq[AnyMsg]]] {
+class AtomicExecutor(implicit exec: ExecutionContext) extends StatelessComponent[Assignment, Future[Seq[AnyMsg]]] {
override def respond: Assignment => Future[Seq[AnyMsg]] = {
- case Assignment( pii, ref, name, args ) =>
+ case Assignment(pii, ref, name, args) =>
+ def fail(piEx: PiException[ObjectId]): Future[SequenceFailure] =
+ Future.successful(SequenceFailure(pii.id, ref, piEx))
- def fail( piEx: PiException[ObjectId]): Future[SequenceFailure]
- = Future.successful( SequenceFailure( pii.id, ref, piEx ) )
-
- def failOther( t: Throwable ): Future[SequenceFailure]
- = fail( RemoteProcessException[ObjectId]( pii.id, ref.id, t ) )
+ def failOther(t: Throwable): Future[SequenceFailure] = fail(RemoteProcessException[ObjectId](pii.id, ref.id, t))
try {
- val proc: MetadataAtomicProcess = pii.getAtomicProc( name )
+ val proc: MetadataAtomicProcess = pii.getAtomicProc(name)
try {
- proc.runMeta( args.map(_.obj) )
+ proc
+ .runMeta(args.map(_.obj))
.map({
- case (res, meta) => Seq(
- SequenceRequest( pii.id, (ref, res) ),
- PiiLog( PiEventReturn( pii.id, ref.id, res, meta ) )
- )
+ case (res, meta) =>
+ Seq(
+ SequenceRequest(pii.id, (ref, res)),
+ PiiLog(PiEventReturn(pii.id, ref.id, res, meta))
+ )
})
.recoverWith({ case t: Throwable => failOther(t).map(Seq(_)) })
@@ -44,11 +43,11 @@ class AtomicExecutor( implicit exec: ExecutionContext )
}
} catch {
- case ex: UnknownProcessException[ObjectId] => fail( ex ).map(Seq(_))
- case ex: AtomicProcessIsCompositeException[ObjectId] => fail( ex ).map(Seq(_))
- case ex: Exception => failOther( ex ).map(Seq(_))
+ case ex: UnknownProcessException[ObjectId] => fail(ex).map(Seq(_))
+ case ex: AtomicProcessIsCompositeException[ObjectId] => fail(ex).map(Seq(_))
+ case ex: Exception => failOther(ex).map(Seq(_))
}
}
}
-case class PreExecutionException( t: Throwable ) extends Throwable
+case class PreExecutionException(t: Throwable) extends Throwable
diff --git a/src/com/workflowfm/pew/stateless/components/Reducer.scala b/src/com/workflowfm/pew/stateless/components/Reducer.scala
index 2eab794..59f6bbb 100644
--- a/src/com/workflowfm/pew/stateless/components/Reducer.scala
+++ b/src/com/workflowfm/pew/stateless/components/Reducer.scala
@@ -11,66 +11,68 @@ import scala.concurrent.ExecutionContext
* Separated from StatelessExecActor to ensure the WorkflowState message is actually posted before the tasks are executed.
*/
class Reducer(
- implicit exec: ExecutionContext
-
- ) extends StatelessComponent[ReduceRequest, Seq[AnyMsg]] {
+ implicit exec: ExecutionContext
+) extends StatelessComponent[ReduceRequest, Seq[AnyMsg]] {
import com.workflowfm.pew.stateless.StatelessMessages._
- override def respond: ReduceRequest => Seq[AnyMsg] = request
- => piiReduce( request.pii.postResult( request.args.map( a => (a._1.id, a._2)) ) )
+ override def respond: ReduceRequest => Seq[AnyMsg] =
+ request => piiReduce(request.pii.postResult(request.args.map(a => (a._1.id, a._2))))
- def piiReduce( pii: PiInstance[ObjectId] ): Seq[AnyMsg] = {
+ def piiReduce(pii: PiInstance[ObjectId]): Seq[AnyMsg] = {
val piiReduced: PiInstance[ObjectId] = pii.reduce
- if (piiReduced.completed) Seq( piiReduced.result match {
- case Some(result) => PiiLog( PiEventResult( piiReduced, result ) )
- case None => PiiLog( PiFailureNoResult( piiReduced ) )
+ if (piiReduced.completed) Seq(piiReduced.result match {
+ case Some(result) => PiiLog(PiEventResult(piiReduced, result))
+ case None => PiiLog(PiFailureNoResult(piiReduced))
- }) else {
+ })
+ else {
- val ( toCall, piiReady ) = handleThreads( piiReduced )
- val futureCalls = (toCall map CallRef.apply) zip ( toCall flatMap piiReady.piFutureOf )
+ val (toCall, piiReady) = handleThreads(piiReduced)
+ val futureCalls = (toCall map CallRef.apply) zip (toCall flatMap piiReady.piFutureOf)
- val updateMsg = PiiUpdate( piiReady )
- val requests = futureCalls flatMap (getMessages( piiReady )(_, _)).tupled
+ val updateMsg = PiiUpdate(piiReady)
+ val requests = futureCalls flatMap (getMessages(piiReady)(_, _)).tupled
requests :+ updateMsg
}
}
- def handleThreads( piInst: PiInstance[ObjectId] ): ( Seq[Int], PiInstance[ObjectId] ) = {
- piInst.handleThreads( handleThread( piInst ) )
+ def handleThreads(piInst: PiInstance[ObjectId]): (Seq[Int], PiInstance[ObjectId]) = {
+ piInst.handleThreads(handleThread(piInst))
}
// Jev, TODO confirm that exceptions here would've bottled the whole evaluation.
- def handleThread( i: PiInstance[ObjectId])( ref: Int, f: PiFuture ): Boolean = {
+ def handleThread(i: PiInstance[ObjectId])(ref: Int, f: PiFuture): Boolean = {
f match {
- case PiFuture(name, _, _) => i.getProc(name) match {
+ case PiFuture(name, _, _) =>
+ i.getProc(name) match {
- case Some(_: MetadataAtomicProcess) => true
+ case Some(_: MetadataAtomicProcess) => true
- case None => throw UnknownProcessException( i, name )
- case Some(_) => throw AtomicProcessIsCompositeException( i, name )
- }
+ case None => throw UnknownProcessException(i, name)
+ case Some(_) => throw AtomicProcessIsCompositeException(i, name)
+ }
}
}
- def getMessages( piReduced: PiInstance[ObjectId] )( ref: CallRef, fut: PiFuture ): Seq[AnyMsg] = {
+ def getMessages(piReduced: PiInstance[ObjectId])(ref: CallRef, fut: PiFuture): Seq[AnyMsg] = {
val piiId: ObjectId = piReduced.id
fut match {
case PiFuture(name, _, args) =>
piReduced.getProc(name) match {
// Request that the process be executed.
- case Some(p: MetadataAtomicProcess) => Seq(
- Assignment( piReduced, ref, p.name, args ),
- PiiLog( PiEventCall( piiId, ref.id, p, args map (_.obj) ) )
- )
+ case Some(p: MetadataAtomicProcess) =>
+ Seq(
+ Assignment(piReduced, ref, p.name, args),
+ PiiLog(PiEventCall(piiId, ref.id, p, args map (_.obj)))
+ )
// These should never happen! We already checked in the reducer!
- case None => Seq( SequenceFailure( piiId, ref, UnknownProcessException( piReduced, name ) ) )
- case Some(_) => Seq( SequenceFailure( piiId, ref, AtomicProcessIsCompositeException( piReduced, name ) ) )
+ case None => Seq(SequenceFailure(piiId, ref, UnknownProcessException(piReduced, name)))
+ case Some(_) => Seq(SequenceFailure(piiId, ref, AtomicProcessIsCompositeException(piReduced, name)))
}
}
}
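In short, one reduction step emits either a terminal PiiLog (completed instance) or, per ready call, an Assignment plus PiiLog(PiEventCall), followed by a PiiUpdate with the reduced state. A direct call, sketched with an assumed `pii: PiInstance[ObjectId]`:

    implicit val ec: ExecutionContext = ExecutionContext.global
    val msgs: Seq[AnyMsg] = new Reducer().piiReduce(pii)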
diff --git a/src/com/workflowfm/pew/stateless/components/ResultListener.scala b/src/com/workflowfm/pew/stateless/components/ResultListener.scala
index efc9060..c42912e 100644
--- a/src/com/workflowfm/pew/stateless/components/ResultListener.scala
+++ b/src/com/workflowfm/pew/stateless/components/ResultListener.scala
@@ -7,16 +7,17 @@ import org.bson.types.ObjectId
import scala.language.implicitConversions
import scala.concurrent.ExecutionContext
-class ResultListener (implicit val executionContext:ExecutionContext = ExecutionContext.global)
- extends StatelessComponent[PiiLog, Unit]
- with PiObservable[ObjectId] with SimplePiObservable[ObjectId] {
+class ResultListener(implicit val executionContext: ExecutionContext = ExecutionContext.global)
+ extends StatelessComponent[PiiLog, Unit]
+ with PiObservable[ObjectId]
+ with SimplePiObservable[ObjectId] {
- override def respond: PiiLog => Unit = msg => publish( msg.event )
+ override def respond: PiiLog => Unit = msg => publish(msg.event)
}
object ResultListener {
- def apply( handlers: PiEventHandler[ObjectId]* ): ResultListener = {
+ def apply(handlers: PiEventHandler[ObjectId]*): ResultListener = {
val newResultListener: ResultListener = new ResultListener
handlers foreach newResultListener.subscribe
newResultListener
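A sketch of wiring handlers into a ResultListener; `handler` stands for any PiEventHandler[ObjectId] and `pii` is an assumed instance.

    val listener = ResultListener(handler) // subscribes the handler on construction
    listener(PiiLog(PiEventStart(pii)))    // respond publishes the wrapped PiEvent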
diff --git a/src/com/workflowfm/pew/stateless/components/StatelessComponent.scala b/src/com/workflowfm/pew/stateless/components/StatelessComponent.scala
index 125e7ee..bf804b7 100644
--- a/src/com/workflowfm/pew/stateless/components/StatelessComponent.scala
+++ b/src/com/workflowfm/pew/stateless/components/StatelessComponent.scala
@@ -1,9 +1,8 @@
package com.workflowfm.pew.stateless.components
-abstract class StatelessComponent[In, Out]
- extends Function[In, Out] {
+abstract class StatelessComponent[In, Out] extends Function[In, Out] {
def respond: In => Out
- override def apply( in: In ): Out = respond( in )
-}
\ No newline at end of file
+ override def apply(in: In): Out = respond(in)
+}
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/KafkaExecutor.scala b/src/com/workflowfm/pew/stateless/instances/kafka/KafkaExecutor.scala
index 71fd750..e54d361 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/KafkaExecutor.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/KafkaExecutor.scala
@@ -10,8 +10,8 @@ import scala.concurrent.ExecutionContext
/** Wrapper for a CustomKafkaExecutor that controls the lifecycle of an arbitrary collection
* of local Kafka connectors.
*/
-class CustomKafkaExecutor( components: DrainControl* )( implicit env: KafkaExecutorEnvironment )
- extends MinimalKafkaExecutor()( env ) {
+class CustomKafkaExecutor(components: DrainControl*)(implicit env: KafkaExecutorEnvironment)
+ extends MinimalKafkaExecutor()(env) {
override lazy val allControls: Seq[DrainControl] = eventHandlerControl +: components.toSeq
}
@@ -25,16 +25,15 @@ object CompleteKafkaExecutor {
import KafkaConnectors._
- def apply[ResultT]( implicit settings: KafkaExecutorSettings )
- : CustomKafkaExecutor = {
+ def apply[ResultT](implicit settings: KafkaExecutorSettings): CustomKafkaExecutor = {
- implicit val env: KafkaExecutorEnvironment = settings.createEnvironment()
+ implicit val env: KafkaExecutorEnvironment = settings.createEnvironment()
implicit val executionContext: ExecutionContext = env.context
new CustomKafkaExecutor(
indySequencer,
- indyReducer( new Reducer ),
- indyAtomicExecutor( new AtomicExecutor() )
+ indyReducer(new Reducer),
+ indyAtomicExecutor(new AtomicExecutor())
)
}
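Bootstrapping sketch, assuming a concrete KafkaExecutorSettings for the target cluster (`mySettings` is hypothetical):

    implicit val settings: KafkaExecutorSettings = mySettings // cluster-specific, assumed
    val executor = CompleteKafkaExecutor[Unit] // sequencer + reducer + atomic executor + result listener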
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/MinimalKafkaExecutor.scala b/src/com/workflowfm/pew/stateless/instances/kafka/MinimalKafkaExecutor.scala
index 089fdea..5dc2120 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/MinimalKafkaExecutor.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/MinimalKafkaExecutor.scala
@@ -18,8 +18,9 @@ import scala.concurrent._
  * Other components are required to run on the Kafka cluster
* but need not be situated on the local machine.
*/
-class MinimalKafkaExecutor( implicit val environment: KafkaExecutorEnvironment )
- extends StatelessExecutor[ObjectId] with DelegatedPiObservable[ObjectId] {
+class MinimalKafkaExecutor(implicit val environment: KafkaExecutorEnvironment)
+ extends StatelessExecutor[ObjectId]
+ with DelegatedPiObservable[ObjectId] {
private val logger: Logger = LoggerFactory.getLogger(this.getClass)
@@ -27,9 +28,9 @@ class MinimalKafkaExecutor( implicit val environment: KafkaExecutorEnvironment )
import com.workflowfm.pew.stateless.instances.kafka.components.KafkaConnectors._
// Implicit settings.
- override implicit val executionContext: ExecutionContext = environment.context
- implicit val actorSystem: ActorSystem = environment.actors
- implicit val materializer: Materializer = environment.materializer
+ implicit override val executionContext: ExecutionContext = environment.context
+ implicit val actorSystem: ActorSystem = environment.actors
+ implicit val materializer: Materializer = environment.materializer
protected var piiStore: PiInstanceStore[ObjectId] = SimpleInstanceStore()
@@ -41,45 +42,47 @@ class MinimalKafkaExecutor( implicit val environment: KafkaExecutorEnvironment )
* @param args The PiObject arguments to be passed to the process
* @return A Future with the new unique ID that was generated
*/
- override def init( process: PiProcess, args: Seq[PiObject] ): Future[ObjectId] = {
- if (isShutdown) throw new ShutdownExecutorException( "`init` was called." )
+ override def init(process: PiProcess, args: Seq[PiObject]): Future[ObjectId] = {
+ if (isShutdown) throw new ShutdownExecutorException("`init` was called.")
val piiId = ObjectId.get
- piiStore = piiStore.put( PiInstance( piiId, process, args:_* ) )
- Future.successful( piiId )
+ piiStore = piiStore.put(PiInstance(piiId, process, args: _*))
+ Future.successful(piiId)
}
- override def start( id: ObjectId ): Unit = {
- if (isShutdown) throw new ShutdownExecutorException( "`start` was called." )
+ override def start(id: ObjectId): Unit = {
+ if (isShutdown) throw new ShutdownExecutorException("`start` was called.")
- piiStore.get( id ) match {
+ piiStore.get(id) match {
case None =>
- eventHandler.publish( PiFailureNoSuchInstance( id ) )
+ eventHandler.publish(PiFailureNoSuchInstance(id))
- case Some( pii ) =>
- logger.info( "Seeding initial 'ReduceRequest'." )
+ case Some(pii) =>
+ logger.info("Seeding initial 'ReduceRequest'.")
sendMessages(
- ReduceRequest( pii, Seq() ),
- PiiLog( PiEventStart( pii ) )
+ ReduceRequest(pii, Seq()),
+ PiiLog(PiEventStart(pii))
)
}
}
// Necessary local KafkaComponent instance.
- val eventHandler: ResultListener = new ResultListener
+ val eventHandler: ResultListener = new ResultListener
override val worker: PiObservable[ObjectId] = eventHandler
- val eventHandlerControl: DrainControl = uniqueResultListener( eventHandler )
- lazy val allControls: Seq[DrainControl] = Seq( eventHandlerControl )
+ val eventHandlerControl: DrainControl = uniqueResultListener(eventHandler)
+ lazy val allControls: Seq[DrainControl] = Seq(eventHandlerControl)
override def shutdown: Future[Done] = {
- KafkaConnectors.drainAndShutdownAll( allControls )
- .flatMap( _ => environment.actors.terminate().map( _ => Done ) )
+ KafkaConnectors
+ .drainAndShutdownAll(allControls)
+ .flatMap(_ => environment.actors.terminate().map(_ => Done))
}
override def forceShutdown: Future[Done] = {
- KafkaConnectors.shutdownAll( allControls )
- .flatMap( _ => environment.actors.terminate().map( _ => Done ) )
+ KafkaConnectors
+ .shutdownAll(allControls)
+ .flatMap(_ => environment.actors.terminate().map(_ => Done))
}
def isShutdown: Boolean = eventHandlerControl.isShutdown.isCompleted
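
For orientation, a minimal lifecycle sketch for this executor (a sketch only; `settings` stands for any concrete `KafkaExecutorSettings`, and `myProcess`/`myArgs` are placeholder arguments):

    implicit val env: KafkaExecutorEnvironment = settings.createEnvironment()
    val executor = new MinimalKafkaExecutor()

    // `init` registers the PiInstance locally; `start` seeds the initial ReduceRequest.
    val futId: Future[ObjectId] = executor.init(myProcess, myArgs)
    futId.foreach(executor.start)(env.context)

    // Drain in-flight messages before terminating the actor system.
    executor.shutdown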
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/components/KafkaConnectors.scala b/src/com/workflowfm/pew/stateless/instances/kafka/components/KafkaConnectors.scala
index 1d79764..db6d9ae 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/components/KafkaConnectors.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/components/KafkaConnectors.scala
@@ -32,8 +32,8 @@ object KafkaConnectors {
import StatelessMessages._
type DrainControl = DrainingControl[Done]
- type Environment = KafkaExecutorEnvironment
- type Settings = KafkaExecutorSettings
+ type Environment = KafkaExecutorEnvironment
+ type Settings = KafkaExecutorSettings
/** Creates a temporary Kafka Producer capable of sending a single message.
*
@@ -41,10 +41,11 @@ object KafkaConnectors {
* @param env KafkaExecutorEnvironment controlling the interface with the Kafka Driver.
* @return A Future that completes once all the given messages have been sent.
*/
- def sendMessages( msgs: AnyMsg* )( implicit env: Environment ): Future[Done]
- = Untracked.source( msgs )
- .via( flowLogOut )
- .runWith( Untracked.sink )( env.materializer )
+ def sendMessages(msgs: AnyMsg*)(implicit env: Environment): Future[Done] =
+ Untracked
+ .source(msgs)
+ .via(flowLogOut)
+ .runWith(Untracked.sink)(env.materializer)
/** Run an independent reducer off of a ReduceRequest topic. This allows a
* reducer to create responses to each ReduceRequest individually by using the
@@ -54,14 +55,13 @@ object KafkaConnectors {
* @param env KafkaExecutorEnvironment controlling the interface with the Kafka Driver.
* @return Control object for the running process.
*/
- def indyReducer( red: Reducer )( implicit env: Environment ): DrainControl
- = run(
- srcReduceRequest[PartTracked]
+ def indyReducer(red: Reducer)(implicit env: Environment): DrainControl = run(
+ srcReduceRequest[PartTracked]
via flowLogIn
- via flowRespond( red )
+ via flowRespond(red)
via flowLogOut,
- Tracked.sinkMulti[PartTracked, AnyMsg]
- )
+ Tracked.sinkMulti[PartTracked, AnyMsg]
+ )
/** Run an independent sequencer off of a PiiHistory topic, outputting sequenced
* ReduceRequests into a separate ReduceRequest topic.
@@ -69,16 +69,14 @@ object KafkaConnectors {
* @param env KafkaExecutorEnvironment controlling the interface with the Kafka Driver.
* @return Control object for the running process.
*/
- def indySequencer( implicit env: Environment ): DrainControl
- = run(
- srcPiiHistory[PartTracked]
+ def indySequencer(implicit env: Environment): DrainControl = run(
+ srcPiiHistory[PartTracked]
via flowLogIn
- groupBy( Int.MaxValue, _.part )
+ groupBy (Int.MaxValue, _.part)
via flowSequencer
- via flowLogOut
- mergeSubstreams,
- Tracked.sinkMulti[PartTracked, AnyMsg]
- )
+ via flowLogOut mergeSubstreams,
+ Tracked.sinkMulti[PartTracked, AnyMsg]
+ )
/** Run a reducer directly off the output of a Sequencer. Doing this negates the need
* for a ReduceRequest topic; however, it does necessitate the consumption of multiple
@@ -106,17 +104,15 @@ object KafkaConnectors {
* @param env KafkaExecutorEnvironment controlling the interface with the Kafka Driver.
* @return Control object for the running process.
*/
- def indyAtomicExecutor( exec: AtomicExecutor, threadsPerPart: Int = 1 )( implicit env: Environment ): DrainControl
- = run(
- srcAssignment[PartTracked]
+ def indyAtomicExecutor(exec: AtomicExecutor, threadsPerPart: Int = 1)(implicit env: Environment): DrainControl = run(
+ srcAssignment[PartTracked]
via flowLogIn
- groupBy( Int.MaxValue, _.part )
- via flowRespond( exec )
- via flowWaitFuture( threadsPerPart )
- via flowLogOut
- mergeSubstreams,
- Tracked.sinkMulti[PartTracked, AnyMsg]
- )
+ groupBy (Int.MaxValue, _.part)
+ via flowRespond(exec)
+ via flowWaitFuture(threadsPerPart)
+ via flowLogOut mergeSubstreams,
+ Tracked.sinkMulti[PartTracked, AnyMsg]
+ )
/** Restart a terminated ResultListener group, join an existing group, or start a ResultListener with a specific
* group id. Useful as PiiResult messages might need to be visible to multiple ResultListeners.
@@ -124,9 +120,8 @@ object KafkaConnectors {
* @param env KafkaExecutorEnvironment controlling the interface with the Kafka Driver.
* @return Control object for the running process.
*/
- def specificResultListener( groupId: String )( resl: ResultListener )( implicit env: Environment ): DrainControl
- = run(
- srcResult[Untracked]( groupId )
+ def specificResultListener(groupId: String)(resl: ResultListener)(implicit env: Environment): DrainControl = run(
+ srcResult[Untracked](groupId)
wireTap { msg =>
msg.value.event match {
case res: PiEventFinish[_] =>
@@ -135,9 +130,9 @@ object KafkaConnectors {
case _ => // Other messages are uninteresting.
}
}
- via flowRespond( resl ),
- Sink.ignore
- )
+ via flowRespond(resl),
+ Sink.ignore
+ )
/** Override `source` to give the handler a unique group id so that each KafkaEventHandler
* component can listen to all events on the cluster. The ResultListener is responsible
@@ -146,22 +141,20 @@ object KafkaConnectors {
* @param env KafkaExecutorEnvironment controlling the interface with the Kafka Driver.
* @return Control object for the running process.
*/
- def uniqueResultListener( resl: ResultListener )( implicit env: Environment ): DrainControl
- = specificResultListener( "Event-Group-" + ObjectId.get.toHexString )( resl )
+ def uniqueResultListener(resl: ResultListener)(implicit env: Environment): DrainControl =
+ specificResultListener("Event-Group-" + ObjectId.get.toHexString)(resl)
- def shutdown( controls: Control* )( implicit env: Environment ): Future[Done]
- = shutdownAll( controls )
+ def shutdown(controls: Control*)(implicit env: Environment): Future[Done] = shutdownAll(controls)
- def shutdownAll( controls: Seq[Control] )( implicit env: Environment ): Future[Done] = {
+ def shutdownAll(controls: Seq[Control])(implicit env: Environment): Future[Done] = {
implicit val ctx: ExecutionContext = env.context
- Future.sequence( controls map (_.shutdown()) ).map( _ => Done )
+ Future.sequence(controls map (_.shutdown())).map(_ => Done)
}
- def drainAndShutdown( controls: DrainControl* )( implicit env: Environment ): Future[Done]
- = drainAndShutdownAll( controls )
+ def drainAndShutdown(controls: DrainControl*)(implicit env: Environment): Future[Done] = drainAndShutdownAll(controls)
- def drainAndShutdownAll( controls: Seq[DrainControl] )( implicit env: Environment ): Future[Done] = {
+ def drainAndShutdownAll(controls: Seq[DrainControl])(implicit env: Environment): Future[Done] = {
implicit val ctx: ExecutionContext = env.context
- Future.sequence( controls map (_.drainAndShutdown()) ).map( _ => Done )
+ Future.sequence(controls map (_.drainAndShutdown())).map(_ => Done)
}
}
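
A wiring sketch for a full set of independent components using the connectors above (assuming `settings` is a concrete `KafkaExecutorSettings` and that `Reducer` has a no-argument constructor):

    implicit val env: KafkaExecutorEnvironment = settings.createEnvironment()

    val controls: Seq[DrainControl] = Seq(
      indySequencer,                             // sequences the PiiHistory topic into ReduceRequests
      indyReducer(new Reducer),                  // responds to each ReduceRequest individually
      indyAtomicExecutor(new AtomicExecutor())   // executes assigned atomic processes
    )

    // Drain all consumers cleanly once the components are no longer needed.
    KafkaConnectors.drainAndShutdownAll(controls)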
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/components/KafkaWrapperFlows.scala b/src/com/workflowfm/pew/stateless/instances/kafka/components/KafkaWrapperFlows.scala
index 3fcc3f1..2ef6e28 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/components/KafkaWrapperFlows.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/components/KafkaWrapperFlows.scala
@@ -27,15 +27,19 @@ object KafkaWrapperFlows {
*
* @return Akka source containing messages published to the `ReduceRequest` topic.
*/
- def srcReduceRequest[T[X] <: Tracked[X]]( implicit env: Environment, f: TrackedSource[T] ): Source[T[ReduceRequest], Control]
- = Tracked.source( env.csReduceRequest, topics( env.settings.tnReduceRequest ) )
+ def srcReduceRequest[T[X] <: Tracked[X]](
+ implicit env: Environment,
+ f: TrackedSource[T]
+ ): Source[T[ReduceRequest], Control] = Tracked.source(env.csReduceRequest, topics(env.settings.tnReduceRequest))
/** Kafka Consumer for the `Assignment` topic.
*
* @return Akka source containing messages published to the `Assignment` topic.
*/
- def srcAssignment[T[X] <: Tracked[X]]( implicit env: Environment, f: TrackedSource[T] ): Source[T[Assignment], Control]
- = Tracked.source( env.csAssignment, topics( env.settings.tnAssignment ) )
+ def srcAssignment[T[X] <: Tracked[X]](
+ implicit env: Environment,
+ f: TrackedSource[T]
+ ): Source[T[Assignment], Control] = Tracked.source(env.csAssignment, topics(env.settings.tnAssignment))
/** Synchronous handling intended for consuming a single `PiiHistory` partition. Blocks until a ReduceRequest is
* released by receiving both a PiiUpdate *and* at least one SequenceRequest. All SequenceRequests received are
@@ -43,19 +47,21 @@ object KafkaWrapperFlows {
*
* @return Akka flow capable of sequencing a single `PiiHistory` partition into a ReduceRequest stream.
*/
- def flowSequencer[T[X] <: Tracked[X]]: Flow[T[PiiHistory], T[Seq[AnyMsg]], NotUsed]
- = Flow[T[PiiHistory]]
- .scan( new SequenceResponseBuilder[T] )( _ next _ )
- .map( _.response )
- .collect({ case Some( m ) => m })
+ def flowSequencer[T[X] <: Tracked[X]]: Flow[T[PiiHistory], T[Seq[AnyMsg]], NotUsed] =
+ Flow[T[PiiHistory]]
+ .scan(new SequenceResponseBuilder[T])(_ next _)
+ .map(_.response)
+ .collect({ case Some(m) => m })
/** Kafka Consumer for the `PiiHistory` topic. Exposes the individual partition sources so they can be handled individually
* by the `flowPartition` argument.
*
* @return Akka source containing the processed output messages of each partition.
*/
- def srcPiiHistory[T[X] <: Tracked[X]]( implicit env: Environment, f: TrackedSource[T] ): Source[T[PiiHistory], Control]
- = Tracked.source( env.csPiiHistory, topics( env.settings.tnPiiHistory ) )
+ def srcPiiHistory[T[X] <: Tracked[X]](
+ implicit env: Environment,
+ f: TrackedSource[T]
+ ): Source[T[PiiHistory], Control] = Tracked.source(env.csPiiHistory, topics(env.settings.tnPiiHistory))
/** Kafka Consumer for the `Result` topic. Configurable Group Id to allow control of the starting point
* of consumption.
@@ -63,77 +69,80 @@ object KafkaWrapperFlows {
* @param groupId GroupId for the Kafka Consumer.
* @return Akka source containing messages published to the `Result` topic.
*/
- def srcResult[T[X] <: Tracked[X]]( groupId: String )( implicit env: Environment, f: TrackedSource[T] ): Source[T[PiiLog], Control]
- = Tracked.source( env.csResult withGroupId groupId, topics( env.settings.tnResult ) )
+ def srcResult[T[X] <: Tracked[X]](
+ groupId: String
+ )(implicit env: Environment, f: TrackedSource[T]): Source[T[PiiLog], Control] =
+ Tracked.source(env.csResult withGroupId groupId, topics(env.settings.tnResult))
/** Kafka Consumers for each topic merged into a single Akka source.
*
* @return Merged source for all topics.
*/
- def srcAll( implicit env: Environment ): Source[CommitTracked[AnyMsg], Control] = {
+ def srcAll(implicit env: Environment): Source[CommitTracked[AnyMsg], Control] = {
- def src[V <: AnyMsg]( cs: ConsumerSettings[_, V], topic: String ): Source[CommitTracked[AnyMsg], Control]
- = CommitTracked.source( cs, topics( topic ) )
- .map( Tracked.fmap( _.asInstanceOf[AnyMsg] ) )
+ def src[V <: AnyMsg](cs: ConsumerSettings[_, V], topic: String): Source[CommitTracked[AnyMsg], Control] =
+ CommitTracked
+ .source(cs, topics(topic))
+ .map(Tracked.fmap(_.asInstanceOf[AnyMsg]))
Seq(
- src( env.csReduceRequest, env.settings.tnReduceRequest ),
- src( env.csPiiHistory, env.settings.tnPiiHistory ),
- src( env.csAssignment, env.settings.tnAssignment ),
- src( env.csResult, env.settings.tnResult )
-
+ src(env.csReduceRequest, env.settings.tnReduceRequest),
+ src(env.csPiiHistory, env.settings.tnPiiHistory),
+ src(env.csAssignment, env.settings.tnAssignment),
+ src(env.csResult, env.settings.tnResult)
).reduce(_ merge _)
}
-
/// STREAM PROCESSING / AKKA FLOWS ///
- def flowLogIn[T <: Tracked[_]]( implicit env: Environment ): Flow[ T, T, NotUsed ]
- = Flow[T].wireTap( m => env.settings.logMessageReceived( m ) )
+ def flowLogIn[T <: Tracked[_]](implicit env: Environment): Flow[T, T, NotUsed] =
+ Flow[T].wireTap(m => env.settings.logMessageReceived(m))
- def flowLogOut[T <: Tracked[_]]( implicit env: Environment ): Flow[ T, T, NotUsed ]
- = Flow[T].wireTap( m => env.settings.logMessageSent( m ) )
+ def flowLogOut[T <: Tracked[_]](implicit env: Environment): Flow[T, T, NotUsed] =
+ Flow[T].wireTap(m => env.settings.logMessageSent(m))
- def flowRespond[T[X] <: Tracked[X], In, Out]( component: StatelessComponent[In, Out] )
- : Flow[ T[In], T[Out], NotUsed ]
- = Flow[ T[In] ]
- .map( Tracked.fmap[T, In, Out]( component.respond ) )
+ def flowRespond[T[X] <: Tracked[X], In, Out](component: StatelessComponent[In, Out]): Flow[T[In], T[Out], NotUsed] =
+ Flow[T[In]]
+ .map(Tracked.fmap[T, In, Out](component.respond))
- def flowRespondAll[T[X] <: Tracked[X], In, Out]( component: StatelessComponent[In, Out] )
- : Flow[ T[Seq[In]], T[Seq[Out]], NotUsed ]
- = Flow[ T[Seq[In]] ]
- .map( Tracked.fmap[T, Seq[In], Seq[Out]]( _ map component.respond ) )
+ def flowRespondAll[T[X] <: Tracked[X], In, Out](
+ component: StatelessComponent[In, Out]
+ ): Flow[T[Seq[In]], T[Seq[Out]], NotUsed] =
+ Flow[T[Seq[In]]]
+ .map(Tracked.fmap[T, Seq[In], Seq[Out]](_ map component.respond))
- def flowWaitFuture[T[X] <: Tracked[X], Msg]( parallelism: Int )( implicit env: Environment )
- : Flow[ T[Future[Msg]], T[Msg], NotUsed ] = {
+ def flowWaitFuture[T[X] <: Tracked[X], Msg](
+ parallelism: Int
+ )(implicit env: Environment): Flow[T[Future[Msg]], T[Msg], NotUsed] = {
- Flow[ T[Future[Msg]] ].mapAsync( parallelism )( Tracked.exposeFuture(_)(env.context) )
+ Flow[T[Future[Msg]]].mapAsync(parallelism)(Tracked.exposeFuture(_)(env.context))
}
- def flowCheck[T]: Flow[Tracked[T], T, NotUsed]
- = Flow[Tracked[T]]
- .map( _.value )
-
- def flowCheckMulti[T]: Flow[Tracked[Seq[T]], T, NotUsed]
- = Flow[Tracked[Seq[T]]]
- .map( _.value )
- .flatMapConcat( s => Source.fromIterator( () => s.iterator ) )
+ def flowCheck[T]: Flow[Tracked[T], T, NotUsed] =
+ Flow[Tracked[T]]
+ .map(_.value)
+ def flowCheckMulti[T]: Flow[Tracked[Seq[T]], T, NotUsed] =
+ Flow[Tracked[Seq[T]]]
+ .map(_.value)
+ .flatMapConcat(s => Source.fromIterator(() => s.iterator))
/// KAFKA PRODUCER / AKKA SINKS ///
- def sinkPlain( implicit env: Environment ): Sink[AnyMsg, Future[Done]]
- = Producer.plainSink( env.psAllMessages )
- .contramap( env.settings.record )
-
+ def sinkPlain(implicit env: Environment): Sink[AnyMsg, Future[Done]] =
+ Producer
+ .plainSink(env.psAllMessages)
+ .contramap(env.settings.record)
/// OTHER FUNCTIONALITY ///
- def run[T]( source: Source[T, Control], sink: Sink[T, Future[Done]] )( implicit env: Environment ): DrainingControl[Done]
- = source
- .toMat( sink )( Keep.both )
- .mapMaterializedValue( DrainingControl.apply ) // Add shutdown control object.
- .named( this.getClass.getSimpleName ) // Name for debugging.
- .run()( env.materializer )
+ def run[T](source: Source[T, Control], sink: Sink[T, Future[Done]])(
+ implicit env: Environment
+ ): DrainingControl[Done] =
+ source
+ .toMat(sink)(Keep.both)
+ .mapMaterializedValue(DrainingControl.apply) // Add shutdown control object.
+ .named(this.getClass.getSimpleName) // Name for debugging.
+ .run()(env.materializer)
}
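
These building blocks also compose for ad-hoc streams; as a sketch, tailing every topic for debugging (`settings` as above; `Sink.ignore` discards elements after `flowLogIn` has logged them, and offsets are deliberately left uncommitted):

    implicit val env: KafkaExecutorEnvironment = settings.createEnvironment()

    val control: DrainingControl[Done] = run(
      srcAll via flowLogIn,
      Sink.ignore
    )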
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/components/SequenceResponseBuilder.scala b/src/com/workflowfm/pew/stateless/instances/kafka/components/SequenceResponseBuilder.scala
index fb0d9aa..ef68be5 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/components/SequenceResponseBuilder.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/components/SequenceResponseBuilder.scala
@@ -8,63 +8,61 @@ import org.bson.types.ObjectId
import scala.collection.immutable
case class PartialResponse(
- pii: Option[PiInstance[ObjectId]],
- returns: immutable.Seq[CallResult],
- errors: immutable.Seq[PiFailure[ObjectId]]
- ) {
+ pii: Option[PiInstance[ObjectId]],
+ returns: immutable.Seq[CallResult],
+ errors: immutable.Seq[PiFailure[ObjectId]]
+) {
- def this() = this( None, immutable.Seq(), immutable.Seq() )
+ def this() = this(None, immutable.Seq(), immutable.Seq())
/** All the AtomicProcess calls that are known to be concluded within this response.
*/
lazy val finishedCalls: Set[Int] = {
val returnedCalls = returns.map(_._1.id)
- val crashedCalls = errors.collect { case event: PiEventCallEnd[_] => event.ref }
- ( returnedCalls ++ crashedCalls ).toSet
+ val crashedCalls = errors.collect { case event: PiEventCallEnd[_] => event.ref }
+ (returnedCalls ++ crashedCalls).toSet
}
/** We only *want* to send a response when we have actionable data to send:
* - ResultFailure <- The latest PiInstance *and* all the SequenceRequests to dump.
* - ReduceRequest <- The latest PiInstance *and* at least one call ref to sequence.
*/
- val hasPayload: Boolean
- = pii.exists(
- pii =>
- if (errors.nonEmpty)
- pii.called.forall( finishedCalls.contains )
- else
- returns.nonEmpty
- )
+ val hasPayload: Boolean = pii.exists(
+ pii =>
+ if (errors.nonEmpty)
+ pii.called.forall(finishedCalls.contains)
+ else
+ returns.nonEmpty
+ )
val cantSend: Boolean = pii.isEmpty
/** @return A full message which could be built with this data, or nothing.
*/
- def message: Seq[AnyMsg]
- = pii.map( pii =>
- if (errors.nonEmpty) {
- if (hasPayload) Seq( PiiLog( PiFailure.condense( errors ) ) )
- else Seq( SequenceFailure( Right( pii ), returns, errors ) )
- } else Seq( ReduceRequest( pii, returns ) )
-
- ).getOrElse( Seq() )
+ def message: Seq[AnyMsg] =
+ pii
+ .map(
+ pii =>
+ if (errors.nonEmpty) {
+ if (hasPayload) Seq(PiiLog(PiFailure.condense(errors)))
+ else Seq(SequenceFailure(Right(pii), returns, errors))
+ } else Seq(ReduceRequest(pii, returns))
+ )
+ .getOrElse(Seq())
/** Overwrite with the latest PiInstance information.
*/
- def update( newPii: PiInstance[ObjectId] ): PartialResponse
- = PartialResponse( Some( newPii ), returns, errors )
+ def update(newPii: PiInstance[ObjectId]): PartialResponse = PartialResponse(Some(newPii), returns, errors)
/** Update the full list of results to sequence into the next reduce.
*/
- def update( arg: (CallRef, PiObject) ): PartialResponse
- = PartialResponse( pii, arg +: returns, errors )
-
- def merge( failure: SequenceFailure ): PartialResponse
- = PartialResponse(
- failure.pii.toOption.orElse( pii ),
- failure.returns.to[immutable.Seq] ++ returns,
- failure.errors.to[immutable.Seq] ++ errors
- )
+ def update(arg: (CallRef, PiObject)): PartialResponse = PartialResponse(pii, arg +: returns, errors)
+
+ def merge(failure: SequenceFailure): PartialResponse = PartialResponse(
+ failure.pii.toOption.orElse(pii),
+ failure.returns.to[immutable.Seq] ++ returns,
+ failure.errors.to[immutable.Seq] ++ errors
+ )
}
/** Object that aggregates information required to properly respond to a sequence
@@ -79,40 +77,40 @@ case class PartialResponse(
* @param responses A map of partial responses for PiInstances that need reducing.
*/
class SequenceResponseBuilder[T[X] <: Tracked[X]](
- val consuming: immutable.List[T[PiiHistory]],
- val responses: immutable.HashMap[ObjectId, PartialResponse],
- ) {
+ val consuming: immutable.List[T[PiiHistory]],
+ val responses: immutable.HashMap[ObjectId, PartialResponse]
+) {
type PiiT = PiInstance[ObjectId]
- type Arg = (CallRef, PiObject)
+ type Arg = (CallRef, PiObject)
/** @return An empty response builder that consumes no messages.
*/
- def this() = this( List(), new immutable.HashMap )
+ def this() = this(List(), new immutable.HashMap)
/** MultiMessage response for the consumed messages, or None if the
* consumed messages lack sufficient information for a response.
*/
val response: Option[T[Seq[AnyMsg]]] =
- if ( responses.toSeq.exists( _._2.hasPayload ) ) {
+ if (responses.toSeq.exists(_._2.hasPayload)) {
- // All necessary responses:
- // - None if we lack information for a ReduceRequest
- // - Or a complete response. However, this isn't final,
- // it may be updated when integrating new messages.
- lazy val allMessages: Seq[AnyMsg] = responses.values.flatMap( _.message ).toSeq
+ // All necessary responses:
+ // - None if we lack information for a ReduceRequest
+ // - Or a complete response. However, this isn't final,
+ // it may be updated when integrating new messages.
+ lazy val allMessages: Seq[AnyMsg] = responses.values.flatMap(_.message).toSeq
- // All PiInstances we've received information for need to be "reduced"
- // otherwise we would lose their state information when they are consumed.
- // Additionally, for performance: do not emmit empty responses.
- if ( responses.values.exists( _.cantSend ) || allMessages.isEmpty ) None
- else Some( Tracked.freplace( Tracked.flatten(consuming.reverse) )( allMessages ) )
+ // All PiInstances we've received information for need to be "reduced"
+ // otherwise we would lose their state information when they are consumed.
+      // Additionally, for performance: do not emit empty responses.
+ if (responses.values.exists(_.cantSend) || allMessages.isEmpty) None
+ else Some(Tracked.freplace(Tracked.flatten(consuming.reverse))(allMessages))
- } else None // If there is no reduce request with a payload, wait for one.
+ } else None // If there is no reduce request with a payload, wait for one.
// Helper function for updating the responses hash map.
- def update( id: ObjectId, func: PartialResponse => PartialResponse ): immutable.HashMap[ObjectId, PartialResponse]
- = responses.updated( id, func( responses.getOrElse( id, new PartialResponse ) ) )
+ def update(id: ObjectId, func: PartialResponse => PartialResponse): immutable.HashMap[ObjectId, PartialResponse] =
+ responses.updated(id, func(responses.getOrElse(id, new PartialResponse)))
/** Construct a new SequenceResponseBuilder which is responsible for
* properly consuming an additional message.
@@ -120,18 +118,14 @@ class SequenceResponseBuilder[T[X] <: Tracked[X]](
* @param msgIn The next `CommittableMessage` to handle.
* @return A new SequenceResponseBuilder instance.
*/
- def next( msgIn: T[PiiHistory] ): SequenceResponseBuilder[T] =
-
+ def next(msgIn: T[PiiHistory]): SequenceResponseBuilder[T] =
// If we have already sent a response, start constructing the subsequent response instead.
if (response.nonEmpty)
- (new SequenceResponseBuilder).next( msgIn )
-
+ (new SequenceResponseBuilder).next(msgIn)
else // Otherwise begin integrating the new message into this incomplete response.
new SequenceResponseBuilder(
-
// The new message needs to be consumed when this message commits.
msgIn :: consuming,
-
// Update partial results:
update(
msgIn.value.piiId,
@@ -142,4 +136,4 @@ class SequenceResponseBuilder[T[X] <: Tracked[X]](
}
)
)
-}
\ No newline at end of file
+}
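
A sketch of folding the builder over one partition's messages, mirroring `flowSequencer` above (here `history` is a hypothetical list of tracked `PiiHistory` messages from a single partition):

    val history: List[PartTracked[PiiHistory]] = ???

    val responses: Seq[Seq[AnyMsg]] =
      history
        .scanLeft(new SequenceResponseBuilder[PartTracked])(_ next _)
        .flatMap(_.response) // Some(...) only once a payload can be emitted
        .map(_.value)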
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/components/Tracked.scala b/src/com/workflowfm/pew/stateless/instances/kafka/components/Tracked.scala
index 4253631..e994d1d 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/components/Tracked.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/components/Tracked.scala
@@ -29,7 +29,7 @@ abstract class Tracked[Value] {
* @tparam NewValue The result type of the function.
* @return A new Tracked type with the same tracking, containing the new value.
*/
- protected def map[NewValue]( fn: Value => NewValue ): Tracked[NewValue]
+ protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue]
/** Apply a fold to 2 messages so that their tracking information can be combined.
*
@@ -39,34 +39,34 @@ abstract class Tracked[Value] {
* @return A new Tracked object with the result of `fn` on each wrapped object
* which contains the merged tracking information.
*/
- protected def fold[NewValue]( fn: (Value, NewValue) => Value )( other: Tracked[NewValue] ): Tracked[Value]
+ protected def fold[NewValue](fn: (Value, NewValue) => Value)(other: Tracked[NewValue]): Tracked[Value]
}
/** Superclass of objects capable of creating new Kafka Sources of Tracked types.
*
* @tparam T The tracked wrapper type of the objects emitted by the Source.
*/
-trait TrackedSource[ T[X] <: Tracked[X] ] {
+trait TrackedSource[T[X] <: Tracked[X]] {
implicit val trackedSource: TrackedSource[T] = this
- def source[V]( cs: ConsumerSettings[_, V], sub: AutoSubscription ): Source[T[V], Control]
+ def source[V](cs: ConsumerSettings[_, V], sub: AutoSubscription): Source[T[V], Control]
}
/** Superclass of factory objects capable of creating new Kafka Sinks for Tracked types.
*
* @tparam T The tracked wrapper type of the objects consumed by the Sink.
*/
-trait TrackedSink[ T[X] <: Tracked[X] ] {
+trait TrackedSink[T[X] <: Tracked[X]] {
implicit val trackedSink: TrackedSink[T] = this
- def sink[V <: AnyMsg]( implicit s: KafkaExecutorEnvironment ): Sink[T[V], Future[Done]]
+ def sink[V <: AnyMsg](implicit s: KafkaExecutorEnvironment): Sink[T[V], Future[Done]]
}
/** Superclass of factory objects capable of creating new Kafka Sinks for Tracked collection types.
*
* @tparam T The tracked wrapper type of the objects consumed by the Sink.
*/
-trait TrackedMultiSink[ T[X] <: Tracked[X] ] {
+trait TrackedMultiSink[T[X] <: Tracked[X]] {
implicit val trackedMultiSink: TrackedMultiSink[T] = this
- def sinkMulti[V <: AnyMsg]( implicit s: KafkaExecutorEnvironment ): Sink[T[Seq[V]], Future[Done]]
+ def sinkMulti[V <: AnyMsg](implicit s: KafkaExecutorEnvironment): Sink[T[Seq[V]], Future[Done]]
}
/** Tracked type helper functions, using implicits to redirect to the correct
@@ -74,16 +74,20 @@ trait TrackedMultiSink[ T[X] <: Tracked[X] ] {
*/
object Tracked {
- def source[T[X] <: Tracked[X], V]
- ( cs: ConsumerSettings[_, V], sub: AutoSubscription )
- ( implicit s: KafkaExecutorEnvironment, ts: TrackedSource[T] )
- : Source[T[V], Control] = ts.source( cs, sub )
+ def source[T[X] <: Tracked[X], V](cs: ConsumerSettings[_, V], sub: AutoSubscription)(
+ implicit s: KafkaExecutorEnvironment,
+ ts: TrackedSource[T]
+ ): Source[T[V], Control] = ts.source(cs, sub)
- def sink[T[X] <: Tracked[X], V <: AnyMsg]( implicit s: KafkaExecutorEnvironment, ts: TrackedSink[T] ): Sink[T[V], Future[Done]]
- = ts.sink( s )
+ def sink[T[X] <: Tracked[X], V <: AnyMsg](
+ implicit s: KafkaExecutorEnvironment,
+ ts: TrackedSink[T]
+ ): Sink[T[V], Future[Done]] = ts.sink(s)
- def sinkMulti[T[X] <: Tracked[X], V <: AnyMsg]( implicit s: KafkaExecutorEnvironment, ts: TrackedMultiSink[T] ): Sink[T[Seq[V]], Future[Done]]
- = ts.sinkMulti( s )
+ def sinkMulti[T[X] <: Tracked[X], V <: AnyMsg](
+ implicit s: KafkaExecutorEnvironment,
+ ts: TrackedMultiSink[T]
+ ): Sink[T[Seq[V]], Future[Done]] = ts.sinkMulti(s)
/** Map a function over the value within a tracked type.
*
@@ -94,8 +98,8 @@ object Tracked {
* @tparam Out Wrapped output type.
* @return A new Tracked type (of type `T`) with value of the output of `fn`.
*/
- def fmap[T[X] <: Tracked[X], In, Out]( fn: In => Out )( trackedIn: T[In] ): T[Out]
- = trackedIn.map( fn ).asInstanceOf[T[Out]]
+ def fmap[T[X] <: Tracked[X], In, Out](fn: In => Out)(trackedIn: T[In]): T[Out] =
+ trackedIn.map(fn).asInstanceOf[T[Out]]
/** Replace the value within a Tracked wrapper without updating the tracking information.
*
@@ -106,8 +110,7 @@ object Tracked {
* @tparam Out Wrapped output type.
* @return A new Tracked object of type `T`, containing `newValue` and the tracking from `tracked`.
*/
- def freplace[T[X] <: Tracked[X], In, Out]( tracked: T[In] )( newValue: Out ): T[Out]
- = fmap( (_: In) => newValue )( tracked )
+ def freplace[T[X] <: Tracked[X], In, Out](tracked: T[In])(newValue: Out): T[Out] = fmap((_: In) => newValue)(tracked)
/** Expose a future contained within a Tracked wrapper,
*
@@ -117,8 +120,8 @@ object Tracked {
* @tparam T Tracked type of both input and output wrappers.
* @return A future returning a Tracked type of the result of the original future.
*/
- def exposeFuture[V, T[X] <: Tracked[X]]( fut: T[Future[V]] )( implicit exec: ExecutionContext ): Future[T[V]]
- = fut.value.map( freplace( fut ) )( exec )
+ def exposeFuture[V, T[X] <: Tracked[X]](fut: T[Future[V]])(implicit exec: ExecutionContext): Future[T[V]] =
+ fut.value.map(freplace(fut))(exec)
/** Merge the tracking information of a sequence of Tracked wrappers.
* NOTE: CONDITIONS APPLY! CHECK THE TRACKED CLASSES FOLD FUNCTION.
@@ -128,15 +131,14 @@ object Tracked {
* @tparam T Tracked type of both input and output wrappers.
* @return Tracked wrapper of type `T` containing a list of `Msg` objects.
*/
- def flatten[Msg, T[X] <: Tracked[X]]( consuming: Seq[T[Msg]] ): T[Seq[Msg]] = {
- require( consuming.nonEmpty )
+ def flatten[Msg, T[X] <: Tracked[X]](consuming: Seq[T[Msg]]): T[Seq[Msg]] = {
+ require(consuming.nonEmpty)
- val foldStart: T[Seq[Msg]] = fmap[T, Msg, Seq[Msg]]( Seq(_) )( consuming.head )
+ val foldStart: T[Seq[Msg]] = fmap[T, Msg, Seq[Msg]](Seq(_))(consuming.head)
- def combine( tseq: T[Seq[Msg]], tmsg: T[Msg] ): T[Seq[Msg]]
- = tseq.fold[Msg]( _ :+ _ )( tmsg ).asInstanceOf[ T[Seq[Msg]] ]
+ def combine(tseq: T[Seq[Msg]], tmsg: T[Msg]): T[Seq[Msg]] = tseq.fold[Msg](_ :+ _)(tmsg).asInstanceOf[T[Seq[Msg]]]
- consuming.tail.foldLeft[T[Seq[Msg]]]( foldStart )( combine )
+ consuming.tail.foldLeft[T[Seq[Msg]]](foldStart)(combine)
}
/** Create a KafkaProducer from settings whilst explicitly un-setting the ClassLoader.
@@ -148,8 +150,9 @@ object Tracked {
* (Note: Use `lazyProducer` to minimize the number of new Producers which are created,
* this reduces the number of system resources used (such as file handles))
*/
- def createProducer[K, V]( settings: ProducerSettings[K, V] ): KafkaProducer[K, V]
- = withClassLoader( null ) { settings.lazyProducer }
+ def createProducer[K, V](settings: ProducerSettings[K, V]): KafkaProducer[K, V] = withClassLoader(null) {
+ settings.lazyProducer
+ }
}
/** A Tracked type which uses a `Committable` as tracking information.
@@ -164,26 +167,30 @@ trait HasCommittable[Value] extends Tracked[Value] {
*
* @tparam T The tracked wrapper type of the objects consumed by the Sink.
*/
-trait HasCommittableSinks[T[X] <: HasCommittable[X]]
- extends TrackedSink[T]
- with TrackedMultiSink[T] {
-
- override def sink[V <: AnyMsg]( implicit s: KafkaExecutorEnvironment ): Sink[HasCommittable[V], Future[Done]]
- = Producer.commitableSink( s.psAllMessages, Tracked.createProducer( s.psAllMessages ) )
- .contramap( msg =>
- ProducerMessage.Message(
- s.settings.record( msg.value ),
- msg.commit
- )
+trait HasCommittableSinks[T[X] <: HasCommittable[X]] extends TrackedSink[T] with TrackedMultiSink[T] {
+
+ override def sink[V <: AnyMsg](implicit s: KafkaExecutorEnvironment): Sink[HasCommittable[V], Future[Done]] =
+ Producer
+ .commitableSink(s.psAllMessages, Tracked.createProducer(s.psAllMessages))
+ .contramap(
+ msg =>
+ ProducerMessage.Message(
+ s.settings.record(msg.value),
+ msg.commit
+ )
)
- override def sinkMulti[V <: AnyMsg]( implicit s: KafkaExecutorEnvironment ): Sink[HasCommittable[Seq[V]], Future[Done]]
- = Producer.commitableSink( s.psAllMessages, Tracked.createProducer( s.psAllMessages ) )
- .contramap( msgs =>
- ProducerMessage.MultiMessage(
- msgs.value.map(s.settings.record).to,
- msgs.commit
- )
+ override def sinkMulti[V <: AnyMsg](
+ implicit s: KafkaExecutorEnvironment
+ ): Sink[HasCommittable[Seq[V]], Future[Done]] =
+ Producer
+ .commitableSink(s.psAllMessages, Tracked.createProducer(s.psAllMessages))
+ .contramap(
+ msgs =>
+ ProducerMessage.MultiMessage(
+ msgs.value.map(s.settings.record).to,
+ msgs.commit
+ )
)
}
@@ -196,38 +203,38 @@ trait HasCommittableSinks[T[X] <: HasCommittable[X]]
case class CommitTracked[Value](
value: Value,
commit: Committable
- ) extends Tracked[Value] with HasCommittable[Value] {
+) extends Tracked[Value]
+ with HasCommittable[Value] {
- override protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue]
- = copy( value = fn( value ) )
+ override protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue] = copy(value = fn(value))
- override protected def fold[NewValue](fn: (Value, NewValue) => Value)(other: Tracked[NewValue]): Tracked[Value]
- = copy(
- value = fn( value, other.value ),
- commit = CommitTracked.unsafeMerge( commit, other.asInstanceOf[CommitTracked[_]].commit )
+ override protected def fold[NewValue](fn: (Value, NewValue) => Value)(other: Tracked[NewValue]): Tracked[Value] =
+ copy(
+ value = fn(value, other.value),
+ commit = CommitTracked.unsafeMerge(commit, other.asInstanceOf[CommitTracked[_]].commit)
)
}
-object CommitTracked
- extends TrackedSource[CommitTracked]
- with HasCommittableSinks[CommitTracked] {
+object CommitTracked extends TrackedSource[CommitTracked] with HasCommittableSinks[CommitTracked] {
- def unsafeMerge( left: Committable, right: Committable ): Committable = {
- require( right.isInstanceOf[CommittableOffset] )
+ def unsafeMerge(left: Committable, right: Committable): Committable = {
+ require(right.isInstanceOf[CommittableOffset])
- ( left match {
- case offset: CommittableOffset => ConsumerMessage.emptyCommittableOffsetBatch.updated( offset )
+ (left match {
+ case offset: CommittableOffset => ConsumerMessage.emptyCommittableOffsetBatch.updated(offset)
case batch: CommittableOffsetBatch => batch
- }).updated( right.asInstanceOf[CommittableOffset] )
+ }).updated(right.asInstanceOf[CommittableOffset])
}
- def source[Value]( cs: ConsumerSettings[_, Value], sub: AutoSubscription ): Source[CommitTracked[Value], Control]
- = Consumer.committableSource( cs, sub )
- .map( msg =>
- CommitTracked(
- msg.record.value,
- msg.committableOffset
- )
+ def source[Value](cs: ConsumerSettings[_, Value], sub: AutoSubscription): Source[CommitTracked[Value], Control] =
+ Consumer
+ .committableSource(cs, sub)
+ .map(
+ msg =>
+ CommitTracked(
+ msg.record.value,
+ msg.committableOffset
+ )
)
}
@@ -236,8 +243,7 @@ object CommitTracked
*
* @tparam Value The type of the object contained by the tracking wrapper.
*/
-trait HasPartition[Value]
- extends Tracked[Value] {
+trait HasPartition[Value] extends Tracked[Value] {
/** Partition of the source message of this object.
*
@@ -254,41 +260,41 @@ trait HasPartition[Value]
* @tparam Value The type of the object contained by the tracking wrapper.
*/
case class PartTracked[Value](
- value: Value,
- commit: Committable,
- part: Int
-
- ) extends Tracked[Value]
- with HasCommittable[Value]
- with HasPartition[Value] {
+ value: Value,
+ commit: Committable,
+ part: Int
+) extends Tracked[Value]
+ with HasCommittable[Value]
+ with HasPartition[Value] {
- override protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue]
- = copy( value = fn( value ) )
+ override protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue] = copy(value = fn(value))
- override protected def fold[NewValue](fn: (Value, NewValue) => Value)(other: Tracked[NewValue]): Tracked[Value]
- = copy(
- value = fn( value, other.value ),
- commit = CommitTracked.unsafeMerge( commit, other.asInstanceOf[PartTracked[_]].commit )
+ override protected def fold[NewValue](fn: (Value, NewValue) => Value)(other: Tracked[NewValue]): Tracked[Value] =
+ copy(
+ value = fn(value, other.value),
+ commit = CommitTracked.unsafeMerge(commit, other.asInstanceOf[PartTracked[_]].commit)
)
}
-object PartTracked
- extends TrackedSource[PartTracked]
- with HasCommittableSinks[PartTracked] {
-
- def source[Value]( cs: ConsumerSettings[_, Value], sub: AutoSubscription ): Source[PartTracked[Value], Control]
- = Consumer.committablePartitionedSource( cs, sub )
- .flatMapMerge( Int.MaxValue, {
- case (topicPartition, source) =>
- println(s"Started consuming partition $topicPartition.")
- source.map( msg =>
- PartTracked(
- msg.record.value,
- msg.committableOffset,
- topicPartition.partition()
+object PartTracked extends TrackedSource[PartTracked] with HasCommittableSinks[PartTracked] {
+
+ def source[Value](cs: ConsumerSettings[_, Value], sub: AutoSubscription): Source[PartTracked[Value], Control] =
+ Consumer
+ .committablePartitionedSource(cs, sub)
+ .flatMapMerge(
+ Int.MaxValue, {
+ case (topicPartition, source) =>
+ println(s"Started consuming partition $topicPartition.")
+ source.map(
+ msg =>
+ PartTracked(
+ msg.record.value,
+ msg.committableOffset,
+ topicPartition.partition()
+ )
)
- )
- })
+ }
+ )
}
@@ -304,66 +310,70 @@ object PartTracked
case class Transaction[Value](
value: Value,
partOffset: PartitionOffset
-
- ) extends Tracked[Value] with HasPartition[Value] {
+) extends Tracked[Value]
+ with HasPartition[Value] {
def offset: Long = partOffset.offset
override def part: Int = partOffset.key.partition
- override protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue]
- = copy( value = fn( value ) )
+ override protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue] = copy(value = fn(value))
override protected def fold[NewValue](fn: (Value, NewValue) => Value)(other: Tracked[NewValue]): Tracked[Value] = {
- assert( assertion = false, "This function should work, but causes hangs when used with Transactional.sink" )
+ assert(assertion = false, "This function should work, but causes hangs when used with Transactional.sink")
- require( other.isInstanceOf[Transaction[NewValue]] )
+ require(other.isInstanceOf[Transaction[NewValue]])
val that = other.asInstanceOf[Transaction[NewValue]]
- require( partOffset.key == that.partOffset.key, "Trying to merge offsets of different partitions." )
- require( partOffset.offset + 1 == that.partOffset.offset, "Skipping Consumer message." )
+ require(partOffset.key == that.partOffset.key, "Trying to merge offsets of different partitions.")
+ require(partOffset.offset + 1 == that.partOffset.offset, "Skipping Consumer message.")
copy(
- value = fn( value, that.value ),
- partOffset = partOffset.copy( offset = Math.max( offset, that.offset ) )
+ value = fn(value, that.value),
+ partOffset = partOffset.copy(offset = Math.max(offset, that.offset))
)
}
}
-object Transaction
- extends TrackedSource[Transaction]
- with TrackedSink[Transaction]
- with TrackedMultiSink[Transaction] {
+object Transaction extends TrackedSource[Transaction] with TrackedSink[Transaction] with TrackedMultiSink[Transaction] {
- implicit val meh: TrackedSource[Transaction] = this
- implicit val meh2: TrackedSink[Transaction] = this
+ implicit val meh: TrackedSource[Transaction] = this
+ implicit val meh2: TrackedSink[Transaction] = this
implicit val meh3: TrackedMultiSink[Transaction] = this
- override def source[Value]( cs: ConsumerSettings[_, Value], sub: AutoSubscription ): Source[Transaction[Value], Control]
- = Transactional.source( cs, sub )
- .map( msg => Transaction( msg.record.value(), msg.partitionOffset ) )
+ override def source[Value](
+ cs: ConsumerSettings[_, Value],
+ sub: AutoSubscription
+ ): Source[Transaction[Value], Control] =
+ Transactional
+ .source(cs, sub)
+ .map(msg => Transaction(msg.record.value(), msg.partitionOffset))
- override def sink[V <: AnyMsg]( implicit s: KafkaExecutorEnvironment ): Sink[Transaction[V], Future[Done]] = {
+ override def sink[V <: AnyMsg](implicit s: KafkaExecutorEnvironment): Sink[Transaction[V], Future[Done]] = {
// TODO: Construct the KafkaProducer with the correct ClassLoader
val id = ObjectId.get.toString
- Transactional.sink( s.psAllMessages, id )
- .contramap( msg =>
- ProducerMessage.Message(
- s.settings.record( msg.value ),
- msg.partOffset
+ Transactional
+ .sink(s.psAllMessages, id)
+ .contramap(
+ msg =>
+ ProducerMessage.Message(
+ s.settings.record(msg.value),
+ msg.partOffset
+ )
)
- )
}
- override def sinkMulti[V <: AnyMsg]( implicit s: KafkaExecutorEnvironment ): Sink[Transaction[Seq[V]], Future[Done]] = {
+ override def sinkMulti[V <: AnyMsg](implicit s: KafkaExecutorEnvironment): Sink[Transaction[Seq[V]], Future[Done]] = {
// TODO: Construct the KafkaProducer with the correct ClassLoader
val id = ObjectId.get.toString
- Transactional.sink( s.psAllMessages, id )
- .contramap( msgs =>
- ProducerMessage.MultiMessage(
- msgs.value.map(s.settings.record).to,
- msgs.partOffset
+ Transactional
+ .sink(s.psAllMessages, id)
+ .contramap(
+ msgs =>
+ ProducerMessage.MultiMessage(
+ msgs.value.map(s.settings.record).to,
+ msgs.partOffset
+ )
)
- )
}
}
@@ -373,35 +383,34 @@ object Transaction
* @tparam Value The type of the object contained by the tracking wrapper.
*/
case class Untracked[Value](
- value: Value
- ) extends Tracked[Value] {
+ value: Value
+) extends Tracked[Value] {
- override protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue]
- = copy( value = fn( value ) )
+ override protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue] = copy(value = fn(value))
- override protected def fold[NewValue](fn: (Value, NewValue) => Value)(other: Tracked[NewValue]): Tracked[Value]
- = copy( value = fn( value, other.value ) )
+ override protected def fold[NewValue](fn: (Value, NewValue) => Value)(other: Tracked[NewValue]): Tracked[Value] =
+ copy(value = fn(value, other.value))
}
-object Untracked
- extends TrackedSource[Untracked]
- with TrackedSink[Untracked] {
+object Untracked extends TrackedSource[Untracked] with TrackedSink[Untracked] {
- def source[Value]( messages: Seq[Value] ): Source[Untracked[Value], NotUsed]
- = Source.fromIterator( () => messages.iterator ).map( Untracked(_) )
+ def source[Value](messages: Seq[Value]): Source[Untracked[Value], NotUsed] =
+ Source.fromIterator(() => messages.iterator).map(Untracked(_))
- override def source[Value]( cs: ConsumerSettings[_, Value], sub: AutoSubscription ): Source[Untracked[Value], Control]
- = Consumer.committableSource( cs, sub )
- .mapAsync(1)( message =>
- message
- .committableOffset
- .commitScaladsl()
- .map( _ => Untracked( message.record.value ) )( ExecutionContext.global )
+ override def source[Value](cs: ConsumerSettings[_, Value], sub: AutoSubscription): Source[Untracked[Value], Control] =
+ Consumer
+ .committableSource(cs, sub)
+ .mapAsync(1)(
+ message =>
+ message.committableOffset
+ .commitScaladsl()
+ .map(_ => Untracked(message.record.value))(ExecutionContext.global)
)
- override def sink[Value <: AnyMsg]( implicit s: KafkaExecutorEnvironment ): Sink[Untracked[Value], Future[Done]]
- = Producer.plainSink( s.psAllMessages, Tracked.createProducer( s.psAllMessages ) )
- .contramap( (msg: Untracked[Value]) => s.settings.record( msg.value ) )
+ override def sink[Value <: AnyMsg](implicit s: KafkaExecutorEnvironment): Sink[Untracked[Value], Future[Done]] =
+ Producer
+ .plainSink(s.psAllMessages, Tracked.createProducer(s.psAllMessages))
+ .contramap((msg: Untracked[Value]) => s.settings.record(msg.value))
}
@@ -412,40 +421,38 @@ case class MockTransaction[Value](
part: Int,
minOffset: Long,
maxOffset: Long
+) extends Tracked[Value]
+ with HasPartition[Value] {
- ) extends Tracked[Value] with HasPartition[Value] {
-
- def this( value: Value, part: Int, offset: Long )
- = this( value, part, offset, offset + 1 )
+ def this(value: Value, part: Int, offset: Long) = this(value, part, offset, offset + 1)
def consuming: Long = maxOffset - minOffset
- override protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue]
- = copy( value = fn( value ) )
+ override protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue] = copy(value = fn(value))
override protected def fold[NewValue](fn: (Value, NewValue) => Value)(other: Tracked[NewValue]): Tracked[Value] = {
- require( other.isInstanceOf[MockTransaction[NewValue]] )
+ require(other.isInstanceOf[MockTransaction[NewValue]])
val that: MockTransaction[NewValue] = other.asInstanceOf[MockTransaction[NewValue]]
// Strict checks to emulate the merging requirements of real Transactions.
- require( part == that.part, s"Cannot merge messages on different partitions" )
- require( that.minOffset + 1 == that.maxOffset, s"Target message cannot already be aggregated!" )
- require( minOffset < maxOffset, s"Messages must be consumed in chronological order!" )
- require( that.minOffset < that.maxOffset, s"Messages must be consumed in chronological order!" )
- require( that.minOffset == maxOffset, s"Messages cannot be skipped! ($maxOffset -> ${that.minOffset})" )
+ require(part == that.part, s"Cannot merge messages on different partitions")
+ require(that.minOffset + 1 == that.maxOffset, s"Target message cannot already be aggregated!")
+ require(minOffset < maxOffset, s"Messages must be consumed in chronological order!")
+ require(that.minOffset < that.maxOffset, s"Messages must be consumed in chronological order!")
+ require(that.minOffset == maxOffset, s"Messages cannot be skipped! ($maxOffset -> ${that.minOffset})")
- copy( value = fn( value, that.value ), maxOffset = that.maxOffset )
+ copy(value = fn(value, that.value), maxOffset = that.maxOffset)
}
}
object MockTransaction {
- def source[Value]( messages: Seq[(Value, Int)] ): Source[MockTransaction[Value], NotUsed]
- = Source
- .fromIterator( () => messages.iterator )
- .groupBy( Int.MaxValue, _._2 )
- .zip( Source(1 to Int.MaxValue) )
- .map({ case ((msg, part), i) => new MockTransaction( msg, part, i ) })
+ def source[Value](messages: Seq[(Value, Int)]): Source[MockTransaction[Value], NotUsed] =
+ Source
+ .fromIterator(() => messages.iterator)
+ .groupBy(Int.MaxValue, _._2)
+ .zip(Source(1 to Int.MaxValue))
+ .map({ case ((msg, part), i) => new MockTransaction(msg, part, i) })
.mergeSubstreams
}
@@ -456,29 +463,26 @@ case class MockTracked[Value](
value: Value,
part: Int,
consuming: Long
+) extends Tracked[Value] {
- ) extends Tracked[Value] {
-
- override protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue]
- = copy( value = fn( value ) )
+ override protected def map[NewValue](fn: Value => NewValue): Tracked[NewValue] = copy(value = fn(value))
override protected def fold[NewValue](fn: (Value, NewValue) => Value)(other: Tracked[NewValue]): Tracked[Value] = {
- require( other.isInstanceOf[MockTracked[NewValue]] )
+ require(other.isInstanceOf[MockTracked[NewValue]])
val that: MockTracked[NewValue] = other.asInstanceOf[MockTracked[NewValue]]
- copy( value = fn( value, that.value ), consuming = consuming + that.consuming )
+ copy(value = fn(value, that.value), consuming = consuming + that.consuming)
}
}
object MockTracked {
- def apply[T]( mock: MockTransaction[T] ): MockTracked[T]
- = MockTracked( mock.value, mock.part, mock.maxOffset - mock.minOffset )
+ def apply[T](mock: MockTransaction[T]): MockTracked[T] =
+ MockTracked(mock.value, mock.part, mock.maxOffset - mock.minOffset)
- def source[Value]( messages: Seq[(Value, Int)] ): Source[MockTracked[Value], NotUsed]
- = Source
- .fromIterator( () => messages.iterator )
+ def source[Value](messages: Seq[(Value, Int)]): Source[MockTracked[Value], NotUsed] =
+ Source
+ .fromIterator(() => messages.iterator)
.map({ case (v, p) => MockTracked(v, p, 1) })
}
-
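
A sketch of the `Tracked` helper functions using the mock wrapper above (values are illustrative):

    val a = MockTracked(1, part = 0, consuming = 1)
    val b = MockTracked(2, part = 0, consuming = 1)

    // `fmap` transforms the value while preserving the tracking information.
    val doubled: MockTracked[Int] = Tracked.fmap((x: Int) => x * 2)(a)

    // `flatten` merges tracking: the result accounts for both consumed messages.
    val both: MockTracked[Seq[Int]] = Tracked.flatten(Seq(a, b))
    assert(both.consuming == 2)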
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/KafkaExecutorSettings.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/KafkaExecutorSettings.scala
index a9df5dc..edc0d2e 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/KafkaExecutorSettings.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/KafkaExecutorSettings.scala
@@ -20,16 +20,16 @@ abstract class KafkaExecutorEnvironment {
val materializer: Materializer
// Kafka - PiiId keyed consumer topic settings
- val csPiiHistory: ConsumerSettings[KeyPiiId, PiiHistory]
- val csSequenceRequest: ConsumerSettings[KeyPiiId, SequenceRequest]
- val csReduceRequest: ConsumerSettings[KeyPiiId, ReduceRequest]
- val csResult: ConsumerSettings[KeyPiiId, PiiLog]
+ val csPiiHistory: ConsumerSettings[KeyPiiId, PiiHistory]
+ val csSequenceRequest: ConsumerSettings[KeyPiiId, SequenceRequest]
+ val csReduceRequest: ConsumerSettings[KeyPiiId, ReduceRequest]
+ val csResult: ConsumerSettings[KeyPiiId, PiiLog]
// Kafka - (PiiId, CallRef) keyed consumer topic settings
- val csAssignment: ConsumerSettings[KeyPiiIdCall, Assignment]
+ val csAssignment: ConsumerSettings[KeyPiiIdCall, Assignment]
// Kafka - All producer settings
- val psAllMessages: ProducerSettings[AnyKey, AnyMsg]
+ val psAllMessages: ProducerSettings[AnyKey, AnyMsg]
}
@@ -37,8 +37,8 @@ object KafkaExecutorSettings {
// Kafka - Topic Keys
sealed trait AnyKey
- case class KeyPiiId( piiId: ObjectId ) extends AnyKey
- case class KeyPiiIdCall( piiId: ObjectId, ref: CallRef) extends AnyKey
+ case class KeyPiiId(piiId: ObjectId) extends AnyKey
+ case class KeyPiiIdCall(piiId: ObjectId, ref: CallRef) extends AnyKey
type AnyRes = Any
@@ -63,7 +63,7 @@ abstract class KafkaExecutorSettings {
// Kafka - Debug output
def logMessageReceived(msg: Any): Unit = println(s"Received: $msg.")
- def logMessageSent(msg: Any): Unit = println(s"Sent: $msg.")
+ def logMessageSent(msg: Any): Unit = println(s"Sent: $msg.")
// Kafka - Topic Names
type TopicN = String
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/BsonKafkaExecutorSettings.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/BsonKafkaExecutorSettings.scala
index 6a4bf8d..7e6bbce 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/BsonKafkaExecutorSettings.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/BsonKafkaExecutorSettings.scala
@@ -13,40 +13,37 @@ import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import scala.reflect.ClassTag
-class BsonKafkaExecutorSettings( val reg: CodecRegistry )
- extends KafkaExecutorSettings {
+class BsonKafkaExecutorSettings(val reg: CodecRegistry) extends KafkaExecutorSettings {
import KafkaExecutorSettings._
import com.workflowfm.pew.stateless.StatelessMessages._
// Kafka - Topic Names
override val tnReduceRequest: TopicN = "ReduceRequest"
- override val tnPiiHistory: TopicN = "PiiHistory"
- override val tnAssignment: TopicN = "Assignment"
- override val tnResult: TopicN = "Result"
+ override val tnPiiHistory: TopicN = "PiiHistory"
+ override val tnAssignment: TopicN = "Assignment"
+ override val tnResult: TopicN = "Result"
- override val serverAndPort: String = "localhost:9092"
+ override val serverAndPort: String = "localhost:9092"
override val defaultGroupId: String = "Default-Group"
- def consSettings[K, V]( implicit ctK: ClassTag[K], ctV: ClassTag[V], actorSys: ActorSystem )
- : ConsumerSettings[K, V] = {
+ def consSettings[K, V](implicit ctK: ClassTag[K], ctV: ClassTag[V], actorSys: ActorSystem): ConsumerSettings[K, V] = {
import ConsumerConfig._
ConsumerSettings
- .create( actorSys, CodecWrapper[K](ctK, reg), CodecWrapper[V](ctV, reg) )
- .withBootstrapServers( serverAndPort )
- .withGroupId( defaultGroupId )
- .withProperty( AUTO_OFFSET_RESET_CONFIG, "earliest" )
- .withWakeupTimeout( 10.seconds )
+ .create(actorSys, CodecWrapper[K](ctK, reg), CodecWrapper[V](ctV, reg))
+ .withBootstrapServers(serverAndPort)
+ .withGroupId(defaultGroupId)
+ .withProperty(AUTO_OFFSET_RESET_CONFIG, "earliest")
+ .withWakeupTimeout(10.seconds)
}
- def prodSettings[K, V]( implicit ctK: ClassTag[K], ctV: ClassTag[V], actorSys: ActorSystem )
- : ProducerSettings[K, V] = {
+ def prodSettings[K, V](implicit ctK: ClassTag[K], ctV: ClassTag[V], actorSys: ActorSystem): ProducerSettings[K, V] = {
ProducerSettings
- .create( actorSys, CodecWrapper[K](ctK, reg), CodecWrapper[V](ctV, reg) )
- .withBootstrapServers( serverAndPort )
+ .create(actorSys, CodecWrapper[K](ctK, reg), CodecWrapper[V](ctV, reg))
+ .withBootstrapServers(serverAndPort)
}
override def createEnvironment(): KafkaExecutorEnvironment = {
@@ -54,14 +51,14 @@ class BsonKafkaExecutorSettings( val reg: CodecRegistry )
override val settings: KafkaExecutorSettings = BsonKafkaExecutorSettings.this
- override val context: ExecutionContext = ExecutionContext.global
- implicit override val actors: ActorSystem = ActorSystem( s"ActorSys" )
- override val materializer: Materializer = ActorMaterializer.create( actors )
+ override val context: ExecutionContext = ExecutionContext.global
+ implicit override val actors: ActorSystem = ActorSystem(s"ActorSys")
+ override val materializer: Materializer = ActorMaterializer.create(actors)
// Kafka - PiiId keyed consumer topic settings
- override val csPiiHistory: ConsumerSettings[KeyPiiId, PiiHistory] = consSettings
+ override val csPiiHistory: ConsumerSettings[KeyPiiId, PiiHistory] = consSettings
override val csSequenceRequest: ConsumerSettings[KeyPiiId, SequenceRequest] = consSettings
- override val csReduceRequest: ConsumerSettings[KeyPiiId, ReduceRequest] = consSettings
+ override val csReduceRequest: ConsumerSettings[KeyPiiId, ReduceRequest] = consSettings
// Jev, new results listeners only care about messages after their instantiation.
// Additionally, they cannot fall back on the old offset as they have a unique group-id.
@@ -78,12 +75,12 @@ class BsonKafkaExecutorSettings( val reg: CodecRegistry )
}
override def record: AnyMsg => ProducerRecord[AnyKey, AnyMsg] = {
- case m: PiiUpdate => new ProducerRecord( tnPiiHistory, KeyPiiId(m.pii.id), m )
- case m: SequenceRequest => new ProducerRecord( tnPiiHistory, KeyPiiId(m.piiId), m )
- case m: SequenceFailure => new ProducerRecord( tnPiiHistory, KeyPiiId(m.piiId), m )
- case m: ReduceRequest => new ProducerRecord( tnReduceRequest, KeyPiiId(m.pii.id), m )
- case m: Assignment => new ProducerRecord( tnAssignment, KeyPiiIdCall(m.pii.id, m.callRef), m )
- case m: PiiLog => new ProducerRecord( tnResult, KeyPiiId(m.piiId), m )
+ case m: PiiUpdate => new ProducerRecord(tnPiiHistory, KeyPiiId(m.pii.id), m)
+ case m: SequenceRequest => new ProducerRecord(tnPiiHistory, KeyPiiId(m.piiId), m)
+ case m: SequenceFailure => new ProducerRecord(tnPiiHistory, KeyPiiId(m.piiId), m)
+ case m: ReduceRequest => new ProducerRecord(tnReduceRequest, KeyPiiId(m.pii.id), m)
+ case m: Assignment => new ProducerRecord(tnAssignment, KeyPiiIdCall(m.pii.id, m.callRef), m)
+ case m: PiiLog => new ProducerRecord(tnResult, KeyPiiId(m.piiId), m)
}
}
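
A bootstrap sketch tying these settings together (assuming `myProcessStore` is a `PiProcessStore` in scope, and that `KafkaCodecRegistry`, below, provides a `CodecRegistry` via `AutoCodecRegistryExt`):

    val registry = new KafkaCodecRegistry(myProcessStore)
    val settings = new BsonKafkaExecutorSettings(registry)

    implicit val env: KafkaExecutorEnvironment = settings.createEnvironment()
    val executor = new MinimalKafkaExecutor()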
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/CodecWrapper.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/CodecWrapper.scala
index d2ebfeb..d21236e 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/CodecWrapper.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/CodecWrapper.scala
@@ -18,8 +18,7 @@ import scala.reflect.ClassTag
* @param codec Wrapped codec to leverage for serialization.
* @tparam T Type that is being serialized.
*/
-class CodecWrapper[T](codec: Codec[T] )
- extends Deserializer[T] with Serializer[T] {
+class CodecWrapper[T](codec: Codec[T]) extends Deserializer[T] with Serializer[T] {
private val deCtx: DecoderContext = DecoderContext.builder().build()
private val enCtx: EncoderContext = EncoderContext.builder().build()
@@ -27,15 +26,15 @@ class CodecWrapper[T](codec: Codec[T] )
override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
override def deserialize(topic: String, data: Array[Byte]): T = {
- val buffer: ByteBuffer = ByteBuffer.wrap( data )
- val reader: BsonReader = new BsonBinaryReader( buffer )
- codec.decode( reader, deCtx )
+ val buffer: ByteBuffer = ByteBuffer.wrap(data)
+ val reader: BsonReader = new BsonBinaryReader(buffer)
+ codec.decode(reader, deCtx)
}
override def serialize(topic: String, data: T): Array[Byte] = {
- val buffer = new BasicOutputBuffer()
- val writer: BsonWriter = new BsonBinaryWriter( buffer )
- codec.encode( writer, data, enCtx )
+ val buffer = new BasicOutputBuffer()
+ val writer: BsonWriter = new BsonBinaryWriter(buffer)
+ codec.encode(writer, data, enCtx)
buffer.getInternalBuffer
}
@@ -46,10 +45,9 @@ object CodecWrapper {
private val cache: mutable.Map[Codec[_], CodecWrapper[_]] = mutable.Map()
- def apply[T]( implicit ct: ClassTag[T], reg: CodecRegistry ): CodecWrapper[T]
- = this.synchronized {
+ def apply[T](implicit ct: ClassTag[T], reg: CodecRegistry): CodecWrapper[T] = this.synchronized {
- val codec: Codec[T] = reg.get[T]( ct.runtimeClass.asInstanceOf[Class[T]] )
- cache.getOrElseUpdate( codec, new CodecWrapper[T](codec) ).asInstanceOf[CodecWrapper[T]]
+ val codec: Codec[T] = reg.get[T](ct.runtimeClass.asInstanceOf[Class[T]])
+ cache.getOrElseUpdate(codec, new CodecWrapper[T](codec)).asInstanceOf[CodecWrapper[T]]
}
}
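
A round-trip sketch for the wrapper (the Kafka topic argument is unused by the codec itself; `reg` and `somePiiLog` are placeholders):

    implicit val reg: CodecRegistry = ??? // any registry that can supply a Codec[PiiLog]

    val wrapper: CodecWrapper[PiiLog] = CodecWrapper[PiiLog]
    val bytes: Array[Byte] = wrapper.serialize("Result", somePiiLog)
    val back: PiiLog = wrapper.deserialize("Result", bytes)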
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/KafkaCodecRegistry.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/KafkaCodecRegistry.scala
index 8e35b21..c353a27 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/KafkaCodecRegistry.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/KafkaCodecRegistry.scala
@@ -19,73 +19,71 @@ import org.mongodb.scala.bson.codecs.DEFAULT_CODEC_REGISTRY
class KafkaCodecRegistry(
processes: PiProcessStore,
override val baseRegistry: CodecRegistry = DEFAULT_CODEC_REGISTRY
-
- ) extends CodecProvider with AutoCodecRegistryExt {
+) extends CodecProvider
+ with AutoCodecRegistryExt {
/** AnyCodec for encoding arbitrary objects. Uses a lazy value
* to support being fully overridden by subclasses.
* (Note: All types must have codecs present in this object
* when it is actually time to encode/decode them at runtime)
*/
- protected lazy val anyc: Codec[Any] = registerCodec( new AnyCodec( this ) )
+ protected lazy val anyc: Codec[Any] = registerCodec(new AnyCodec(this))
- registerCodec( new BoxedUnitCodec )
- registerCodec( new Tuple2Codec( anyc ) )
- registerCodec( EitherCodec( anyc ) )
- registerCodec( OptionCodec( anyc ) )
+ registerCodec(new BoxedUnitCodec)
+ registerCodec(new Tuple2Codec(anyc))
+ registerCodec(EitherCodec(anyc))
+ registerCodec(OptionCodec(anyc))
/** A local type of PiObjectCodec to ensure the `anyCodec` used
* matches the one overridden here.
*/
- class LocalPiObjectCodec extends PiObjectCodec( this ) {
+ class LocalPiObjectCodec extends PiObjectCodec(this) {
override lazy val anyCodec: Codec[Any] = anyc
}
// Keep explicit references to these PEW codec instances;
// we don't have a registry that includes them.
- private val idc = registerCodec( new ObjectIdCodec() )
- private val procc = registerCodec( new PiProcessCodec( processes ) )
- private val obj = registerCodec( new LocalPiObjectCodec )
- private val term = registerCodec( new TermCodec(this) )
- private val chan = registerCodec( new ChanCodec )
- private val chanMap = registerCodec( new ChanMapCodec(this) )
- private val piRes = registerCodec( new PiResourceCodec(this) )
- private val fut = registerCodec( new PiFutureCodec(this) )
- private val piState = registerCodec( new PiStateCodec(this, processes) )
- private val piInst = registerCodec( new PiInstanceCodec(this, processes) )
- private val meta = registerCodec( new PiMetadataMapCodec(anyc) )
-
+ private val idc = registerCodec(new ObjectIdCodec())
+ private val procc = registerCodec(new PiProcessCodec(processes))
+ private val obj = registerCodec(new LocalPiObjectCodec)
+ private val term = registerCodec(new TermCodec(this))
+ private val chan = registerCodec(new ChanCodec)
+ private val chanMap = registerCodec(new ChanMapCodec(this))
+ private val piRes = registerCodec(new PiResourceCodec(this))
+ private val fut = registerCodec(new PiFutureCodec(this))
+ private val piState = registerCodec(new PiStateCodec(this, processes))
+ private val piInst = registerCodec(new PiInstanceCodec(this, processes))
+ private val meta = registerCodec(new PiMetadataMapCodec(anyc))
- registerCodec( new PiEventCallCodec[ObjectId]( idc, obj, procc, meta ) )
- registerCodec( new PiEventExceptionCodec[ObjectId]( idc, meta ) )
- registerCodec( new PiEventProcessExceptionCodec[ObjectId]( idc, meta ) )
- registerCodec( new PiEventResultCodec[ObjectId]( piInst, anyc, meta ) )
- registerCodec( new PiEventReturnCodec[ObjectId]( idc, anyc, meta ) )
- registerCodec( new PiEventStartCodec[ObjectId]( piInst, meta ) )
- registerCodec( new PiFailureAtomicProcessIsCompositeCodec[ObjectId]( piInst, meta ) )
- registerCodec( new PiFailureNoResultCodec[ObjectId]( piInst, meta ) )
- registerCodec( new PiFailureNoSuchInstanceCodec[ObjectId]( idc, meta ) )
- registerCodec( new PiFailureUnknownProcessCodec[ObjectId]( piInst, meta ) )
- private val peExEvent = registerCodec( new SuperclassCodec[PiFailure[ObjectId]] )
- private val peEvent = registerCodec( new SuperclassCodec[PiEvent[ObjectId]] )
+ registerCodec(new PiEventCallCodec[ObjectId](idc, obj, procc, meta))
+ registerCodec(new PiEventExceptionCodec[ObjectId](idc, meta))
+ registerCodec(new PiEventProcessExceptionCodec[ObjectId](idc, meta))
+ registerCodec(new PiEventResultCodec[ObjectId](piInst, anyc, meta))
+ registerCodec(new PiEventReturnCodec[ObjectId](idc, anyc, meta))
+ registerCodec(new PiEventStartCodec[ObjectId](piInst, meta))
+ registerCodec(new PiFailureAtomicProcessIsCompositeCodec[ObjectId](piInst, meta))
+ registerCodec(new PiFailureNoResultCodec[ObjectId](piInst, meta))
+ registerCodec(new PiFailureNoSuchInstanceCodec[ObjectId](idc, meta))
+ registerCodec(new PiFailureUnknownProcessCodec[ObjectId](piInst, meta))
+ private val peExEvent = registerCodec(new SuperclassCodec[PiFailure[ObjectId]])
+ private val peEvent = registerCodec(new SuperclassCodec[PiEvent[ObjectId]])
// These use the PEW-REST Key codecs; they need to be initialised after PEW
- private val callRef = registerCodec( new CallRefCodec )
- private val keyPiiId = registerCodec( new KeyPiiIdCodec )
- private val keyPiiIdCall = registerCodec( new KeyPiiIdCallCodec( callRef ) )
+ private val callRef = registerCodec(new CallRefCodec)
+ private val keyPiiId = registerCodec(new KeyPiiIdCodec)
+ private val keyPiiIdCall = registerCodec(new KeyPiiIdCallCodec(callRef))
// These use the PEW-REST Msg codecs; they need to be initialised after PEW
- private val update = registerCodec( new PiiUpdateCodec( piInst ) )
- private val assgn = registerCodec( new AssignmentCodec( piInst, callRef, piRes ) )
- private val seqReq = registerCodec( new SequenceRequestCodec( callRef, obj ) )
- private val seqfail = registerCodec( new SequenceFailureCodec( piInst, callRef, obj, peExEvent ) )
- private val redReq = registerCodec( new ReduceRequestCodec( piInst, callRef, obj ) )
- private val res = registerCodec( new PiiLogCodec( peEvent ) )
-
- private val piiHistory = registerCodec( new SuperclassCodec[PiiHistory] )
+ private val update = registerCodec(new PiiUpdateCodec(piInst))
+ private val assgn = registerCodec(new AssignmentCodec(piInst, callRef, piRes))
+ private val seqReq = registerCodec(new SequenceRequestCodec(callRef, obj))
+ private val seqfail = registerCodec(new SequenceFailureCodec(piInst, callRef, obj, peExEvent))
+ private val redReq = registerCodec(new ReduceRequestCodec(piInst, callRef, obj))
+ private val res = registerCodec(new PiiLogCodec(peEvent))
+ private val piiHistory = registerCodec(new SuperclassCodec[PiiHistory])
// Initialised after both Keys & Msgs as it depends on them all.
- private val anykey = registerCodec( new SuperclassCodec[AnyKey] )
- val anymsg: Codec[AnyMsg] = registerCodec( new SuperclassCodec[AnyMsg] )
+ private val anykey = registerCodec(new SuperclassCodec[AnyKey])
+ val anymsg: Codec[AnyMsg] = registerCodec(new SuperclassCodec[AnyMsg])
}
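
In use, the registry only needs the process store; everything else is wired in declaration order (PEW codecs first, then keys, then messages, with the AnyKey/AnyMsg superclass codecs last because they dispatch over all of them). A minimal sketch, where `processStore` is an assumed PiProcessStore:

val processStore: PiProcessStore = ??? // assumed: the store holding this workflow's processes

implicit val registry: KafkaCodecRegistry = new KafkaCodecRegistry(processStore)

// `anymsg` is the one public codec: it can (de)serialize any stateless message.
val msgCodec: org.bson.codecs.Codec[AnyMsg] = registry.anymsg
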
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/PewCodecs.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/PewCodecs.scala
index 759a83a..42fc86e 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/PewCodecs.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/PewCodecs.scala
@@ -10,36 +10,37 @@ object PewCodecs {
import KafkaExecutorSettings._
import StatelessMessages._
- type PiiT = PiInstance[ObjectId]
- type PiResT = PiResource
- type ResMsgT = PiiLog
+ type PiiT = PiInstance[ObjectId]
+ type PiResT = PiResource
+ type ResMsgT = PiiLog
// val PIEVENT: Class[PiEvent[ObjectId]] = classOf[PiEvent[ObjectId]]
- val PISTART: Class[PiEventStart[ObjectId]] = classOf[PiEventStart[ObjectId]]
- val PIRESULT: Class[PiEventResult[ObjectId]] = classOf[PiEventResult[ObjectId]]
- val PICALL: Class[PiEventCall[ObjectId]] = classOf[PiEventCall[ObjectId]]
- val PIRETURN: Class[PiEventReturn[ObjectId]] = classOf[PiEventReturn[ObjectId]]
- val PINORES: Class[PiFailureNoResult[ObjectId]] = classOf[PiFailureNoResult[ObjectId]]
- val PIUNKNOWN: Class[PiFailureUnknownProcess[ObjectId]] = classOf[PiFailureUnknownProcess[ObjectId]]
+ val PISTART: Class[PiEventStart[ObjectId]] = classOf[PiEventStart[ObjectId]]
+ val PIRESULT: Class[PiEventResult[ObjectId]] = classOf[PiEventResult[ObjectId]]
+ val PICALL: Class[PiEventCall[ObjectId]] = classOf[PiEventCall[ObjectId]]
+ val PIRETURN: Class[PiEventReturn[ObjectId]] = classOf[PiEventReturn[ObjectId]]
+ val PINORES: Class[PiFailureNoResult[ObjectId]] = classOf[PiFailureNoResult[ObjectId]]
+ val PIUNKNOWN: Class[PiFailureUnknownProcess[ObjectId]] = classOf[PiFailureUnknownProcess[ObjectId]]
val PIFAPIS: Class[PiFailureAtomicProcessIsComposite[ObjectId]] = classOf[PiFailureAtomicProcessIsComposite[ObjectId]]
- val PIFNSI: Class[PiFailureNoSuchInstance[ObjectId]] = classOf[PiFailureNoSuchInstance[ObjectId]]
- val PIEXCEPT: Class[PiFailureExceptions[ObjectId]] = classOf[PiFailureExceptions[ObjectId]]
- val PIPROCEXCEPT: Class[PiFailureAtomicProcessException[ObjectId]] = classOf[PiFailureAtomicProcessException[ObjectId]]
-
- val ANY_KEY: Class[AnyKey] = classOf[AnyKey]
- val ANY_MSG: Class[AnyMsg] = classOf[AnyMsg]
- val ANY_RES: Class[AnyRes] = classOf[AnyRes]
-
- val THROWABLE: Class[Throwable] = classOf[Throwable]
- val CALL_REF: Class[CallRef] = classOf[CallRef]
- val KEY_PII_ID: Class[KeyPiiId] = classOf[KeyPiiId]
- val KEY_PII_ID_CALL: Class[KeyPiiIdCall] = classOf[KeyPiiIdCall]
- val PII_UPDATE: Class[PiiUpdate] = classOf[PiiUpdate]
- val ASSIGNMENT: Class[Assignment] = classOf[Assignment]
- val PII_HISTORY: Class[PiiHistory] = classOf[PiiHistory]
- val REDUCE_REQUEST: Class[ReduceRequest] = classOf[ReduceRequest]
- val SEQUENCE_REQ: Class[SequenceRequest] = classOf[SequenceRequest]
- val SEQFAIL_REQ: Class[SequenceFailure] = classOf[SequenceFailure]
- val PIILOG: Class[PiiLog] = classOf[PiiLog]
+ val PIFNSI: Class[PiFailureNoSuchInstance[ObjectId]] = classOf[PiFailureNoSuchInstance[ObjectId]]
+ val PIEXCEPT: Class[PiFailureExceptions[ObjectId]] = classOf[PiFailureExceptions[ObjectId]]
+ val PIPROCEXCEPT: Class[PiFailureAtomicProcessException[ObjectId]] =
+ classOf[PiFailureAtomicProcessException[ObjectId]]
+
+ val ANY_KEY: Class[AnyKey] = classOf[AnyKey]
+ val ANY_MSG: Class[AnyMsg] = classOf[AnyMsg]
+ val ANY_RES: Class[AnyRes] = classOf[AnyRes]
+
+ val THROWABLE: Class[Throwable] = classOf[Throwable]
+ val CALL_REF: Class[CallRef] = classOf[CallRef]
+ val KEY_PII_ID: Class[KeyPiiId] = classOf[KeyPiiId]
+ val KEY_PII_ID_CALL: Class[KeyPiiIdCall] = classOf[KeyPiiIdCall]
+ val PII_UPDATE: Class[PiiUpdate] = classOf[PiiUpdate]
+ val ASSIGNMENT: Class[Assignment] = classOf[Assignment]
+ val PII_HISTORY: Class[PiiHistory] = classOf[PiiHistory]
+ val REDUCE_REQUEST: Class[ReduceRequest] = classOf[ReduceRequest]
+ val SEQUENCE_REQ: Class[SequenceRequest] = classOf[SequenceRequest]
+ val SEQFAIL_REQ: Class[SequenceFailure] = classOf[SequenceFailure]
+ val PIILOG: Class[PiiLog] = classOf[PiiLog]
}
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/content/CallRefCodec.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/content/CallRefCodec.scala
index aa97e84..2afcb08 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/content/CallRefCodec.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/content/CallRefCodec.scala
@@ -5,8 +5,7 @@ import com.workflowfm.pew.stateless.instances.kafka.settings.bson.codecs.PewCode
import org.bson._
import org.bson.codecs._
-class CallRefCodec
- extends Codec[CallRef] {
+class CallRefCodec extends Codec[CallRef] {
import PewCodecs._
@@ -15,18 +14,18 @@ class CallRefCodec
override def decode(reader: BsonReader, ctx: DecoderContext): CallRef = {
reader.readStartDocument()
- reader.readName( idN )
+ reader.readName(idN)
val id = reader.readInt32()
reader.readEndDocument()
- CallRef( id )
+ CallRef(id)
}
override def encode(writer: BsonWriter, value: CallRef, ctx: EncoderContext): Unit = {
writer.writeStartDocument()
- writer.writeName( idN )
- writer.writeInt32( value.id )
+ writer.writeName(idN)
+ writer.writeInt32(value.id)
writer.writeEndDocument()
}
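
Since CallRefCodec writes a single-field document, a round-trip through an in-memory BsonDocument is enough to check it. A sketch, assuming CallRef is a case class (so equality is structural) and that `idN`, defined above this hunk, is "id":

import org.bson.{BsonDocument, BsonDocumentReader, BsonDocumentWriter}
import org.bson.codecs.{DecoderContext, EncoderContext}

val codec = new CallRefCodec
val doc   = new BsonDocument()

codec.encode(new BsonDocumentWriter(doc), CallRef(42), EncoderContext.builder().build())
// doc now holds a one-field document, e.g. { "id": 42 }

val back = codec.decode(new BsonDocumentReader(doc), DecoderContext.builder().build())
assert(back == CallRef(42))
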
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/keys/KeyPiiIdCallCodec.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/keys/KeyPiiIdCallCodec.scala
index 3dc105c..403fd5e 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/keys/KeyPiiIdCallCodec.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/keys/KeyPiiIdCallCodec.scala
@@ -6,10 +6,9 @@ import com.workflowfm.pew.stateless.instances.kafka.settings.KafkaExecutorSettin
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
import org.bson.{BsonReader, BsonWriter}
-class KeyPiiIdCallCodec( refCodec: Codec[CallRef] )
- extends ClassCodec[KeyPiiIdCall] {
+class KeyPiiIdCallCodec(refCodec: Codec[CallRef]) extends ClassCodec[KeyPiiIdCall] {
- val idN = "id"
+ val idN = "id"
val callRefN = "ref"
override def decodeBody(reader: BsonReader, ctx: DecoderContext): KeyPiiIdCall = {
@@ -18,17 +17,17 @@ class KeyPiiIdCallCodec( refCodec: Codec[CallRef] )
val piiId = reader.readObjectId()
reader.readName(callRefN)
- val ref: CallRef = ctx.decodeWithChildContext( refCodec, reader )
+ val ref: CallRef = ctx.decodeWithChildContext(refCodec, reader)
- KeyPiiIdCall( piiId, ref )
+ KeyPiiIdCall(piiId, ref)
}
override def encodeBody(writer: BsonWriter, key: KeyPiiIdCall, ctx: EncoderContext): Unit = {
writer.writeName(idN)
- writer.writeObjectId( key.piiId )
+ writer.writeObjectId(key.piiId)
writer.writeName(callRefN)
- ctx.encodeWithChildContext( refCodec, writer, key.ref )
+ ctx.encodeWithChildContext(refCodec, writer, key.ref)
}
}
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/keys/KeyPiiIdCodec.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/keys/KeyPiiIdCodec.scala
index 73a3f24..939ca58 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/keys/KeyPiiIdCodec.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/keys/KeyPiiIdCodec.scala
@@ -10,14 +10,14 @@ class KeyPiiIdCodec extends ClassCodec[KeyPiiId] {
val idN = "id"
override def decodeBody(reader: BsonReader, ctx: DecoderContext): KeyPiiId = {
- reader.readName( idN )
+ reader.readName(idN)
val piiId = reader.readObjectId()
- KeyPiiId( piiId )
+ KeyPiiId(piiId)
}
override def encodeBody(writer: BsonWriter, key: KeyPiiId, ctx: EncoderContext): Unit = {
- writer.writeName( idN )
- writer.writeObjectId( key.piiId )
+ writer.writeName(idN)
+ writer.writeObjectId(key.piiId)
}
}
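
KeyPiiIdCodec extends ClassCodec, which supplies the enclosing document (and, presumably, a class discriminator) around the body written here. A sketch that inspects the encoded shape in memory:

import org.bson.{BsonDocument, BsonDocumentWriter}
import org.bson.codecs.EncoderContext
import org.bson.types.ObjectId

val codec = new KeyPiiIdCodec
val doc   = new BsonDocument()

codec.encode(new BsonDocumentWriter(doc), KeyPiiId(new ObjectId()), EncoderContext.builder().build())
// The body contributed by encodeBody is { "id": ObjectId("...") };
// ClassCodec may wrap it with additional envelope fields.
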
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/AssignmentCodec.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/AssignmentCodec.scala
index 544b1a6..70fed23 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/AssignmentCodec.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/AssignmentCodec.scala
@@ -9,65 +9,62 @@ import org.bson._
import org.bson.codecs._
class AssignmentCodec(
- piiCodec: Codec[PiiT],
+ piiCodec: Codec[PiiT],
callRefCodec: Codec[CallRef],
- piResCodec: Codec[PiResT]
-
- ) extends ClassCodec[Assignment] {
+ piResCodec: Codec[PiResT]
+) extends ClassCodec[Assignment] {
import com.workflowfm.pew.mongodb.bson.BsonUtil._
val uuidN = "uuid"
- val piiN = "pii"
- val refN = "ref"
+ val piiN = "pii"
+ val refN = "ref"
val procN = "proc"
val argsN = "args"
- val resN = "res"
+ val resN = "res"
override def decodeBody(reader: BsonReader, ctx: DecoderContext): Assignment = {
- reader.readName( piiN )
- val pii = ctx.decodeWithChildContext( piiCodec, reader )
+ reader.readName(piiN)
+ val pii = ctx.decodeWithChildContext(piiCodec, reader)
- reader.readName( refN )
- val callRef: CallRef = ctx.decodeWithChildContext( callRefCodec, reader )
+ reader.readName(refN)
+ val callRef: CallRef = ctx.decodeWithChildContext(callRefCodec, reader)
- reader.readName( procN )
+ reader.readName(procN)
val proc = reader.readString()
- val args: List[PiResource]
- = readArray( reader, argsN ) { () =>
- reader.readStartDocument()
+ val args: List[PiResource] = readArray(reader, argsN) { () =>
+ reader.readStartDocument()
- reader.readName( refN )
- val arg = ctx.decodeWithChildContext( piResCodec, reader )
+ reader.readName(refN)
+ val arg = ctx.decodeWithChildContext(piResCodec, reader)
- reader.readEndDocument()
- arg
- }
+ reader.readEndDocument()
+ arg
+ }
- Assignment( pii, callRef, proc, args )
+ Assignment(pii, callRef, proc, args)
}
override def encodeBody(writer: BsonWriter, value: Assignment, ctx: EncoderContext): Unit = {
- writer.writeName( piiN )
- ctx.encodeWithChildContext( piiCodec, writer, value.pii )
+ writer.writeName(piiN)
+ ctx.encodeWithChildContext(piiCodec, writer, value.pii)
- writer.writeName( refN )
- ctx.encodeWithChildContext( callRefCodec, writer, value.callRef )
+ writer.writeName(refN)
+ ctx.encodeWithChildContext(callRefCodec, writer, value.callRef)
- writer.writeName( procN )
- writer.writeString( value.process )
+ writer.writeName(procN)
+ writer.writeString(value.process)
- writeArray( writer, argsN, value.args ) {
- arg =>
- writer.writeStartDocument()
+ writeArray(writer, argsN, value.args) { arg =>
+ writer.writeStartDocument()
- writer.writeName( refN )
- ctx.encodeWithChildContext( piResCodec, writer, arg )
+ writer.writeName(refN)
+ ctx.encodeWithChildContext(piResCodec, writer, arg)
- writer.writeEndDocument()
+ writer.writeEndDocument()
}
}
-}
\ No newline at end of file
+}
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/PiiLogCodec.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/PiiLogCodec.scala
index 44bdfe2..5fbaa0f 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/PiiLogCodec.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/PiiLogCodec.scala
@@ -7,21 +7,20 @@ import org.bson._
import org.bson.codecs._
import org.bson.types.ObjectId
-class PiiLogCodec( eventCodec: Codec[PiEvent[ObjectId]] )
- extends ClassCodec[PiiLog] {
+class PiiLogCodec(eventCodec: Codec[PiEvent[ObjectId]]) extends ClassCodec[PiiLog] {
val eventN: String = "event"
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiiLog = {
- reader.readName( eventN )
+ reader.readName(eventN)
val event: PiEvent[ObjectId] = ctx.decodeWithChildContext(eventCodec, reader)
- PiiLog( event )
+ PiiLog(event)
}
override def encodeBody(writer: BsonWriter, value: PiiLog, ctx: EncoderContext): Unit = {
- writer.writeName( eventN )
+ writer.writeName(eventN)
ctx.encodeWithChildContext(eventCodec, writer, value.event)
}
-}
\ No newline at end of file
+}
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/PiiUpdateCodec.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/PiiUpdateCodec.scala
index 3acab20..1242715 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/PiiUpdateCodec.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/PiiUpdateCodec.scala
@@ -8,22 +8,21 @@ import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
import org.bson.types.ObjectId
import org.bson.{BsonReader, BsonWriter}
-class PiiUpdateCodec( piiCodec: Codec[PiiT] )
- extends ClassCodec[PiiUpdate] {
+class PiiUpdateCodec(piiCodec: Codec[PiiT]) extends ClassCodec[PiiUpdate] {
val piiN = "pii"
override def decodeBody(reader: BsonReader, ctx: DecoderContext): PiiUpdate = {
- reader.readName( piiN )
- val piiVal: PiInstance[ObjectId] = ctx.decodeWithChildContext( piiCodec, reader )
+ reader.readName(piiN)
+ val piiVal: PiInstance[ObjectId] = ctx.decodeWithChildContext(piiCodec, reader)
- PiiUpdate( piiVal )
+ PiiUpdate(piiVal)
}
override def encodeBody(writer: BsonWriter, value: PiiUpdate, ctx: EncoderContext): Unit = {
- writer.writeName( piiN )
- ctx.encodeWithChildContext( piiCodec, writer, value.pii )
+ writer.writeName(piiN)
+ ctx.encodeWithChildContext(piiCodec, writer, value.pii)
}
-}
\ No newline at end of file
+}
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/ReduceRequestCodec.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/ReduceRequestCodec.scala
index a733c66..b48437d 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/ReduceRequestCodec.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/ReduceRequestCodec.scala
@@ -9,54 +9,54 @@ import org.bson.codecs.{Codec, DecoderContext, EncoderContext}
import org.bson.types.ObjectId
import org.bson.{BsonReader, BsonWriter}
-class ReduceRequestCodec( piiCodec: Codec[PiiT], refCodec: Codec[CallRef], objCodec: Codec[PiObject] )
- extends ClassCodec[ReduceRequest] {
+class ReduceRequestCodec(piiCodec: Codec[PiiT], refCodec: Codec[CallRef], objCodec: Codec[PiObject])
+ extends ClassCodec[ReduceRequest] {
import com.workflowfm.pew.mongodb.bson.BsonUtil._
- val piiN = "pii"
+ val piiN = "pii"
val argsN = "args"
- val refN = "ref"
- val objN = "obj"
+ val refN = "ref"
+ val objN = "obj"
override def decodeBody(reader: BsonReader, ctx: DecoderContext): ReduceRequest = {
- reader.readName( piiN )
- val pii: PiInstance[ObjectId] = ctx.decodeWithChildContext( piiCodec, reader )
+ reader.readName(piiN)
+ val pii: PiInstance[ObjectId] = ctx.decodeWithChildContext(piiCodec, reader)
- val args = readArray( reader, argsN ) { () =>
+ val args = readArray(reader, argsN) { () =>
reader.readStartDocument()
- reader.readName( refN )
- val ref = ctx.decodeWithChildContext( refCodec, reader )
+ reader.readName(refN)
+ val ref = ctx.decodeWithChildContext(refCodec, reader)
- reader.readName( objN )
- val obj = ctx.decodeWithChildContext( objCodec, reader )
+ reader.readName(objN)
+ val obj = ctx.decodeWithChildContext(objCodec, reader)
reader.readEndDocument()
(ref, obj)
}
- ReduceRequest( pii, args )
+ ReduceRequest(pii, args)
}
override def encodeBody(writer: BsonWriter, value: ReduceRequest, ctx: EncoderContext): Unit = {
- writer.writeName( piiN )
- ctx.encodeWithChildContext( piiCodec, writer, value.pii )
+ writer.writeName(piiN)
+ ctx.encodeWithChildContext(piiCodec, writer, value.pii)
- writeArray( writer, argsN, value.args ) {
+ writeArray(writer, argsN, value.args) {
case (ref, obj) =>
writer.writeStartDocument()
- writer.writeName( refN )
- ctx.encodeWithChildContext( refCodec, writer, ref )
+ writer.writeName(refN)
+ ctx.encodeWithChildContext(refCodec, writer, ref)
- writer.writeName( objN )
- ctx.encodeWithChildContext( objCodec, writer, obj )
+ writer.writeName(objN)
+ ctx.encodeWithChildContext(objCodec, writer, obj)
writer.writeEndDocument()
}
}
-}
\ No newline at end of file
+}
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/SequenceFailureCodec.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/SequenceFailureCodec.scala
index efe12be..fd0eea1 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/SequenceFailureCodec.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/SequenceFailureCodec.scala
@@ -13,38 +13,37 @@ class SequenceFailureCodec(
refCodec: Codec[CallRef],
objCodec: Codec[PiObject],
errCodec: Codec[PiFailure[ObjectId]]
-
- ) extends ClassCodec[SequenceFailure] {
+) extends ClassCodec[SequenceFailure] {
import com.workflowfm.pew.mongodb.bson.BsonUtil._
- val hasPiiN = "hasPii"
- val piiN = "pii"
- val resultsN = "results"
+ val hasPiiN = "hasPii"
+ val piiN = "pii"
+ val resultsN = "results"
val failuresN = "failures"
- val refN = "ref"
- val hasObjN = "hasObj"
- val objN = "obj"
- val failN = "err"
+ val refN = "ref"
+ val hasObjN = "hasObj"
+ val objN = "obj"
+ val failN = "err"
override def decodeBody(reader: BsonReader, ctx: DecoderContext): SequenceFailure = {
- val hasPii: Boolean = reader.readBoolean( hasPiiN )
- reader.readName( piiN )
+ val hasPii: Boolean = reader.readBoolean(hasPiiN)
+ reader.readName(piiN)
val eitherPii =
if (hasPii)
- Right( ctx.decodeWithChildContext( piiCodec, reader ) )
+ Right(ctx.decodeWithChildContext(piiCodec, reader))
else
- Left( reader.readObjectId() )
+ Left(reader.readObjectId())
- val results = readArray( reader, resultsN ) { () =>
+ val results = readArray(reader, resultsN) { () =>
reader.readStartDocument()
- reader.readName( refN )
- val ref: CallRef = ctx.decodeWithChildContext( refCodec, reader )
+ reader.readName(refN)
+ val ref: CallRef = ctx.decodeWithChildContext(refCodec, reader)
- val hasObj: Boolean = reader.readBoolean( hasObjN )
+ val hasObj: Boolean = reader.readBoolean(hasObjN)
val obj: PiObject =
if (hasObj) {
@@ -56,8 +55,8 @@ class SequenceFailureCodec(
(ref, obj)
}
- val failures = readArray( reader, failuresN ) { () =>
- ctx.decodeWithChildContext( errCodec, reader )
+ val failures = readArray(reader, failuresN) { () =>
+ ctx.decodeWithChildContext(errCodec, reader)
}
SequenceFailure(
@@ -69,22 +68,22 @@ class SequenceFailureCodec(
override def encodeBody(writer: BsonWriter, value: SequenceFailure, ctx: EncoderContext): Unit = {
- writer.writeBoolean( hasPiiN, value.pii.isRight )
- writer.writeName( piiN )
+ writer.writeBoolean(hasPiiN, value.pii.isRight)
+ writer.writeName(piiN)
value.pii match {
- case Right( _pii ) => ctx.encodeWithChildContext( piiCodec, writer, _pii )
- case Left( _piiId ) => writer.writeObjectId( _piiId )
+ case Right(_pii) => ctx.encodeWithChildContext(piiCodec, writer, _pii)
+ case Left(_piiId) => writer.writeObjectId(_piiId)
}
- writeArray( writer, resultsN, value.returns ) {
- case ( ref, obj ) =>
+ writeArray(writer, resultsN, value.returns) {
+ case (ref, obj) =>
writer.writeStartDocument()
- writer.writeName( refN )
- ctx.encodeWithChildContext( refCodec, writer, ref )
+ writer.writeName(refN)
+ ctx.encodeWithChildContext(refCodec, writer, ref)
- writer.writeBoolean( hasObjN, obj != null )
+ writer.writeBoolean(hasObjN, obj != null)
if (obj != null) {
writer.writeName(objN)
@@ -94,8 +93,8 @@ class SequenceFailureCodec(
writer.writeEndDocument()
}
- writeArray( writer, failuresN, value.errors ) {
- ctx.encodeWithChildContext( errCodec, writer, _ )
+ writeArray(writer, failuresN, value.errors) {
+ ctx.encodeWithChildContext(errCodec, writer, _)
}
}
-}
\ No newline at end of file
+}
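
The hasPii/hasObj booleans above implement a presence-flag pattern: the flag is always written before the optional field, so the decoder never has to peek at reader state. The same idea reduced to Option[Int], as a self-contained sketch:

import org.bson.{BsonReader, BsonWriter}
import org.bson.codecs.{Codec, DecoderContext, EncoderContext}

// Sketch only: the presence-flag pattern from SequenceFailureCodec, minimised.
class OptionIntCodec extends Codec[Option[Int]] {

  override def encode(writer: BsonWriter, value: Option[Int], ctx: EncoderContext): Unit = {
    writer.writeStartDocument()
    writer.writeBoolean("hasValue", value.isDefined) // flag first, like hasPii/hasObj
    value.foreach(v => writer.writeInt32("value", v)) // field written only when present
    writer.writeEndDocument()
  }

  override def decode(reader: BsonReader, ctx: DecoderContext): Option[Int] = {
    reader.readStartDocument()
    val hasValue = reader.readBoolean("hasValue") // read the flag unconditionally
    val result   = if (hasValue) Some(reader.readInt32("value")) else None
    reader.readEndDocument()
    result
  }

  override def getEncoderClass: Class[Option[Int]] = classOf[Option[Int]]
}
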
diff --git a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/SequenceRequestCodec.scala b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/SequenceRequestCodec.scala
index 7cd500e..3e082a8 100644
--- a/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/SequenceRequestCodec.scala
+++ b/src/com/workflowfm/pew/stateless/instances/kafka/settings/bson/codecs/messages/SequenceRequestCodec.scala
@@ -10,38 +10,38 @@ import org.bson.{BsonReader, BsonWriter}
class SequenceRequestCodec(
refCodec: Codec[CallRef],
objCodec: Codec[PiObject]
- ) extends ClassCodec[SequenceRequest] {
+) extends ClassCodec[SequenceRequest] {
- val idN = "_id"
+ val idN = "_id"
val refN = "callRef"
val objN = "piObj"
override def decodeBody(reader: BsonReader, ctx: DecoderContext): SequenceRequest = {
- reader.readName( idN )
+ reader.readName(idN)
val piiId = reader.readObjectId()
- reader.readName( refN )
- val ref: CallRef = ctx.decodeWithChildContext( refCodec, reader )
+ reader.readName(refN)
+ val ref: CallRef = ctx.decodeWithChildContext(refCodec, reader)
- reader.readName( objN )
- val obj: PiObject = ctx.decodeWithChildContext( objCodec, reader )
+ reader.readName(objN)
+ val obj: PiObject = ctx.decodeWithChildContext(objCodec, reader)
- SequenceRequest( piiId, (ref, obj) )
+ SequenceRequest(piiId, (ref, obj))
}
override def encodeBody(writer: BsonWriter, value: SequenceRequest, ctx: EncoderContext): Unit = {
- writer.writeName( idN )
- writer.writeObjectId( value.piiId )
+ writer.writeName(idN)
+ writer.writeObjectId(value.piiId)
val (callRef, piObj) = value.request
- writer.writeName( refN )
- ctx.encodeWithChildContext( refCodec, writer, callRef )
+ writer.writeName(refN)
+ ctx.encodeWithChildContext(refCodec, writer, callRef)
- writer.writeName( objN )
- ctx.encodeWithChildContext( objCodec, writer, piObj )
+ writer.writeName(objN)
+ ctx.encodeWithChildContext(objCodec, writer, piObj)
}
-}
\ No newline at end of file
+}
diff --git a/src/com/workflowfm/pew/util/ClassLoaderUtil.scala b/src/com/workflowfm/pew/util/ClassLoaderUtil.scala
index 1ea2d96..ebe1856 100644
--- a/src/com/workflowfm/pew/util/ClassLoaderUtil.scala
+++ b/src/com/workflowfm/pew/util/ClassLoaderUtil.scala
@@ -10,13 +10,13 @@ object ClassLoaderUtil {
* @tparam T The return type of `fnWrapped`
* @return The value returned by `fnWrapped`
*/
- def withClassLoader[T]( tmpClassLoader: ClassLoader )( fnWrapped: => T ): T = {
+ def withClassLoader[T](tmpClassLoader: ClassLoader)(fnWrapped: => T): T = {
val pushedClassLoader = Thread.currentThread().getContextClassLoader
- try{
- Thread.currentThread().setContextClassLoader( tmpClassLoader )
+ try {
+ Thread.currentThread().setContextClassLoader(tmpClassLoader)
fnWrapped
} finally {
- Thread.currentThread().setContextClassLoader( pushedClassLoader )
+ Thread.currentThread().setContextClassLoader(pushedClassLoader)
}
}
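
A typical use is running framework callbacks (e.g. Kafka deserialization) on threads whose context class loader cannot see application classes. A sketch:

// Sketch only: temporarily expose the application class loader to a block.
val appLoader: ClassLoader = getClass.getClassLoader

val loaded: Class[_] = ClassLoaderUtil.withClassLoader(appLoader) {
  // Lookups through the context class loader now resolve against appLoader;
  // the previous loader is restored afterwards, even if this block throws.
  Class.forName("com.workflowfm.pew.PiInstance", false, Thread.currentThread().getContextClassLoader)
}
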
diff --git a/src/com/workflowfm/pew/util/ClassMap.scala b/src/com/workflowfm/pew/util/ClassMap.scala
index 304440d..e981d49 100644
--- a/src/com/workflowfm/pew/util/ClassMap.scala
+++ b/src/com/workflowfm/pew/util/ClassMap.scala
@@ -2,29 +2,27 @@ package com.workflowfm.pew.util
import scala.reflect.ClassTag
-class ClassMap[CommonT]( elements: Seq[CommonT] ) {
+class ClassMap[CommonT](elements: Seq[CommonT]) {
type ClassKey = Class[_ <: CommonT]
- protected val idMap: Map[String, Seq[ClassKey]]
- = elements
- .map( _.getClass )
- .groupBy[String]( _.getSimpleName )
+ protected val idMap: Map[String, Seq[ClassKey]] = elements
+ .map(_.getClass)
+ .groupBy[String](_.getSimpleName)
- protected val classMap: Map[ClassKey, Seq[CommonT]]
- = elements
- .groupBy[ClassKey]( _.getClass )
- .withDefaultValue( Seq[CommonT]() )
+ protected val classMap: Map[ClassKey, Seq[CommonT]] = elements
+ .groupBy[ClassKey](_.getClass)
+ .withDefaultValue(Seq[CommonT]())
/** Retrieve all elements in the ClassMap whose runtime class is assignable to `SubT`.
*
* @return A sequence of elements of type `SubT`.
*/
- def apply[SubT <: CommonT]( implicit ct: ClassTag[SubT] ): Seq[SubT] = {
+ def apply[SubT <: CommonT](implicit ct: ClassTag[SubT]): Seq[SubT] = {
val rtClass: ClassKey = ct.runtimeClass.asInstanceOf[ClassKey]
def getMembers(key: ClassKey, instances: Seq[CommonT]): Seq[SubT] = {
- if ( rtClass isAssignableFrom key )
+ if (rtClass isAssignableFrom key)
instances map (_.asInstanceOf[SubT])
else
Seq()
@@ -33,15 +31,14 @@ class ClassMap[CommonT]( elements: Seq[CommonT] ) {
classMap.flatMap((getMembers _).tupled).toSeq
}
- def byName( name: String ): Seq[CommonT] = classMap( idMap( name ).head )
+ def byName(name: String): Seq[CommonT] = classMap(idMap(name).head)
- def filter( fn: CommonT => Boolean ): ClassMap[CommonT]
- = new ClassMap[CommonT]( elements filter fn )
+ def filter(fn: CommonT => Boolean): ClassMap[CommonT] = new ClassMap[CommonT](elements filter fn)
}
object ClassMap {
- def apply[T]( args: T* ): ClassMap[T] = new ClassMap( args.toSeq )
+ def apply[T](args: T*): ClassMap[T] = new ClassMap(args.toSeq)
}
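
To make the API concrete, a sketch with a hypothetical two-class hierarchy:

// Hypothetical hierarchy, purely for illustration.
sealed trait Event
final class Started extends Event
final class Stopped extends Event

val events = ClassMap[Event](new Started, new Stopped, new Started)

val started: Seq[Started] = events.apply[Started]    // both Started instances
val stoppedByName         = events.byName("Stopped") // keyed by getSimpleName
val filtered              = events.filter(e => !e.isInstanceOf[Started])
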