actually build the samples

and fix the resulting breakage (caused by not having compiled them for
some time)

also remove the last casts to FiniteDuration
This commit is contained in:
Roland 2012-10-15 17:17:54 +02:00
parent bff79c2f94
commit b96d77c15a
14 changed files with 17 additions and 17 deletions

View file

@@ -160,7 +160,7 @@ class ResizerSpec extends AkkaSpec(ResizerSpec.config) with DefaultTimeout with
// sending in too quickly will result in skipped resize due to many resizeInProgress conflicts
Thread.sleep(20.millis.dilated.toMillis)
}
within((((d * loops).asInstanceOf[FiniteDuration] / resizer.lowerBound) + 2.seconds.dilated).asInstanceOf[FiniteDuration]) {
within((d * loops / resizer.lowerBound) + 2.seconds.dilated) {
for (m ← 0 until loops) expectMsg("done")
}
}

View file

@@ -231,7 +231,7 @@ class ProducerFeatureTest extends WordSpec with BeforeAndAfterAll with BeforeAnd
def stopGracefully(actors: ActorRef*)(implicit timeout: Timeout) {
val deadline = timeout.duration.fromNow
for (a ← actors)
Await.result(gracefulStop(a, deadline.timeLeft.asInstanceOf[FiniteDuration]), deadline.timeLeft) must be === true
Await.result(gracefulStop(a, deadline.timeLeft), deadline.timeLeft) must be === true
}
}

View file

@@ -93,7 +93,7 @@ private[cluster] final class ClusterHeartbeatSender extends Actor with ActorLogg
selfAddress.toString, MonitoredByNrOfMembers)
// start periodic heartbeat to other nodes in cluster
val heartbeatTask = scheduler.schedule(PeriodicTasksInitialDelay.max(HeartbeatInterval).asInstanceOf[FiniteDuration],
val heartbeatTask = scheduler.schedule(PeriodicTasksInitialDelay max HeartbeatInterval,
HeartbeatInterval, self, HeartbeatTick)
override def preStart(): Unit = cluster.subscribe(self, classOf[MemberEvent])

View file

@@ -61,13 +61,13 @@ private[cluster] class ClusterMetricsCollector(publisher: ActorRef) extends Acto
/**
* Start periodic gossip to random nodes in cluster
*/
val gossipTask = scheduler.schedule(PeriodicTasksInitialDelay.max(MetricsGossipInterval).asInstanceOf[FiniteDuration],
val gossipTask = scheduler.schedule(PeriodicTasksInitialDelay max MetricsGossipInterval,
MetricsGossipInterval, self, GossipTick)
/**
* Start periodic metrics collection
*/
val metricsTask = scheduler.schedule(PeriodicTasksInitialDelay.max(MetricsInterval).asInstanceOf[FiniteDuration],
val metricsTask = scheduler.schedule(PeriodicTasksInitialDelay max MetricsInterval,
MetricsInterval, self, MetricsTick)
override def preStart(): Unit = {

View file

@@ -339,7 +339,7 @@ private[akka] class ThrottleActor(channelContext: ChannelHandlerContext)
else {
val microsToSend = (now - d.lastSent) / 1000
val (s1, s2) = split(d.queue.head, (microsToSend * d.rateMBit / 8).toInt)
(d.copy(queue = s2 +: d.queue.tail), toSend :+ s1, Some(((timeForPacket - now).nanos min packetSplitThreshold).asInstanceOf[FiniteDuration]))
(d.copy(queue = s2 +: d.queue.tail), toSend :+ s1, Some((timeForPacket - now).nanos min packetSplitThreshold))
}
}
}

View file

@@ -24,7 +24,7 @@ class HttpProducer(transformer: ActorRef) extends Actor with Producer {
def endpointUri = "jetty://http://akka.io/?bridgeEndpoint=true"
override def transformOutgoingMessage(msg: Any) = msg match {
case msg: CamelMessage ⇒ msg.withHeaders(msg.headers(Set(Exchange.HTTP_PATH)))
case msg: CamelMessage ⇒ msg.copy(headers = msg.headers(Set(Exchange.HTTP_PATH)))
}
override def routeResponse(msg: Any) {

View file

@@ -19,6 +19,6 @@ class FileConsumer(uri: String) extends Consumer {
def endpointUri = uri
def receive = {
case msg: CamelMessage ⇒
println("Received file %s with content:\n%s".format(msg.getHeader(Exchange.FILE_NAME), msg.bodyAs[String]))
println("Received file %s with content:\n%s".format(msg.headers(Exchange.FILE_NAME), msg.bodyAs[String]))
}
}

View file

@@ -6,7 +6,7 @@ import java.util.concurrent.TimeUnit;
import sample.cluster.stats.japi.StatsMessages.JobFailed;
import sample.cluster.stats.japi.StatsMessages.StatsResult;
import scala.concurrent.util.Duration;
import scala.concurrent.duration.Duration;
import akka.actor.ActorRef;
import akka.actor.ReceiveTimeout;
import akka.actor.UntypedActor;

View file

@@ -10,8 +10,8 @@ import sample.cluster.stats.japi.StatsMessages.JobFailed;
import sample.cluster.stats.japi.StatsMessages.StatsJob;
import sample.cluster.stats.japi.StatsMessages.StatsResult;
import scala.concurrent.forkjoin.ThreadLocalRandom;
import scala.concurrent.util.Duration;
import scala.concurrent.util.FiniteDuration;
import scala.concurrent.duration.Duration;
import scala.concurrent.duration.FiniteDuration;
import akka.actor.ActorRef;
import akka.actor.Address;
import akka.actor.Cancellable;

View file

@@ -4,7 +4,7 @@ import java.util.concurrent.TimeUnit;
import sample.cluster.transformation.japi.TransformationMessages.TransformationJob;
import scala.concurrent.ExecutionContext;
import scala.concurrent.util.Duration;
import scala.concurrent.duration.Duration;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.Props;

View file

@@ -5,9 +5,9 @@ package sample.fsm.buncher
import akka.actor.ActorRefFactory
import scala.reflect.ClassTag
import scala.concurrent.util.Duration
import scala.concurrent.duration.Duration
import akka.actor.{ FSM, Actor, ActorRef }
import scala.concurrent.util.FiniteDuration
import scala.concurrent.duration.FiniteDuration
/*
* generic typed object buncher.

View file

@@ -40,7 +40,7 @@ class CreationActor extends Actor {
case result: MathResult ⇒ result match {
case MultiplicationResult(n1, n2, r) ⇒
println("Mul result: %d * %d = %d".format(n1, n2, r))
case DivisionResult(n1, n2, r) ⇒
case DivisionResult(n1, n2, r) ⇒
println("Div result: %.0f / %d = %.2f".format(n1, n2, r))
}
}

View file

@@ -40,7 +40,7 @@ class LookupActor extends Actor {
def receive = {
case (actor: ActorRef, op: MathOp) ⇒ actor ! op
case result: MathResult ⇒ result match {
case AddResult(n1, n2, r) ⇒
case AddResult(n1, n2, r) ⇒
println("Add result: %d + %d = %d".format(n1, n2, r))
case SubtractResult(n1, n2, r) ⇒
println("Sub result: %d - %d = %d".format(n1, n2, r))

View file

@@ -65,7 +65,7 @@ object AkkaBuild extends Build {
generatePdf in Sphinx <<= generatePdf in Sphinx in LocalProject(docs.id) map identity
),
aggregate = Seq(actor, testkit, actorTests, dataflow, remote, remoteTests, camel, cluster, slf4j, agent, transactor, mailboxes, zeroMQ, kernel, akkaSbtPlugin, osgi, osgiAries, docs, contrib)
aggregate = Seq(actor, testkit, actorTests, dataflow, remote, remoteTests, camel, cluster, slf4j, agent, transactor, mailboxes, zeroMQ, kernel, akkaSbtPlugin, osgi, osgiAries, docs, contrib, samples)
)
lazy val actor = Project(