Merge branch 'master' of github.com:jboner/akka

This commit is contained in:
Jonas Bonér 2011-04-20 09:29:29 +02:00
commit 6e6cd140ea
37 changed files with 282 additions and 388 deletions

View file

@ -425,7 +425,7 @@ class RoutingSpec extends WordSpec with MustMatchers {
}) })
def limit = 1 def limit = 1
def selectionCount = 2 def selectionCount = 1
def rampupRate = 0.1 def rampupRate = 0.1
def partialFill = true def partialFill = true
def instance = factory def instance = factory
@ -458,7 +458,7 @@ class RoutingSpec extends WordSpec with MustMatchers {
}) })
def limit = 2 def limit = 2
def selectionCount = 2 def selectionCount = 1
def rampupRate = 0.1 def rampupRate = 0.1
def partialFill = false def partialFill = false
def instance = factory def instance = factory

View file

@ -88,9 +88,6 @@ trait ActorRef extends ActorRefShared with java.lang.Comparable[ActorRef] { scal
protected[akka] var _uuid = newUuid protected[akka] var _uuid = newUuid
@volatile @volatile
protected[this] var _status: ActorRefInternals.StatusType = ActorRefInternals.UNSTARTED protected[this] var _status: ActorRefInternals.StatusType = ActorRefInternals.UNSTARTED
@volatile
protected[akka] var _futureTimeout: Option[ScheduledFuture[AnyRef]] = None
protected[akka] val guard = new ReentrantGuard
/** /**
* User overridable callback/setting. * User overridable callback/setting.
@ -572,20 +569,6 @@ trait ActorRef extends ActorRefShared with java.lang.Comparable[ActorRef] { scal
} }
override def toString = "Actor[" + id + ":" + uuid + "]" override def toString = "Actor[" + id + ":" + uuid + "]"
protected[akka] def checkReceiveTimeout = {
cancelReceiveTimeout
if (receiveTimeout.isDefined && dispatcher.mailboxSize(this) <= 0) { //Only reschedule if desired and there are currently no more messages to be processed
_futureTimeout = Some(Scheduler.scheduleOnce(this, ReceiveTimeout, receiveTimeout.get, TimeUnit.MILLISECONDS))
}
}
protected[akka] def cancelReceiveTimeout = {
if (_futureTimeout.isDefined) {
_futureTimeout.get.cancel(true)
_futureTimeout = None
}
}
} }
/** /**
@ -598,7 +581,10 @@ class LocalActorRef private[akka] (
val homeAddress: Option[InetSocketAddress], val homeAddress: Option[InetSocketAddress],
val clientManaged: Boolean = false) val clientManaged: Boolean = false)
extends ActorRef with ScalaActorRef { extends ActorRef with ScalaActorRef {
protected[akka] val guard = new ReentrantGuard
@volatile
protected[akka] var _futureTimeout: Option[ScheduledFuture[AnyRef]] = None
@volatile @volatile
private[akka] lazy val _linkedActors = new ConcurrentHashMap[Uuid, ActorRef] private[akka] lazy val _linkedActors = new ConcurrentHashMap[Uuid, ActorRef]
@volatile @volatile
@ -1102,6 +1088,21 @@ class LocalActorRef private[akka] (
actor.preStart // run actor preStart actor.preStart // run actor preStart
Actor.registry.register(this) Actor.registry.register(this)
} }
protected[akka] def checkReceiveTimeout = {
cancelReceiveTimeout
if (receiveTimeout.isDefined && dispatcher.mailboxSize(this) <= 0) { //Only reschedule if desired and there are currently no more messages to be processed
_futureTimeout = Some(Scheduler.scheduleOnce(this, ReceiveTimeout, receiveTimeout.get, TimeUnit.MILLISECONDS))
}
}
protected[akka] def cancelReceiveTimeout = {
if (_futureTimeout.isDefined) {
_futureTimeout.get.cancel(true)
_futureTimeout = None
}
}
} }
/** /**

View file

@ -104,7 +104,7 @@ trait DefaultActorPool extends ActorPool { this: Actor =>
/** /**
* Selectors * Selectors
* These traits define how, when a message needs to be routed, delegate(s) are chosen from the pool * These traits define how, when a message needs to be routed, delegate(s) are chosen from the pool
**/ */
/** /**
* Returns the set of delegates with the least amount of message backlog. * Returns the set of delegates with the least amount of message backlog.
@ -141,7 +141,7 @@ trait RoundRobinSelector {
else selectionCount else selectionCount
val set = val set =
for (i <- 0 to take) yield { for (i <- 0 until take) yield {
_last = (_last + 1) % length _last = (_last + 1) % length
delegates(_last) delegates(_last)
} }

View file

@ -8,7 +8,7 @@ PAPER =
BUILDDIR = _build BUILDDIR = _build
EASYINSTALL = easy_install EASYINSTALL = easy_install
LOCALPACKAGES = $(shell pwd)/$(BUILDDIR)/site-packages LOCALPACKAGES = $(shell pwd)/$(BUILDDIR)/site-packages
PYGMENTSDIR = pygments PYGMENTSDIR = _sphinx/pygments
# Internal variables. # Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_a4 = -D latex_paper_size=a4

View file

Before

Width:  |  Height:  |  Size: 5.8 KiB

After

Width:  |  Height:  |  Size: 5.8 KiB

Before After
Before After

View file

Before

Width:  |  Height:  |  Size: 6.6 KiB

After

Width:  |  Height:  |  Size: 6.6 KiB

Before After
Before After

View file

@ -30,13 +30,11 @@ div.footer {
/* link colors and text decoration */ /* link colors and text decoration */
a:link { a:link {
font-weight: bold;
text-decoration: none; text-decoration: none;
color: {{ theme_linkcolor }}; color: {{ theme_linkcolor }};
} }
a:visited { a:visited {
font-weight: bold;
text-decoration: none; text-decoration: none;
color: {{ theme_visitedlinkcolor }}; color: {{ theme_visitedlinkcolor }};
} }

View file

Before

Width:  |  Height:  |  Size: 1.1 KiB

After

Width:  |  Height:  |  Size: 1.1 KiB

Before After
Before After

View file

Before

Width:  |  Height:  |  Size: 1 KiB

After

Width:  |  Height:  |  Size: 1 KiB

Before After
Before After

View file

Before

Width:  |  Height:  |  Size: 164 B

After

Width:  |  Height:  |  Size: 164 B

Before After
Before After

View file

Before

Width:  |  Height:  |  Size: 365 B

After

Width:  |  Height:  |  Size: 365 B

Before After
Before After

View file

@ -7,6 +7,6 @@ pygments_style = friendly
full_logo = false full_logo = false
textcolor = #333333 textcolor = #333333
headingcolor = #0c3762 headingcolor = #0c3762
linkcolor = #dc3c01 linkcolor = #0c3762
visitedlinkcolor = #892601 visitedlinkcolor = #0c3762
hoverlinkcolor = #ff4500 hoverlinkcolor = #0c3762

View file

@ -7,7 +7,7 @@ import sys, os
# -- General configuration ----------------------------------------------------- # -- General configuration -----------------------------------------------------
sys.path.append(os.path.abspath('exts')) sys.path.append(os.path.abspath('_sphinx/exts'))
extensions = ['sphinx.ext.todo', 'includecode'] extensions = ['sphinx.ext.todo', 'includecode']
templates_path = ['_templates'] templates_path = ['_templates']
@ -31,13 +31,13 @@ html_theme = 'akka'
html_theme_options = { html_theme_options = {
'full_logo': 'true' 'full_logo': 'true'
} }
html_theme_path = ['themes'] html_theme_path = ['_sphinx/themes']
html_title = 'Akka Documentation' html_title = 'Akka Documentation'
html_logo = '_static/logo.png' html_logo = '_sphinx/static/logo.png'
#html_favicon = None #html_favicon = None
html_static_path = ['_static'] html_static_path = ['_sphinx/static']
html_last_updated_fmt = '%b %d, %Y' html_last_updated_fmt = '%b %d, %Y'
#html_sidebars = {} #html_sidebars = {}
@ -65,4 +65,4 @@ latex_elements = {
'preamble': '\\definecolor{VerbatimColor}{rgb}{0.935,0.935,0.935}' 'preamble': '\\definecolor{VerbatimColor}{rgb}{0.935,0.935,0.935}'
} }
# latex_logo = '_static/akka.png' # latex_logo = '_sphinx/static/akka.png'

View file

@ -4,80 +4,11 @@ Contents
.. toctree:: .. toctree::
:maxdepth: 2 :maxdepth: 2
manual/getting-started-first-scala intro/index
manual/getting-started-first-java scala/index
manual/fsm-scala
.. pending/actor-registry-java
.. pending/actor-registry-scala
.. pending/actors-scala
.. pending/agents-scala
.. pending/articles
.. pending/benchmarks
.. pending/building-akka
.. pending/buildr
.. pending/cluster-membership
.. pending/companies-using-akka
.. pending/configuration
.. pending/dataflow-java
.. pending/dataflow-scala
.. pending/deployment-scenarios
.. pending/developer-guidelines
.. pending/dispatchers-java
.. pending/dispatchers-scala
.. pending/event-handler
.. pending/external-sample-projects
.. pending/fault-tolerance-java
.. pending/fault-tolerance-scala
.. pending/Feature Stability Matrix
.. pending/futures-scala
.. pending/getting-started
.. pending/guice-integration
.. pending/Home
.. pending/http
.. pending/issue-tracking
.. pending/language-bindings
.. pending/licenses
.. pending/logging
.. pending/Migration-1.0-1.1
.. pending/migration-guide-0.10.x-1.0.x
.. pending/migration-guide-0.7.x-0.8.x
.. pending/migration-guide-0.8.x-0.9.x
.. pending/migration-guide-0.9.x-0.10.x
.. pending/migration-guides
.. pending/Recipes
.. pending/release-notes
.. pending/remote-actors-java
.. pending/remote-actors-scala
.. pending/routing-java
.. pending/routing-scala
.. pending/scheduler
.. pending/security
.. pending/serialization-java
.. pending/serialization-scala
.. pending/servlet
.. pending/slf4j
.. pending/sponsors
.. pending/stm
.. pending/stm-java
.. pending/stm-scala
.. pending/team
.. pending/test
.. pending/testkit
.. pending/testkit-example
.. pending/third-party-integrations
.. pending/transactors-java
.. pending/transactors-scala
.. pending/tutorial-chat-server-java
.. pending/tutorial-chat-server-scala
.. pending/typed-actors-java
.. pending/typed-actors-scala
.. pending/untyped-actors-java
.. pending/use-cases
.. pending/web
Links Links
===== =====
* `Akka Documentation <http://doc.akka.io>`_
* `Support <http://scalablesolutions.se>`_ * `Support <http://scalablesolutions.se>`_

View file

@ -3,108 +3,120 @@ Building Akka
This page describes how to build and run Akka from the latest source code. This page describes how to build and run Akka from the latest source code.
.. contents:: :local:
Get the source code Get the source code
------------------- -------------------
Akka uses `Git <http://git-scm.com>`_ and is hosted at `Github <http://github.com>`_. Akka uses `Git <http://git-scm.com>`_ and is hosted at `Github
<http://github.com>`_.
You first need Git installed on your machine. You can then clone the source repositories: You first need Git installed on your machine. You can then clone the source
* Akka repository from `<http://github.com/jboner/akka>`_ repositories:
* Akka Modules repository from `<http://github.com/jboner/akka-modules>`_
For example: - Akka repository from `<http://github.com/jboner/akka>`_
- Akka Modules repository from `<http://github.com/jboner/akka-modules>`_
:: For example::
git clone git://github.com/jboner/akka.git git clone git://github.com/jboner/akka.git
git clone git://github.com/jboner/akka-modules.git git clone git://github.com/jboner/akka-modules.git
If you have already cloned the repositories previously then you can update the code with ``git pull``: If you have already cloned the repositories previously then you can update the
code with ``git pull``::
::
git pull origin master git pull origin master
SBT - Simple Build Tool SBT - Simple Build Tool
----------------------- -----------------------
Akka is using the excellent `SBT <http://code.google.com/p/simple-build-tool>`_ build system. So the first thing you have to do is to download and install SBT. You can read more about how to do that `here <http://code.google.com/p/simple-build-tool/wiki/Setup>`_ . Akka is using the excellent `SBT <http://code.google.com/p/simple-build-tool>`_
build system. So the first thing you have to do is to download and install
SBT. You can read more about how to do that `here
<http://code.google.com/p/simple-build-tool/wiki/Setup>`_ .
The SBT commands that you'll need to build Akka are all included below. If you want to find out more about SBT and using it for your own projects do read the `SBT documentation <http://code.google.com/p/simple-build-tool/wiki/RunningSbt>`_. The SBT commands that you'll need to build Akka are all included below. If you
want to find out more about SBT and using it for your own projects do read the
`SBT documentation
<http://code.google.com/p/simple-build-tool/wiki/RunningSbt>`_.
The Akka SBT build file is ``project/build/AkkaProject.scala`` with some properties defined in ``project/build.properties``. The Akka SBT build file is ``project/build/AkkaProject.scala`` with some
properties defined in ``project/build.properties``.
----
Building Akka Building Akka
------------- -------------
First make sure that you are in the akka code directory: First make sure that you are in the akka code directory::
::
cd akka cd akka
Fetching dependencies Fetching dependencies
^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^
SBT does not fetch dependencies automatically. You need to manually do this with the ``update`` command: SBT does not fetch dependencies automatically. You need to manually do this with
the ``update`` command::
::
sbt update sbt update
Once finished, all the dependencies for Akka will be in the ``lib_managed`` directory under each module: akka-actor, akka-stm, and so on. Once finished, all the dependencies for Akka will be in the ``lib_managed``
directory under each module: akka-actor, akka-stm, and so on.
*Note: you only need to run update the first time you are building the code,
or when the dependencies have changed.*
*Note: you only need to run {{update}} the first time you are building the code, or when the dependencies have changed.*
Building Building
^^^^^^^^ ^^^^^^^^
To compile all the Akka core modules use the ``compile`` command: To compile all the Akka core modules use the ``compile`` command::
::
sbt compile sbt compile
You can run all tests with the ``test`` command: You can run all tests with the ``test`` command::
::
sbt test sbt test
If compiling and testing are successful then you have everything working for the latest Akka development version. If compiling and testing are successful then you have everything working for the
latest Akka development version.
Publish to local Ivy repository Publish to local Ivy repository
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If you want to deploy the artifacts to your local Ivy repository (for example, to use from an SBT project) use the ``publish-local`` command: If you want to deploy the artifacts to your local Ivy repository (for example,
to use from an SBT project) use the ``publish-local`` command::
::
sbt publish-local sbt publish-local
Publish to local Maven repository Publish to local Maven repository
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If you want to deploy the artifacts to your local Maven repository use: If you want to deploy the artifacts to your local Maven repository use::
::
sbt publish-local publish sbt publish-local publish
SBT interactive mode SBT interactive mode
^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^
Note that in the examples above we are calling ``sbt compile`` and ``sbt test`` and so on. SBT also has an interactive mode. If you just run ``sbt`` you enter the interactive SBT prompt and can enter the commands directly. This saves starting up a new JVM instance for each command and can be much faster and more convenient. Note that in the examples above we are calling ``sbt compile`` and ``sbt test``
and so on. SBT also has an interactive mode. If you just run ``sbt`` you enter
the interactive SBT prompt and can enter the commands directly. This saves
starting up a new JVM instance for each command and can be much faster and more
convenient.
For example, building Akka as above is more commonly done like this: For example, building Akka as above is more commonly done like this:
:: .. code-block:: none
% sbt % sbt
[info] Building project akka 1.1-SNAPSHOT against Scala 2.8.1 [info] Building project akka 1.1-SNAPSHOT against Scala 2.9.0.RC1
[info] using AkkaParentProject with sbt 0.7.5.RC0 and Scala 2.7.7 [info] using AkkaParentProject with sbt 0.7.6.RC0 and Scala 2.7.7
> update > update
[info] [info]
[info] == akka-actor / update == [info] == akka-actor / update ==
@ -117,57 +129,59 @@ For example, building Akka as above is more commonly done like this:
> test > test
... ...
SBT batch mode SBT batch mode
^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^
It's also possible to combine commands in a single call. For example, updating, testing, and publishing Akka to the local Ivy repository can be done with: It's also possible to combine commands in a single call. For example, updating,
testing, and publishing Akka to the local Ivy repository can be done with::
::
sbt update test publish-local sbt update test publish-local
----
Building Akka Modules Building Akka Modules
--------------------- ---------------------
To build Akka Modules first build and publish Akka to your local Ivy repository as described above. Or using: To build Akka Modules first build and publish Akka to your local Ivy repository
as described above. Or using::
::
cd akka cd akka
sbt update publish-local sbt update publish-local
Then you can build Akka Modules using the same steps as building Akka. First update to get all dependencies (including the Akka core modules), then compile, test, or publish-local as needed. For example: Then you can build Akka Modules using the same steps as building Akka. First
update to get all dependencies (including the Akka core modules), then compile,
:: test, or publish-local as needed. For example::
cd akka-modules cd akka-modules
sbt update publish-local sbt update publish-local
Microkernel distribution Microkernel distribution
^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^
To build the Akka Modules microkernel (the same as the Akka Modules distribution download) use the ``dist`` command: To build the Akka Modules microkernel (the same as the Akka Modules distribution
download) use the ``dist`` command::
::
sbt dist sbt dist
The distribution zip can be found in the dist directory and is called ``akka-modules-{version}.zip``. The distribution zip can be found in the dist directory and is called
``akka-modules-{version}.zip``.
To run the mircokernel, unzip the zip file, change into the unzipped directory, set the ``AKKA_HOME`` environment variable, and run the main jar file. For example: To run the mircokernel, unzip the zip file, change into the unzipped directory,
set the ``AKKA_HOME`` environment variable, and run the main jar file. For
example:
:: .. code-block:: none
unzip dist/akka-modules-1.1-SNAPSHOT.zip unzip dist/akka-modules-1.1-SNAPSHOT.zip
cd akka-modules-1.1-SNAPSHOT cd akka-modules-1.1-SNAPSHOT
export AKKA_HOME=`pwd` export AKKA_HOME=`pwd`
java -jar akka-modules-1.1-SNAPSHOT.jar java -jar akka-modules-1.1-SNAPSHOT.jar
The microkernel will boot up and install the sample applications that reside in the distribution's ``deploy`` directory. You can deploy your own applications into the ``deploy`` directory as well. The microkernel will boot up and install the sample applications that reside in
the distribution's ``deploy`` directory. You can deploy your own applications
into the ``deploy`` directory as well.
----
Scripts Scripts
------- -------
@ -177,32 +191,38 @@ Linux/Unix init script
Here is a Linux/Unix init script that can be very useful: Here is a Linux/Unix init script that can be very useful:
`<http://github.com/jboner/akka/blob/master/scripts/akka-init-script.sh>`_ http://github.com/jboner/akka/blob/master/scripts/akka-init-script.sh
Copy and modify as needed. Copy and modify as needed.
Simple startup shell script Simple startup shell script
^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^
This little script might help a bit. Just make sure you have the Akka distribution in the '$AKKA_HOME/dist' directory and then invoke this script to start up the kernel. The distribution is created in the './dist' dir for you if you invoke 'sbt dist'. This little script might help a bit. Just make sure you have the Akka
distribution in the '$AKKA_HOME/dist' directory and then invoke this script to
start up the kernel. The distribution is created in the './dist' dir for you if
you invoke 'sbt dist'.
`<http://github.com/jboner/akka/blob/master/scripts/run_akka.sh>`_ http://github.com/jboner/akka/blob/master/scripts/run_akka.sh
Copy and modify as needed. Copy and modify as needed.
----
Dependencies Dependencies
------------ ------------
If you are managing dependencies by hand you can find out what all the compile dependencies are for each module by looking in the ``lib_managed/compile`` directories. For example, you can run this to create a listing of dependencies (providing you have the source code and have run ``sbt update``): If you are managing dependencies by hand you can find out what all the compile
dependencies are for each module by looking in the ``lib_managed/compile``
:: directories. For example, you can run this to create a listing of dependencies
(providing you have the source code and have run ``sbt update``)::
cd akka cd akka
ls -1 */lib_managed/compile ls -1 */lib_managed/compile
Here are the dependencies used by the Akka core modules.
Dependencies used by the Akka core modules
------------------------------------------
akka-actor akka-actor
^^^^^^^^^^ ^^^^^^^^^^
@ -247,8 +267,9 @@ akka-http
* jsr250-api-1.0.jar * jsr250-api-1.0.jar
* jsr311-api-1.1.jar * jsr311-api-1.1.jar
----
Here are the dependencies used by the Akka modules. Dependencies used by the Akka modules
-------------------------------------
akka-amqp akka-amqp
^^^^^^^^^ ^^^^^^^^^

View file

@ -0,0 +1,31 @@
Configuration
=============
Specifying the configuration file
---------------------------------
If you don't specify a configuration file then Akka uses default values. If
you want to override these then you should edit the ``akka.conf`` file in the
``AKKA_HOME/config`` directory. This config inherits from the
``akka-reference.conf`` file that you see below. Use your ``akka.conf`` to override
any property in the reference config.
The config can be specified in various ways:
* Define the ``-Dakka.config=...`` system property option
* Put an ``akka.conf`` file on the classpath
* Define the ``AKKA_HOME`` environment variable pointing to the root of the Akka
distribution. The config is taken from the ``AKKA_HOME/config`` directory. You
can also point to the AKKA_HOME by specifying the ``-Dakka.home=...`` system
property option.
Defining the configuration file
-------------------------------
Here is the reference configuration file:
.. literalinclude:: ../../config/akka-reference.conf
:language: none

View file

@ -31,7 +31,7 @@ If you want don't want to type in the code and/or set up a Maven project then yo
Prerequisites Prerequisites
------------- -------------
This tutorial assumes that you have Jave 1.6 or later installed on you machine and ``java`` on your ``PATH``. You also need to know how to run commands in a shell (ZSH, Bash, DOS etc.) and a decent text editor or IDE to type in the Java code. This tutorial assumes that you have Java 1.6 or later installed on you machine and ``java`` on your ``PATH``. You also need to know how to run commands in a shell (ZSH, Bash, DOS etc.) and a decent text editor or IDE to type in the Java code.
You need to make sure that ``$JAVA_HOME`` environment variable is set to the root of the Java distribution. You also need to make sure that the ``$JAVA_HOME/bin`` is on your ``PATH``:: You need to make sure that ``$JAVA_HOME`` environment variable is set to the root of the Java distribution. You also need to make sure that the ``$JAVA_HOME/bin`` is on your ``PATH``::
@ -412,7 +412,7 @@ Here is the master actor::
A couple of things are worth explaining further. A couple of things are worth explaining further.
First, we are passing in a ``java.util.concurrent.CountDownLatch`` to the ``Master`` actor. This latch is only used for plumbing (in this specific tutorial), to have a simple way of letting the outside world knowing when the master can deliver the result and shut down. In more idiomatic Akka code, as we will see in part two of this tutorial series, we would not use a latch but other abstractions and functions like ``Channel``, ``Future`` and ``!!!`` to achive the same thing in a non-blocking way. But for simplicity let's stick to a ``CountDownLatch`` for now. First, we are passing in a ``java.util.concurrent.CountDownLatch`` to the ``Master`` actor. This latch is only used for plumbing (in this specific tutorial), to have a simple way of letting the outside world knowing when the master can deliver the result and shut down. In more idiomatic Akka code, as we will see in part two of this tutorial series, we would not use a latch but other abstractions and functions like ``Channel``, ``Future`` and ``!!!`` to achieve the same thing in a non-blocking way. But for simplicity let's stick to a ``CountDownLatch`` for now.
Second, we are adding a couple of life-cycle callback methods; ``preStart`` and ``postStop``. In the ``preStart`` callback we are recording the time when the actor is started and in the ``postStop`` callback we are printing out the result (the approximation of Pi) and the time it took to calculate it. In this call we also invoke ``latch.countDown()`` to tell the outside world that we are done. Second, we are adding a couple of life-cycle callback methods; ``preStart`` and ``postStop``. In the ``preStart`` callback we are recording the time when the actor is started and in the ``postStop`` callback we are printing out the result (the approximation of Pi) and the time it took to calculate it. In this call we also invoke ``latch.countDown()`` to tell the outside world that we are done.

View file

@ -306,7 +306,7 @@ Here is the master actor::
A couple of things are worth explaining further. A couple of things are worth explaining further.
First, we are passing in a ``java.util.concurrent.CountDownLatch`` to the ``Master`` actor. This latch is only used for plumbing (in this specific tutorial), to have a simple way of letting the outside world knowing when the master can deliver the result and shut down. In more idiomatic Akka code, as we will see in part two of this tutorial series, we would not use a latch but other abstractions and functions like ``Channel``, ``Future`` and ``!!!`` to achive the same thing in a non-blocking way. But for simplicity let's stick to a ``CountDownLatch`` for now. First, we are passing in a ``java.util.concurrent.CountDownLatch`` to the ``Master`` actor. This latch is only used for plumbing (in this specific tutorial), to have a simple way of letting the outside world knowing when the master can deliver the result and shut down. In more idiomatic Akka code, as we will see in part two of this tutorial series, we would not use a latch but other abstractions and functions like ``Channel``, ``Future`` and ``!!!`` to achieve the same thing in a non-blocking way. But for simplicity let's stick to a ``CountDownLatch`` for now.
Second, we are adding a couple of life-cycle callback methods; ``preStart`` and ``postStop``. In the ``preStart`` callback we are recording the time when the actor is started and in the ``postStop`` callback we are printing out the result (the approximation of Pi) and the time it took to calculate it. In this call we also invoke ``latch.countDown`` to tell the outside world that we are done. Second, we are adding a couple of life-cycle callback methods; ``preStart`` and ``postStop``. In the ``preStart`` callback we are recording the time when the actor is started and in the ``postStop`` callback we are printing out the result (the approximation of Pi) and the time it took to calculate it. In this call we also invoke ``latch.countDown`` to tell the outside world that we are done.

11
akka-docs/intro/index.rst Normal file
View file

@ -0,0 +1,11 @@
Introduction
============
.. toctree::
:maxdepth: 2
why-akka
getting-started-first-scala
getting-started-first-java
building-akka
configuration

View file

Before

Width:  |  Height:  |  Size: 1.5 KiB

After

Width:  |  Height:  |  Size: 1.5 KiB

Before After
Before After

View file

@ -0,0 +1,68 @@
Why Akka?
=========
What features can the Akka platform offer, over the competition?
----------------------------------------------------------------
Akka is an unified runtime and programming model for:
- Scale up (Concurrency)
- Scale out (Remoting)
- Fault tolerance
One thing to learn and admin, with high cohesion and coherent semantics.
Akka is a very scalable piece of software, not only in the performance sense,
but in the size of applications it is useful for. The core of Akka, akka-actor,
is very small and easily dropped into an existing project where you need
asynchronicity and lockless concurrency without hassle.
You can choose to include only the parts of akka you need in your application
and then there's the whole package, the Akka Microkernel, which is a standalone
container to deploy your Akka application in. With CPUs growing more and more
cores every cycle, Akka is the alternative that provides outstanding performance
even if you're only running it on one machine. Akka also supplies a wide array
of concurrency-paradigms, allowing for users to choose the right tool for the
job.
The integration possibilities for Akka Actors are immense through the Apache
Camel integration. We provide Software Transactional Memory concurrency control
through the excellent Multiverse project, and have integrated that with Actors,
creating Transactors for coordinated concurrent transactions. We have Agents and
Dataflow concurrency as well.
What's a good use-case for Akka?
--------------------------------
(Web, Cloud, Application) Services - Actors lets you manage service failures
(Supervisors), load management (back-off strategies, timeouts and
processing-isolation), both horizontal and vertical scalability (add more cores
and/or add more machines). Think payment processing, invoicing, order matching,
datacrunching, messaging. Really any highly transactional systems like banking,
betting, games.
Here's what some of the Akka users have to say about how they are using Akka:
http://stackoverflow.com/questions/4493001/good-use-case-for-akka
Cloudy Akka
-----------
And that's all in the ApacheV2-licensed open source project. On top of that we
have a commercial product called Cloudy Akka which provides the following
features:
#. Dynamically clustered ActorRegistry with both automatic and manual migration
of actors
#. Cluster membership and cluster event subscriptions
#. Durable actor mailboxes of different sizes and shapes - file-backed,
Redis-backed, ZooKeeper-backed, Beanstalkd-backed and with AMQP and JMS-based
in the works
#. Monitoring influenced by Dapper for cross-machine message tracing and
JMX-exposed statistics
Read more `here <http://scalablesolutions.se/products.html>`_.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.5 KiB

View file

@ -1,180 +0,0 @@
Configuration
=============
Specifying the configuration file
---------------------------------
If you don't specify a configuration file then Akka is using default values. If you want to override these then you should edit the 'akka.conf' file in the 'AKKA_HOME/config' directory. This config inherits from the 'akka-reference.conf' file that you see below, use your 'akka.conf' to override any property in the reference config.
The config can be specified in a various of ways:
* Define the '-Dakka.config=...' system property option.
* Put the 'akka.conf' file on the classpath.
* Define 'AKKA_HOME' environment variable pointing to the root of the Akka distribution, in which the config is taken from the 'AKKA_HOME/config' directory, you can also point to the AKKA_HOME by specifying the '-Dakka.home=...' system property option.
Defining the configuration file
-------------------------------
`<code format="ruby">`_
####################
# Akka Config File #
####################
# This file has all the default settings, so all these could be removed with no visible effect.
# Modify as needed.
akka {
version = "1.1-SNAPSHOT" # Akka version, checked against the runtime version of Akka.
enabled-modules = [] # Comma separated list of the enabled modules. Options: ["remote", "camel", "http"]
time-unit = "seconds" # Time unit for all timeout properties throughout the config
event-handlers = ["akka.event.EventHandler$DefaultListener"] # event handlers to register at boot time (EventHandler$DefaultListener logs to STDOUT)
event-handler-level = "DEBUG" # Options: ERROR, WARNING, INFO, DEBUG
# These boot classes are loaded (and created) automatically when the Akka Microkernel boots up
# Can be used to bootstrap your application(s)
# Should be the FQN (Fully Qualified Name) of the boot class which needs to have a default constructor
# boot = ["sample.camel.Boot",
# "sample.rest.java.Boot",
# "sample.rest.scala.Boot",
# "sample.security.Boot"]
boot = []
actor {
timeout = 5 # Default timeout for Future based invocations
# - Actor: !! && !!!
# - UntypedActor: sendRequestReply && sendRequestReplyFuture
# - TypedActor: methods with non-void return type
serialize-messages = off # Does a deep clone of (non-primitive) messages to ensure immutability
throughput = 5 # Default throughput for all ExecutorBasedEventDrivenDispatcher, set to 1 for complete fairness
throughput-deadline-time = -1 # Default throughput deadline for all ExecutorBasedEventDrivenDispatcher, set to 0 or negative for no deadline
dispatcher-shutdown-timeout = 1 # Using the akka.time-unit, how long dispatchers by default will wait for new actors until they shut down
default-dispatcher {
type = "GlobalExecutorBasedEventDriven" # Must be one of the following, all "Global*" are non-configurable
# - ExecutorBasedEventDriven
# - ExecutorBasedEventDrivenWorkStealing
# - GlobalExecutorBasedEventDriven
keep-alive-time = 60 # Keep alive time for threads
core-pool-size-factor = 1.0 # No of core threads ... ceil(available processors * factor)
max-pool-size-factor = 4.0 # Max no of threads ... ceil(available processors * factor)
executor-bounds = -1 # Makes the Executor bounded, -1 is unbounded
allow-core-timeout = on # Allow core threads to time out
rejection-policy = "caller-runs" # abort, caller-runs, discard-oldest, discard
throughput = 5 # Throughput for ExecutorBasedEventDrivenDispatcher, set to 1 for complete fairness
throughput-deadline-time = -1 # Throughput deadline for ExecutorBasedEventDrivenDispatcher, set to 0 or negative for no deadline
mailbox-capacity = -1 # If negative (or zero) then an unbounded mailbox is used (default)
# If positive then a bounded mailbox is used and the capacity is set using the property
# NOTE: setting a mailbox to 'blocking' can be a bit dangerous,
# could lead to deadlock, use with care
#
# The following are only used for ExecutorBasedEventDriven
# and only if mailbox-capacity > 0
mailbox-push-timeout-time = 10 # Specifies the timeout to add a new message to a mailbox that is full - negative number means infinite timeout
# (in unit defined by the time-unit property)
}
}
stm {
    fair = on # Should global transactions be fair or non-fair (non-fair yields better performance)
max-retries = 1000
timeout = 5 # Default timeout for blocking transactions and transaction set (in unit defined by
# the time-unit property)
write-skew = true
blocking-allowed = false
interruptible = false
speculative = true
quick-release = true
propagation = "requires"
trace-level = "none"
}
jta {
provider = "from-jndi" # Options: - "from-jndi" (means that Akka will try to detect a TransactionManager in the JNDI)
# - "atomikos" (means that Akka will use the Atomikos based JTA impl in 'akka-jta',
# e.g. you need the akka-jta JARs on classpath).
timeout = 60
}
http {
hostname = "localhost"
port = 9998
    # If you are using akka.http.AkkaRestServlet
filters = ["se.scalablesolutions.akka.security.AkkaSecurityFilterFactory"] # List with all jersey filters to use
# resource-packages = ["sample.rest.scala",
# "sample.rest.java",
# "sample.security"] # List with all resource packages for your Jersey services
resource-packages = []
# The authentication service to use. Need to be overridden (sample now)
# authenticator = "sample.security.BasicAuthenticationService"
authenticator = "N/A"
# Uncomment if you are using the KerberosAuthenticationActor
# kerberos {
# servicePrincipal = "HTTP/localhost@EXAMPLE.COM"
# keyTabLocation = "URL to keytab"
# kerberosDebug = "true"
# realm = "EXAMPLE.COM"
# }
kerberos {
servicePrincipal = "N/A"
keyTabLocation = "N/A"
kerberosDebug = "N/A"
realm = ""
}
    # If you are using akka.http.AkkaMistServlet
mist-dispatcher {
#type = "GlobalExecutorBasedEventDriven" # Uncomment if you want to use a different dispatcher than the default one for Comet
}
connection-close = true # toggles the addition of the "Connection" response header with a "close" value
root-actor-id = "_httproot" # the id of the actor to use as the root endpoint
root-actor-builtin = true # toggles the use of the built-in root endpoint base class
timeout = 1000 # the default timeout for all async requests (in ms)
expired-header-name = "Async-Timeout" # the name of the response header to use when an async request expires
expired-header-value = "expired" # the value of the response header to use when an async request expires
}
remote {
# secure-cookie = "050E0A0D0D06010A00000900040D060F0C09060B" # generate your own with '$AKKA_HOME/scripts/generate_secure_cookie.sh' or using 'Crypt.generateSecureCookie'
secure-cookie = ""
compression-scheme = "zlib" # Options: "zlib" (lzf to come), leave out for no compression
zlib-compression-level = 6 # Options: 0-9 (1 being fastest and 9 being the most compressed), default is 6
layer = "akka.remote.netty.NettyRemoteSupport"
server {
hostname = "localhost" # The hostname or IP that clients should connect to
port = 2552 # The port clients should connect to. Default is 2552 (AKKA)
message-frame-size = 1048576 # Increase this if you want to be able to send messages with large payloads
connection-timeout = 1
      require-cookie = off # Should the remote server require that its peers share the same secure-cookie (defined in the 'remote' section)?
untrusted-mode = off # Enable untrusted mode for full security of server managed actors, allows untrusted clients to connect.
backlog = 4096 # Sets the size of the connection backlog
      execution-pool-keepalive = 60 # Length in akka.time-unit how long core threads will be kept alive if idling
      execution-pool-size = 16 # Size of the core pool of the remote execution unit
max-channel-memory-size = 0 # Maximum channel size, 0 for off
max-total-memory-size = 0 # Maximum total size of all channels, 0 for off
}
client {
buffering {
retry-message-send-on-failure = on
capacity = -1 # If negative (or zero) then an unbounded mailbox is used (default)
# If positive then a bounded mailbox is used and the capacity is set using the property
}
reconnect-delay = 5
read-timeout = 10
message-frame-size = 1048576
reap-futures-delay = 5
reconnection-time-window = 600 # Maximum time window that a client should try to reconnect for
}
}
}
`<code>`_

View file

@ -1,5 +1,5 @@
Actors (Scala) Actors
============== ======
Module stability: **SOLID** Module stability: **SOLID**

View file

@ -0,0 +1,8 @@
Scala API
=========
.. toctree::
:maxdepth: 2
actors
fsm

View file

@ -189,7 +189,7 @@ class ServerInitiatedRemoteActorSpec extends AkkaRemoteTest {
while(!testDone()) { while(!testDone()) {
if (latch.await(200, TimeUnit.MILLISECONDS)) if (latch.await(200, TimeUnit.MILLISECONDS))
error("Test didn't complete within 100 cycles") sys.error("Test didn't complete within 100 cycles")
else else
latch.countDown() latch.countDown()
} }

View file

@ -89,7 +89,6 @@ object Pi extends App {
def receive = { def receive = {
case Calculate => case Calculate =>
// schedule work // schedule work
//for (arg <- 0 until nrOfMessages) router ! Work(arg, nrOfElements)
for (i <- 0 until nrOfMessages) router ! Work(i * nrOfElements, nrOfElements) for (i <- 0 until nrOfMessages) router ! Work(i * nrOfElements, nrOfElements)
// send a PoisonPill to all workers telling them to shut down themselves // send a PoisonPill to all workers telling them to shut down themselves

View file

@ -996,14 +996,20 @@ private[akka] abstract class ActorAspect {
None) //TODO: REVISIT: Use another classloader? None) //TODO: REVISIT: Use another classloader?
if (isOneWay) null // for void methods if (isOneWay) null // for void methods
else if (future.isEmpty) throw new IllegalActorStateException("No future returned from call to [" + joinPoint + "]")
else if (TypedActor.returnsFuture_?(methodRtti)) future.get else if (TypedActor.returnsFuture_?(methodRtti)) future.get
else if (TypedActor.returnsOption_?(methodRtti)) {
import akka.japi.{Option => JOption}
future.get.await.resultOrException.as[JOption[AnyRef]] match {
case None => JOption.none[AnyRef]
case Some(x) if ((x eq null) || x.isEmpty) => JOption.some[AnyRef](null)
case Some(x) => x
}
}
else { else {
if (future.isDefined) { val result = future.get.await.resultOrException
future.get.await
val result = future.get.resultOrException
if(result.isDefined) result.get if(result.isDefined) result.get
else throw new IllegalActorStateException("No result returned from call to [" + joinPoint + "]") else throw new IllegalActorStateException("No result returned from call to [" + joinPoint + "]")
} else throw new IllegalActorStateException("No future returned from call to [" + joinPoint + "]")
} }
} }