From 475a29c15136e1c8c023985e0b5c8de998d99698 Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Tue, 31 Aug 2010 19:12:14 +0200 Subject: [PATCH 01/52] Initial UUID migration --- .../se/scalablesolutions/akka/util/UUID.java | 59 ------------------ akka-actor/src/main/scala/util/UUID.scala | 9 +++ .../src/main/scala/remote/RemoteClient.scala | 12 ---- .../serialization/SerializationProtocol.scala | 5 +- embedded-repo/com/eaio/uuid/3.2/uuid-3.2.jar | Bin 0 -> 15009 bytes embedded-repo/com/eaio/uuid/3.2/uuid-3.2.pom | 8 +++ project/build/AkkaProject.scala | 3 + 7 files changed, 23 insertions(+), 73 deletions(-) delete mode 100644 akka-actor/src/main/java/se/scalablesolutions/akka/util/UUID.java create mode 100644 akka-actor/src/main/scala/util/UUID.scala create mode 100644 embedded-repo/com/eaio/uuid/3.2/uuid-3.2.jar create mode 100644 embedded-repo/com/eaio/uuid/3.2/uuid-3.2.pom diff --git a/akka-actor/src/main/java/se/scalablesolutions/akka/util/UUID.java b/akka-actor/src/main/java/se/scalablesolutions/akka/util/UUID.java deleted file mode 100644 index f06aab34df..0000000000 --- a/akka-actor/src/main/java/se/scalablesolutions/akka/util/UUID.java +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package se.scalablesolutions.akka.util; - -/** - * NOTE: - *

- * This code is based on code from the [Plasmid Replication Engine] project.
- *
- * Licensed under [Mozilla Public License 1.0 (MPL)].
- *

- * Original JavaDoc:
- *

- * Our distributed objects are generally named most efficiently (and cleanly)
- * by their UUID's. This class provides some static helpers for using UUID's.
- * If it was efficient to do in Java, I would make the uuid a normal class
- * and use instances of it. However, in current JVM's, we would end up using an
- * Object to represent a long, which is pretty expensive. Maybe someday. ###
- *

- * UUID format: currently using currentTimeMillis() for the low bits. This uses
- * about 40 bits for the next 1000 years, leaving 24 bits for debugging
- * and consistency data. I'm using 8 of those for a magic ascii 'U' byte.
- *

- * Future: use one instance of Uuid per type of object for better performance - * and more detailed info (instance could be matched to its uuid's via a map or - * array). This all static version bites.### - */ -public final class UUID { - - public static final long UUID_NONE = 0; - public static final long UUID_WILD = -1; - public static final long UUID_MAGICMASK = 0xff << 56; - public static final long UUID_MAGIC = 'U' << 56; - - protected static long lastTime; - - /** - * Generate and return a new Universally Unique ID. - * Happens to be monotonically increasing. - */ - public synchronized static long newUuid() { - long time = System.currentTimeMillis(); - - if (time <= lastTime) { - time = lastTime + 1; - } - lastTime = time; - return UUID_MAGIC | time; - } - - /** - * Returns true if uuid could have been generated by Uuid. - */ - public static boolean isValid(final long uuid) { - return (uuid & UUID_MAGICMASK) == UUID_MAGIC - && (uuid & ~UUID_MAGICMASK) != 0; - } -} \ No newline at end of file diff --git a/akka-actor/src/main/scala/util/UUID.scala b/akka-actor/src/main/scala/util/UUID.scala new file mode 100644 index 0000000000..548f014e58 --- /dev/null +++ b/akka-actor/src/main/scala/util/UUID.scala @@ -0,0 +1,9 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.util + +object UUID { + def newUuid = new com.eaio.uuid.UUID() +} \ No newline at end of file diff --git a/akka-remote/src/main/scala/remote/RemoteClient.scala b/akka-remote/src/main/scala/remote/RemoteClient.scala index 35578477ff..459c260a62 100644 --- a/akka-remote/src/main/scala/remote/RemoteClient.scala +++ b/akka-remote/src/main/scala/remote/RemoteClient.scala @@ -31,18 +31,6 @@ import java.util.concurrent.atomic.AtomicLong import scala.collection.mutable.{HashSet, HashMap} import scala.reflect.BeanProperty -/** - * Atomic remote request/reply message id generator. - * - * @author Jonas Bonér - */ -object RemoteRequestProtocolIdFactory { - private val nodeId = UUID.newUuid - private val id = new AtomicLong - - def nextId: Long = id.getAndIncrement + nodeId -} - /** * Life-cycle events for RemoteClient. 
*/ diff --git a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala index 3f54f8e921..4050c2026f 100644 --- a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala +++ b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala @@ -10,11 +10,12 @@ import se.scalablesolutions.akka.stm.global._ import se.scalablesolutions.akka.stm.TransactionManagement._ import se.scalablesolutions.akka.stm.TransactionManagement import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ -import se.scalablesolutions.akka.remote.{RemoteServer, RemoteRequestProtocolIdFactory, MessageSerializer} +import se.scalablesolutions.akka.remote.{RemoteServer, MessageSerializer} import se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType._ import se.scalablesolutions.akka.serialization.Serializer import com.google.protobuf.ByteString +import se.scalablesolutions.akka.util.UUID /** * Type class definition for Actor Serialization @@ -267,7 +268,7 @@ object RemoteActorSerialization { val actorInfo = actorInfoBuilder.build val requestBuilder = RemoteRequestProtocol.newBuilder - .setId(RemoteRequestProtocolIdFactory.nextId) + .setId(UUID.newUuid) .setMessage(MessageSerializer.serialize(message)) .setActorInfo(actorInfo) .setIsOneWay(isOneWay) diff --git a/embedded-repo/com/eaio/uuid/3.2/uuid-3.2.jar b/embedded-repo/com/eaio/uuid/3.2/uuid-3.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..b205a2488e6c050feec7e89352e93fc9ae59ab09 GIT binary patch literal 15009 zcmWIWW@h1H00GO~={6u5hB+A+7<^qr9CbbY-1K3}I2bH)r`!01`D&ywFfi0IFffQ9 zEAw^q^K^3!4$<><`|Nw>w2!y0-bG$-U9EFx&TkGfxMKX^X_1cCxf43xx=tMIPnLvD z5z{`PQn7PRcaX4mP;kjyt+gUD+(MI$gG6hDPX~Rj*g40`xY+B!qsX6_zdL;l}0BvM)C9r6xJS5=n3kPP_G5x$St7p-~t?&}BNg~v7-Ew9d)D?Lw6F0WZtai*02t*G@&b+=@N zw3}P>rM?f5buiX?Y8#SkDHqCcZozSLi7Odru_;!zE-^b^kt|-{q zb}1_OrO?j>W>>$=TYGWtrh@06bw6n*70L^UtPH)``0n|e?b;UE*JqtPxMGcT=~GLl zm{q%0ev`|)KG*JLH`B)tJC$G0^%oPan9n%JXRY-A_0J*>m%P}L%e-RiayuWs_G|pw zSCzXG`q+(|LSvsxn*7?g@66d)gLK!a2gP5l6Fs}@`L#KJ^nT7}(au`7(cRNDIjec< z*{=1gis~M%l)ha0?9JlPW$U;4r_AS=SP?3BCf_uDXARTd!i=v;2kj4h__yl1_U&me z{w`S8x^h;+-ly#ri>GhrlaEXO-Yl8E_kZ#uab!6=^96Viw>Iy$(>n}N)toNO9%}$>Q!q+^!6c;;v+R(AGVU0lKEaNHt6U-i- zj9S2Tdy|V^;8YdO4dONq`xX}3cwc0?%Y8*qVA;*M56^pBe5KrZ+O^xxz0gV%*W?jB zHYJ}~=%Q|Inz5FvnBn@^yKl{MI~At{>iz?p1K z1_lOo1_lNm1_lODw$e+^&($j}%IW@gM%z>Ol(#PDxfA+Yo_e~SPoHw}e%g3eLsN6Z zrR)B_XU<&rK6joK&B*neA8vMIW?=Zv%D}+Ez`y{?q zN`#n(hc^E|86z2K5qa?H*R%513%)-8^Xl@S%g6iYFW;auWnbgpxRwu$U3?+8%{NV* zt3BtE>R&Zc?40MTNVCv6 zu`DfZYqcUx7FgML28+y_vij?T10Jt?v^docEZHQfl~lxKzAE%xYEsJs^)$}%4H~UC z*e<;Cl6|SYcVC@_$*F6MAzc^vo1K@drfXWSQPL6Fa)E74juca%M#8PN!p9GEA9J(i zz3%(?)7tr2cZ2j8_j(*Elx4r1nEdjzR`27_we=@1O>Do=zbo^LaHdXaxaOp^{AICc z4C7?J+`n{M>b%=>*-dgQFZ6v`Z*AL}6T6_TdCoC)?WRMO;`vh~7bVKhV{e^(G5hiF zf8ifrnmm=}i`;bXedH;}lnvgK^Oc?`&e_QGb~D2}*L|MfEEC*LJ^I5q$#laH#(zSE z5)A+Mc~?#o44Kq)#X(8%ra+^#*-H@MNf`T6O`Y*a98M&BiH87n~-}`_IoQQ z(^9+QM-}@Y-cwD|VcK@eRp@d}nCG138EhKnXE$Cs-yZbd^%!%)$`ctDuT5^UIb1%v z=*Z)Q%rou^v;O?%JtVS)GiUzCa5u$t!9%Gb|IO-eJvo}wwmmK5#hR@j&hw~s$B7yF zo9%7SI4?F`-@T;f+#E?SbNPu3YSle^{mWM!OCsHcm2YMzQmVo^$YVo|DNN=i{`aj{Q+es*brUUE)iaq-lU z+kV1={6@mBW&4HrCG*QMB0e`bqM`J4bTE_IGmTjmP2tXh%TIb-`~wK=ZIx@SFn zulPy5YO*BKkJF!z%$0-aj3kH;S0V#ZOru%P60Med8t!?!Mc)~KE``?cGo z$}LZNrxkpiQBa6-%@9z@T58$N>O*`2n#%XF>wM1B9{9z6^)1HTniWPZoypJnRtQG$}@6(wQFAJl`EnS|x!85L1 z=umUITxqmohGNOz<1U<6(@W>C%k5Ojj@YhvCuy3^oCh!Wc=s&dx1_PW?DC0oxBF)J 
zcFQL2ym`lKy?~bP#^+)O9{HZS{9uXcnZ+HZ>&rECzPoa|K3RRS;M(dnbGzqHUj4}2 zNHTFLN9{uUApd6$mcK7vU0IOHY#tosu*LE5_mH}pKTj>2uRflAPW@x#y-JQ>GmPV( zEst*!apt|hbq{ma#RKXmHN~3h7xWY=I9^o#@ocxhz{`IvMu(z(KDa8@I9IND&hSrA zb;TS7k&wRCf8Spe3z@=r-`b?MaaYXU!j7&5MJ?Ry94_A<-v9r#RQ^=Wb@p|S4l_Mk z@x*G<)d@L`(;h5N|8TjOB|~!SmZq>MSL?k84E7!ou4=6`+TifIBbcju%f#17bE>nR z{$H&y%QD*9>SB|2fJE(~AEE2EoQS&~p`jVD=)zi;^^e(%Gxaqlet)0P&2L^Tw>{u| zdPA-hTVQ+Febn6ZzR^3^kDGy^T$F)98dvUt*#Ws(DDH{&Y1w75evPVV{fjF)9g!iun!0jqhnwq&(C?|muIpdVX}<5}!M_y;dhHI zZ@Vulul>DFnq5Su{<1|v@x;U#CS)b@z+lY7p^S6D0Mia zZ!6b|s}no&!rHcGg!FUmI2m!;S$9{%jvm3xi%guXjdy0P-pV`6evRv_KVPRwcA1S}YZ{Sw%2wOuV?%a2mslpo0(11{`OaxJ~Ow5sLzQ?r+0PhUnCsmq0R zzL0+G{+-1qcJr4X?VBGMX`bag;uG9|JnuTGj(b_xW_|Jbb@kzf zdrMkRGhgZ}`OCP#i#jg@!?Em6$7QIViT{;IJ)9>ly}Us2Q#|n zKhWKI)&J=SV^Mqe&0X;cy1`1$Rx2x{^EH1g?J6&bIk-M#&V+#K?x0zbQL-CLHV1O6 zR5v%3hyIb;FQ}0uHSLs<&eYV1Sq7y^$Kn<}+Wup1*Zswj36~;OCmi^3-fRAn?sFdV z7tNhA{{Fa-qBl6e%ib(Oa&D5FLq{dsC%YbklQ zV%5_x-8+*_7PzMT@n88v_@9n}iXNw_T6y`D46c8R9^l-p?ppn|LDYY?@xqyx(-!cXyXwyEJMnUU zlvuaQE)(m#SIIXT*3FrbmTC3X%vK@bLuBlZ{jtyH7a zRLHlB->-48V)fH4(cjW5vkjOlD&4;Rs4ELo|9<-P2DSGAho&7_vt{O=EyYW{D-7b7 zRW3^m=#N^qv2*p@u<9wMQfYf!{j`3Re80(cC&gg>or+Y}FPGP!sxUfnEnD*BEeRj) z7f-|PZmDRBlq|D6efsiYTVL&6S}AL%pPb;Bz4gEypU8r1of8W~kLgF}bGxox@%PmO zwPf~ZRsSz1u1s@F)h^%a<@Vsu`45?=wmtl-9ks9^W`~EJn9Vn@udjJ8|FS=x94?vC zoW=9H!=_G{MhBQ(vo}G ziQB~<|J%0C;_=&dGQH;V{L%1# z(odGfJO8IVbn297x%o&YqVM4SGByy}T)?U2u{xt%cBSy-nH~vGn!g z-9p~akad;D5tcxw_kOMuXRz(paGp7%Q}U718KL5W&K(k>Nph=>)aZzXt`HWD&=N~f z{g~akzw3v>N8cmu9r>zuUNt>GUU%I0sJ;wC9ir(HF-C|-WOJ1@p#X>?a{xPsx0_d%b*-aAeE_hRdY|6X&ivck58f zvRgSefw^Td>UpbGtB&7X#4J>M^yVV{IgeHL%qZBZ5oo&Z(7#oMqTj-MpB(Qkkel>P zp}+RL*~(RO6W<>RwT-yY+sd1k5$j;5J=1I~!`PLWp9GApQCd_(D$=l0IG zN$yVf4!*BG^7oPBCEG`VA1yMc-_Yk%dD0&Fzjx8eWwJ@refvAij|4vI>)d{{@RIN& z+rmj-HdHvCl9fANbX@D3^zQZ!>k8fz>t>%l^)-Rjf3MWx%2eN!seWpU8n$$N4=`Lg zKQ>bVpT-kGNRSZ^2DT3Os{)lbsT za}v+~FR<;iey41Oxb;WHyvKJ7cAs!<|ByIcqS^Y0%N@> z?az)CZqp;WmcKY#n`F7`sFchC_v#~4Ej;?~sn1$**+;h7D)3d#f^%-Kau)homu?G8 zw=OLUNLMel`p|I0b?)6OJKL?k2S0aw^=9F@#aAllSjAqs=|7kGilxWg!$TMc)Gz4W$D*uI{XDHN`0dVYOTX>U*FBbae>DDq#rbF3 z3f?4a%X?a^(j7PVh{S~C7{5*2<{!?j@cWX)UOVH-I-f${&QO^zF-3XNUAJdGDqFh4 zR&$;9QCXkD<3e+~=Xn+_pCpsNMDI5r!{%RxXw_#!UiH~Y& zt7^`yj)lAi9KA8#mPoustrIuGma%5w|k*qpueN{=4-EztivuHx0V+O8?P4<7h> z{%(MWJ@=}M?jJiB?qm6D_n|$`gLl=pO~*D(SGDHm^kuuE=X74y$mC6)KwpJ{+{eTB zKF(&g<2~f@?XmmhOqsbZQ_TDrd<@>Ko#eB>@}s4QVClxy zq3ds-IUUtJx^%^qlzA+L79Z|$u8e)T-|MaS_fuO7q^GemFY8;z%C5_O{=*$7>DfG6 zZCWL-rJ8=fd$QyEuj%&`l5U)nw30R7yF1n8{CuxPv*M1NzN(k>GcI%Hy}4JvoVzU; zddh2c+!EjP9j^S7g)aWeE>Svq;-2Dq=2pLkmw#A_Z}4l}KM}mat&Ep-lXSpFzpbKE zPHeuE({*R*c9p5dLH(;ls-{0vSzgKo=%bR#K{iUxs@WmY5%TqVu zAn$Th=Z9?9eC%>A+2_R`mMos*I5Defb>LyvnA1P`QrSucw>$5+D-$EjFMLNiYu2M% zo%}0|Gvc2wNfOGAne03F^tP!fw*S~slhXDVEQ&1b3=F0G3=9%jlM*x$g}p46PM58V zi_Jc{N!hw{w$(w;VDpKp*S+}rCM~w;yM1D6Zii6&7KhE#%)eQETBoyCXgebpOGeej zNn#n7i)t4d3#6UlDU~rgJS98fmw3UM0$%3B<#x>Hf4{3zea%sE?ETN``S(9thrj>( zTiu?uK>l9OL!A=KGsiCUJMru`5Py1V!r`5MXOb1!gu4yrxEr>qMg*OkR;0>%+OTAy zPJ5S(4DVHmswbVD+;ewyi0-WN;`7hmp%PykbvWImCTx+-;)lgjcIzKvygtf%*^ zc`UipToa<#CFrN}+}&b)b(Q6GOIJH~)KxCDlxEu* zZ(A_^DcZvpoOpObEu^t`<3y7<7|Jbz8syNh40 z3SMPZxjg5Ew^!kXbwPR8-yUH}daTX5_=y4263NF8@3b{Fy0J!P8`mA!lJPsYg~du{ zcJ}32%iAYAm--d>YhK)|Sh%I?-JC$N9ria>p2f-L)V+_*`Idh;^v*_iZ`H%IiXYns z@yGAJ!4g;Qqundo_;%mao0<0$_lWFrh~GV3mb)tTps!n+TC+sjt0=d{+A;eB>*8xC zd7RX|eKOozb@v;I;;&I#D-T<)>}uShGc)ep@mZ^yG?$4>UU_>mE!DaI^p_d2Q(P}L zb*9Zc8oWyBx5w7h)GaPcdmE=RpLw~*GXJIDRv$TilhapkubdQb^(H{u)}8NH-}%*f ziFRw}+3obx=m{^4t>eTvVoKmE`qvyL+-cVbJS 
zPhwQUhyR|kZo6+DFWe(9pJ1NXBXoI7=nS(yX{Ak$dBq+q-~OzhVK8}?rEOM(5od3A z-n8#q^X9$jnEEOwcf0n?vwU0gx73>2_C=Ps_MLvzcWF!4#LcPGclm0kFZ{Yae`!nL z+ncSNs#nt=pWC)~;$6pYw|HN!nbIf1n|&`b!QW&JS9ZFO{!7-Ui#pc*>T+1-`~B3m z-1ys3b020BI%Fn`g54%)P8Xmpt%u&o*vh14THNhQ?5BjehHaRYlp3wF=lc!j;+_g~OO+?}J z?Pg2oqlxFDgUkiI-qx`{yb}9fA*lJ4s`~@azI4IXzPvM99w%p?H6)#afhcJd2rKIB`BtBU-vqxn)zC-@de+#SN#%A{w%%hXm#n&efPH~+UH7y z8}4Y=lMbKrll6cSVMY?8;l{9{6Ie%(RQ1Z+4cX%0>%+zjtICgWvA-PVexz z*yG=P+kMq19%)S5=%KW)Z{Ov7WfRjqUQC=}6PGH5d+)ek_`O={EBn2F6VE>N_nBk= z*ziwl>fi8F|L5xeW0U?LaBP{!y=OHtk*VJ{<$p6$`{6Y!_wt+LQ*K&5b6tBTHKeR6 z?9|ImNjsiTnx!AV^Pa$&`m3zP%jJFQF4>-#6U0`s!SQXx+0IZ^$wiB-%~S8x3yA&k z>sTsx&CP50na&~ zqQf0-UYY+-^4I#6$0pT@|INNp$s5&CxaPI^DXT(r6T^-bH^RgH|JDWk>a3V`Bd}%; z@7=2#r^h^reuG}``1->>B!Qpv8(G|MEwnny<2WH zX$JdO=XX1c-|c+du`^ z414vASw>GgV!|g|(f7%L=hmIL8Okcp@mgedUml-yXU3=G$XC3q*F)b- zXS}>a@WGiWhkYfpkM{R8t!bL$usBbTo8__jx;F=Y~D%a`@aa(|E3Ni7b~s$B(Yx(T~&4 z%{Z4nou}Gv=M=*fAML4UbX0PK>#Dux7axuhwh-`-yg6f{$=V3Je7)m(@$)41XQYJj z&i**-?44PO)=U!vSXW%U^Q@qE?qQ*J53V;W6W<^6xfJcbcJ7%&r;0y%96c9$x=iKK z4WE0FXRX)FoOtF^lwqE=*=j|Bb*y?Z)@}aRW=6Ij`np=?!sfO$_ik}E&RZ>7)-&JV zlJk1QHr-v5XYGvJw$O3X9mnsc8Q%o0!rQez<}J6FsGS<2&UiGY_&V#a_#@p8m#d5> zr=Rkb-u5z}?@I5#n8w!=S4W(G^xHj1a{d1+mfHEtd$^b7thpA%&3j5IgF|u6flC{@ zqNV(jJ@~e@ecrJk!MC}2Pg`kDMuqfZsb7UI3pSfsOZ7dP(R5qNtkC_iqRh1qhVLd_ z_@$%y!{zYuA3ld|YxJxSoRJc`EZ#EfRK&`umXEJx=H<6leQDuK zBPxzx(4DUSSCaqus;Zs$CR#>#{w=%goyNbvL0B$_zh$QO@q#UOZO?ZHPIY(q9`)%B zw`|K?gYYSx%r__dYHrxhCDi^fh-urzQ@6ACcvm*DvN>I4FKgFcv^Xg>=hT^xDesHI zzVuA>J}qCpqiBEj(Yv{shcAZb8fRObe>8u^qGcI-nA<)|O?Vv{{^I8HqLaFbv%~vM zzn|EVui(&^KbI=A49^NHCV$u04QUXC|KDtSIOOlEVjG~0bDjHlpA7n?<&<0*#i zb3T}@H}FwT^-gHir;M_HJo(g~P8CAb8ow+Vw4)7e0nbssxF3Ep_ z{bKjz50AFEFWSFgcf<50Zx-s9hIZe!*j!@Z{{3_6y4!b~4$Kve{askN`y>0_l;|dX zd#COXH!3&GExVX+Exlp-gkLM>79{(j_fVF9d*x)o%)kIDYgI6MD87!)&`H{W#G+zY zXCg}wZS~yx`Ml@l?`^7o{+4HG zGn{--PR4iP3`hB2!51R=H&`4?n&8@bRG|AIPhWD&#pgPH8zfQ-jXHxp=ZJaCDU4Cq z;Xcw`+s(A;QRKD*d4~(%-VEEE_B;GcRI`oc;nlNyT`!m4xL0_p;3_9eRnM;LIyaX+ zo*JclTle`)BvzE5KTsK9`>3(7OvETQ~FUG9? 
zc56yDXNQ(#ob49pS5xw4I7XD}R`;@flPZzfZmIEXhfPwuRpEM3t|cpOIc5A}G3Jgt zz5e!PKK%uItzRij514qkJ)|TNS@gSBt$#Dc!tY*nX45QMKCNVTae;mWfMHWP@b4 zcWUvSOP(w{a4+e2*UX0XDnD*LZ0WnLvvL3B2QM6_*|GdP`ojM^YnJmhj!eg>m-kyL zq-3*IjF&VDskB}2^|;G-$o+=9Rcrso`rQpTTx2{y{k@+5VrPW($IhA@*}YYMLTXd4 zZ?^quHl;{$y-vQu;^*ACDn05I0{X9SCg-XoO9ybK9>3)wX|?*xiYrU*xw&mLn;<0e zX5-S6M?U4>FrBqSSbUaDDA&r;IhRySE!TPOY-IG>r@Y$h*X{{b9e)GQ#l=qUTB@3{%*<|qUzfob`-$CUFL(8CS-0io=WF$~zP;VdD#uS~ zDfe!6IboSpF63S)*ZgHo`%A_C2SREI2~x!~j)w1OtE}mFy>le=)P6R3?;n!)AL#8n zs$A24yyO>m)(scg`iEBY7IxNaeqzmY|8idVsqKqJ=PDOfvwRQexS&&a$kAz%R@_lV zC$6tQ8k0VId|3KGxJJtNAJcrnMecpiEqs<69&%qa`CfPcbU7kqt;Bgp6Jvi?28I?c z1_l|FoL7>WqYqx~=aE_g9Uln&ogbVkQRi3f=YLzYY)1)Gw4`~V_H;f8={Lu2+;Dr& zrk0w}Y*=zP(R#_2#jgvPnLOGit!QbLzq(**L~nwFut@TSie8UBD*rhDG5%w8tJ$|+ zJ+)cxG5_$mz`WcMz_@Kw!`l^**y|EPQJ^!Hrr9nIGkXAPH&uyjk@^DL=9A96|NeB{h8^%v~# zJoQPN;V-`4dg9r2!nMlJOajm7vgKA-rElV|dEEJ?#d3mYzt)O*(aWlJCic!$S36sB zDQE3+6V;RLcb7-hq_A5yXlfeId_32;C(^yx{VcOTqQ+8A@qtAE?I<1U-+ zHm*Hzdz#P9z3=){)i!%>zAohZCUtq9*bfG=MMez@*H{D&Fv$jPRJg60I_JvA90pSf zjs$_&9f%ca=EC^`qyQ9T2_7HJ1IWB_Kjyzv3c<7(u=>| zRl0WbeaKd{6KiM^b&^XLE#N$P$8g83V}TD>wAwcp%@!6HGyG-AR2mT2e{>FOmx9j0 zpgudRkW}>yWw(UA4_|fvdR}l>>*Ufw9i{V<;w9TinHV1{$CE0^KXQIW|q}*oCQSN)TcH7Oh8;xThKh2OaZ{4Cc!>$MK)Yp)Ruoz>2>AtS+QlRCC7w&BJcfV>3sI%w~3&BBD0^TRji;7uVwC>_lKli_8hZ6 zemdw#T+*~j-;7;4ir-F=c(!6;>m-&${aG6Wz20n7?TUV(>G@{imJNGa&vuBkYQD}* z46m7+_v%tp)DJxyu1^-?Mm;vm4m-ZpD-mx=Q7)3*BfD6&KUMMTi5&N}c6T++q}=rO z-HCM2Uq08><)1M5J>*Ppby?BJ%{qW7q6+FIn&j6MaA6t%~m&g&fZ9@-qw`eIKOcF&fBT_E$wQb`e(7E7 zwWu-i@QVDh4f81ub~f121@k75JE1l?Z7?(a*tPlR4eyc|MD&5(@)^ z11|#uXpJm<`8~v)s8)mZ7v$&aRpjOz4fD-^Y$jl;Qs z-PPH)oK?3s)k?)_rF7KTreS+69f1$p5cJeH4uk3BC6F0Oy&GuNL^>Tjw zoeP^&(l%S1?`A(+e((GD?Kn-KN|v3`EBQQdn~z~416PttBvU5q|K+VJOAB2i5>~I$*|2|N zw%UACvD7xPyJuHE6CIpy#&odoWw zISzk6@fPi$a>nV?*X5$;9WyTEJvP|*_=2N(xctej+uuoLo|w0{b4Fao%~f~qPtj9f zyK`on=cP5X`~0~lW-tD+sr0EvhKiWf>>SfA`swD^_Dy6~v0ZX{`7_sNFApYbD{08j znfT|8XZOVg4j;5w{+cW^TUuAyQ++?5ozF6%slGHkE&ukxNArHajh60_mGw0ImGP`Y z>it(U!S)5WSKG&K+f!t+-1q7ttxWz?{&#kSJM_F2xq3RAqx0y6U+$lthl(%V{7`ho zN`48>CA=?}C!lpvcuXYItoTF-us336@@PPG$Rd&)epn*+1_6pSNq@!TBeho~@U?)hOG1iZ}b( zt0mK>KgxxxAG7^Cu50~I99B?4 zHt>X4RtqM97T+^4Fc=V3P=N}Xf};F_)S{Bi)Z*YW|Fhn@UOJw>fgYJoM7hI_cx>qsQpg@RK3Hn~_O`8TZa4$Sx%ThPRF&7QCQDXoGkZeWwyg zF$gbdR7dtU(!M2x379*ESU_6A7-aE&XpqXaW!N#jJY0q`A5 z=o->|voN4+3_;hq9&N`5LgzLX#QqOtT_(BHZP3@QBXlWopocIKE7{SFM_=5F zFg}4Bhw+fUAG$&4%M%a=Jr~1f5b|OLbhFUsrx9iuNn$e#yNl4r>Jg^oC}15g_m<>2kLlI&UDC=I=!_sWT zT||KzjtI>x`WTu~+j98a0cuDhjIt+W6k7R!y + + 4.0.0 + com.eaio + uuid + 3.2 + jar + \ No newline at end of file diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index 53afac6a22..cf7378b0d2 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -140,6 +140,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val jetty_util = "org.eclipse.jetty" % "jetty-util" % JETTY_VERSION % "compile" lazy val jetty_xml = "org.eclipse.jetty" % "jetty-xml" % JETTY_VERSION % "compile" + lazy val uuid = "com.eaio" % "uuid" % "3.2" % "compile" + lazy val guicey = "org.guiceyfruit" % "guice-all" % "2.0" % "compile" lazy val h2_lzf = "voldemort.store.compress" % "h2-lzf" % "1.0" % "compile" @@ -340,6 +342,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- class AkkaActorProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { + val uuid = 
Dependencies.uuid val configgy = Dependencies.configgy val hawtdispatch = Dependencies.hawtdispatch val multiverse = Dependencies.multiverse From 34da28ff03be7c3d15c81a892f85dd9912424c3a Mon Sep 17 00:00:00 2001 From: ticktock Date: Tue, 14 Sep 2010 16:43:15 -0400 Subject: [PATCH 02/52] initial structures --- .../src/main/scala/VoldemortSession.scala | 24 ++++ .../src/main/scala/VoldemortStorage.scala | 43 ++++++ .../main/scala/VoldemortStorageBackend.scala | 129 ++++++++++++++++++ .../src/test/resources/cluster.xml | 0 .../src/test/resources/server.properties | 0 .../src/test/resources/stores.xml | 52 +++++++ .../scala/VoldemortStorageBackendSuite.scala | 17 +++ project/build/AkkaProject.scala | 24 ++++ 8 files changed, 289 insertions(+) create mode 100644 akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortSession.scala create mode 100644 akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorage.scala create mode 100644 akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala create mode 100644 akka-persistence/akka-persistence-voldemort/src/test/resources/cluster.xml create mode 100644 akka-persistence/akka-persistence-voldemort/src/test/resources/server.properties create mode 100644 akka-persistence/akka-persistence-voldemort/src/test/resources/stores.xml create mode 100644 akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortSession.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortSession.scala new file mode 100644 index 0000000000..c0eca74832 --- /dev/null +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortSession.scala @@ -0,0 +1,24 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.persistence.voldemort + +import se.scalablesolutions.akka.util.UUID +import se.scalablesolutions.akka.stm._ +import se.scalablesolutions.akka.persistence.common._ +import voldemort.client.StoreClient + + +class VoldemortSession { + + val voldemort: StoreClient + + def getOptionalBytes(name: String): Option[Array[Byte]] = { + + } + + def put(name:) + + +} \ No newline at end of file diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorage.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorage.scala new file mode 100644 index 0000000000..a590de349b --- /dev/null +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorage.scala @@ -0,0 +1,43 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.persistence.voldemort + +import se.scalablesolutions.akka.util.UUID +import se.scalablesolutions.akka.stm._ +import se.scalablesolutions.akka.persistence.common._ + + +object VoldemortStorage extends Storage { + + type ElementType = Array[Byte] + def newMap: PersistentMap[ElementType, ElementType] = newMap(UUID.newUuid.toString) + def newVector: PersistentVector[ElementType] = newVector(UUID.newUuid.toString) + def newRef: PersistentRef[ElementType] = newRef(UUID.newUuid.toString) + + def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) + def getVector(id: String): PersistentVector[ElementType] = newVector(id) + def getRef(id: String): PersistentRef[ElementType] = newRef(id) + + def newMap(id: String): PersistentMap[ElementType, ElementType] = new VoldemortPersistentMap(id) + def 
newVector(id: String): PersistentVector[ElementType] = new VoldemortPersistentVector(id) + def newRef(id: String): PersistentRef[ElementType] = new VoldemortPersistentRef(id) +} + + +class VoldemortPersistentMap(id: String) extends PersistentMapBinary { + val uuid = id + val storage = VoldemortStorageBackend +} + + +class VoldemortPersistentVector(id: String) extends PersistentVector[Array[Byte]] { + val uuid = id + val storage = VoldemortStoragebackend +} + +class VoldemortPersistentRef(id: String) extends PersistentRef[Array[Byte]] { + val uuid = id + val storage = VoldemortStoragebackend +} diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala new file mode 100644 index 0000000000..5732fbac8d --- /dev/null +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala @@ -0,0 +1,129 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.persistence.voldemort + +import se.scalablesolutions.akka.stm._ +import se.scalablesolutions.akka.persistence.common._ +import se.scalablesolutions.akka.util.Logging +import se.scalablesolutions.akka.util.Helpers._ +import se.scalablesolutions.akka.config.Config.config + +import voldemort.client._ +import collection.mutable.{Set, HashSet, ArrayBuffer} +import java.lang.String + + +private[akka] object VoldemortStorageBackend extends +MapStorageBackend[Array[Byte], Array[Byte]] with + VectorStorageBackend[Array[Byte]] with + RefStorageBackend[Array[Byte]] with + Logging { + + /** + * Concat the owner+key+lenght of owner so owned data will be colocated + * Store the length of owner as last byte to work aroune the rarest case + * where ownerbytes1 + keybytes1 == ownerbytes2 + keybytes2 but ownerbytes1 != ownerbytes2 + */ + private def mapKey(owner: String, key: Array[Byte]): Array[Byte] = { + val ownerBytes: Array[Byte] = owner.getBytes("UTF-8") + val ownerLenghtByte = ownerBytes.length.byteValue + val mapKey = new Array[Byte](ownerBytes.length + key.length + 1) + System.arraycopy(ownerBytes, 0, mapKey, 0, ownerBytes.length) + System.arraycopy(key, 0, mapKey, ownerBytes.length, key.length) + mapKey.update(mapKey.length - 1) = ownerLenghtByte + } + + var refClient: StoreClient + var mapKeyClient: StoreClient + var mapValueClient: StoreClient + + + def getRefStorageFor(name: String): Option[Array[Byte]] = { + val result: Array[Byte] = refClient.get(RefKey(name).key) + result match { + case null => None + case _ => Some(result) + } + } + + def insertRefStorageFor(name: String, element: Array[Byte]) = { + refClient.put(RefKey(name).key, element) + } + + def getMapStorageRangeFor(name: String, start: Option[Array[Byte]], finish: Option[Array[Byte]], count: Int): List[(Array[Byte], Array[Byte])] = { + + } + + def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = { + val keys: Set[Array[Byte]] = mapKeyClient.getValue(name, new HashSet[Array[Byte]](0)) + val entries: ArrayBuffer[(Array[Byte], Array[Byte])] = new ArrayBuffer + keys.foreach { + entries += (_, mapValueClient.getValue(mapKey(name, _))) + } + entries.toList + } + + def getMapStorageSizeFor(name: String): Int = { + val keys: Set[Array[Byte]] = mapKeyClient.getValue(name, new HashSet[Array[Byte]](0)) + keys.size + } + + def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = { + val result: Array[Byte] = mapValueClient.get(mapKey(name, key)) + result 
match { + case null => None + case _ => Some(result) + } + } + + def removeMapStorageFor(name: String, key: Array[Byte]) = { + val keys: Set[Array[Byte]] = mapKeyClient.getValue(name, new HashSet[Array[Byte]](0)) + keys -= key + mapKeyClient.put(name, keys) + mapValueClient.delete(mapKey(name, key)) + } + + + def removeMapStorageFor(name: String) = { + val keys: Set[Array[Byte]] = mapKeyClient.getValue(name, new HashSet[Array[Byte]](0)) + keys.foreach { + mapValueClient.delete(mapKey(name, _)) + } + mapKeyClient.delete(name) + } + + def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte]) = { + mapValueClient.put(mapKey(name, key)) + val keys: Set[Array[Byte]] = mapKeyClient.getValue(name, new HashSet[Array[Byte]](0)) + keys += key + mapKeyClient.put(name, keys) + } + + def insertMapStorageEntriesFor(name: String, entries: List[(Array[Byte], Array[Byte])]) = { + val newKeys = new HashSet[Array[Byte]] + entries.foreach { + (key, value) => mapValueClient.put(mapKey(name, key), value) + newKeys += key + } + val keys: Set[Array[Byte]] = mapKeyClient.getValue(name, new HashSet[Array[Byte]](0)) + keys += key + mapKeyClient.put(name, keys) + } + + + def getVectorStorageSizeFor(name: String): Int = null + + def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[Array[Byte]] = null + + def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] = null + + def updateVectorStorageEntryFor(name: String, index: Int, elem: Array[Byte]) = null + + def insertVectorStorageEntriesFor(name: String, elements: List[Array[Byte]]) = null + + def insertVectorStorageEntryFor(name: String, element: Array[Byte]) = null + + +} \ No newline at end of file diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/cluster.xml b/akka-persistence/akka-persistence-voldemort/src/test/resources/cluster.xml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/server.properties b/akka-persistence/akka-persistence-voldemort/src/test/resources/server.properties new file mode 100644 index 0000000000..e69de29bb2 diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/stores.xml b/akka-persistence/akka-persistence-voldemort/src/test/resources/stores.xml new file mode 100644 index 0000000000..f031238e59 --- /dev/null +++ b/akka-persistence/akka-persistence-voldemort/src/test/resources/stores.xml @@ -0,0 +1,52 @@ + + + Refs + 1 + 1 + 1 + 1 + 1 + bdb + client + + string + utf8 + + + identity + + + + MapValues + 1 + 1 + 1 + 1 + 1 + bdb + client + + + + + identity + + + + MapKeys + 1 + 1 + 1 + 1 + 1 + bdb + client + + string + utf8 + + + identity + + + \ No newline at end of file diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala new file mode 100644 index 0000000000..b11a4bba35 --- /dev/null +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala @@ -0,0 +1,17 @@ +package se.scalablesolutions.akka.persistence.voldemort + +import org.scalatest.FunSuite +import org.scalatest.matchers.ShouldMatchers +import se.scalablesolutions.akka.util.UUID + + +/** + * + */ + +class VoldemortStorageBackendSuite extends FunSuite with ShouldMatchers { + + test("UUID generation looks like"){ + System.out.println(UUID.newUuid.toString) + } +} \ No newline at end of file diff 
--git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index 6a97dbccfd..fe4fb19584 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -50,6 +50,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val SonatypeSnapshotRepo = MavenRepository("Sonatype OSS Repo", "http://oss.sonatype.org/content/repositories/releases") lazy val SunJDMKRepo = MavenRepository("Sun JDMK Repo", "http://wp5.e-taxonomy.eu/cdmlib/mavenrepo") lazy val CasbahRepoReleases = MavenRepository("Casbah Release Repo", "http://repo.bumnetworks.com/releases") + lazy val ClojarsRepo = MavenRepository("Clojars Repo", "http://clojars.org/repo") } // ------------------------------------------------------------------------------------------------------------------- @@ -77,6 +78,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val logbackModuleConfig = ModuleConfiguration("ch.qos.logback",sbt.DefaultMavenRepository) lazy val atomikosModuleConfig = ModuleConfiguration("com.atomikos",sbt.DefaultMavenRepository) lazy val casbahRelease = ModuleConfiguration("com.novus",CasbahRepoReleases) + lazy val voldemortModuleConfig = ModuleConfiguration("voldemort", ClojarsRepo) lazy val embeddedRepo = EmbeddedRepo // This is the only exception, because the embedded repo is fast! // ------------------------------------------------------------------------------------------------------------------- @@ -200,6 +202,9 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val thrift = "com.facebook" % "thrift" % "r917130" % "compile" + lazy val voldemort = "voldemort" % "voldemort" % "0.81" % "compile" + lazy val voldemort_contrib = "voldemort" % "voldemort-contrib" % "0.81" % "compile" + lazy val werkz = "org.codehaus.aspectwerkz" % "aspectwerkz-nodeps-jdk5" % ASPECTWERKZ_VERSION % "compile" lazy val werkz_core = "org.codehaus.aspectwerkz" % "aspectwerkz-jdk5" % ASPECTWERKZ_VERSION % "compile" @@ -461,6 +466,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { new AkkaMongoProject(_), akka_persistence_common) lazy val akka_persistence_cassandra = project("akka-persistence-cassandra", "akka-persistence-cassandra", new AkkaCassandraProject(_), akka_persistence_common) + lazy val akka_persistence_voldemort = project("akka-persistence-voldemort", "akka-persistence-voldemort", + new AkkaVoldemortProject(_), akka_persistence_common) } // ------------------------------------------------------------------------------------------------------------------- @@ -510,6 +517,23 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil } + + // ------------------------------------------------------------------------------------------------------------------- + // akka-persistence-voldemort subproject + // ------------------------------------------------------------------------------------------------------------------- + + class AkkaVoldemortProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { + val voldemort = Dependencies.voldemort + val voldemort_contrib = Dependencies.voldemort_contrib + + //testing + val scalatest = Dependencies.scalatest + override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil + } + + + + // ------------------------------------------------------------------------------------------------------------------- // akka-kernel 
subproject // ------------------------------------------------------------------------------------------------------------------- From c86497a7708930e4eaddebda7a19a43f35cbcf46 Mon Sep 17 00:00:00 2001 From: ticktock Date: Tue, 14 Sep 2010 21:02:22 -0400 Subject: [PATCH 03/52] Initial PersistentMap backend --- .../src/main/scala/VoldemortSession.scala | 24 ------ .../src/main/scala/VoldemortStorage.scala | 4 +- .../main/scala/VoldemortStorageBackend.scala | 86 ++++++++++++------- 3 files changed, 57 insertions(+), 57 deletions(-) delete mode 100644 akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortSession.scala diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortSession.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortSession.scala deleted file mode 100644 index c0eca74832..0000000000 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortSession.scala +++ /dev/null @@ -1,24 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.persistence.voldemort - -import se.scalablesolutions.akka.util.UUID -import se.scalablesolutions.akka.stm._ -import se.scalablesolutions.akka.persistence.common._ -import voldemort.client.StoreClient - - -class VoldemortSession { - - val voldemort: StoreClient - - def getOptionalBytes(name: String): Option[Array[Byte]] = { - - } - - def put(name:) - - -} \ No newline at end of file diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorage.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorage.scala index a590de349b..b906460ca6 100644 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorage.scala +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorage.scala @@ -34,10 +34,10 @@ class VoldemortPersistentMap(id: String) extends PersistentMapBinary { class VoldemortPersistentVector(id: String) extends PersistentVector[Array[Byte]] { val uuid = id - val storage = VoldemortStoragebackend + val storage = VoldemortStorageBackend } class VoldemortPersistentRef(id: String) extends PersistentRef[Array[Byte]] { val uuid = id - val storage = VoldemortStoragebackend + val storage = VoldemortStorageBackend } diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala index 5732fbac8d..6849aa09b2 100644 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala @@ -13,6 +13,11 @@ import se.scalablesolutions.akka.config.Config.config import voldemort.client._ import collection.mutable.{Set, HashSet, ArrayBuffer} import java.lang.String +import voldemort.utils.ByteUtils +import collection.immutable.{SortedSet, TreeSet} +import voldemort.versioning.Versioned +import java.util.Map +import collection.JavaConversions private[akka] object VoldemortStorageBackend extends @@ -23,25 +28,30 @@ MapStorageBackend[Array[Byte], Array[Byte]] with /** * Concat the owner+key+lenght of owner so owned data will be colocated - * Store the length of owner as last byte to work aroune the rarest case + * Store the length of owner as last byte to work around the rare case * where ownerbytes1 + keybytes1 == ownerbytes2 + keybytes2 but ownerbytes1 != ownerbytes2 */ private def mapKey(owner: String, 
key: Array[Byte]): Array[Byte] = { val ownerBytes: Array[Byte] = owner.getBytes("UTF-8") val ownerLenghtByte = ownerBytes.length.byteValue - val mapKey = new Array[Byte](ownerBytes.length + key.length + 1) - System.arraycopy(ownerBytes, 0, mapKey, 0, ownerBytes.length) - System.arraycopy(key, 0, mapKey, ownerBytes.length, key.length) - mapKey.update(mapKey.length - 1) = ownerLenghtByte + val theMapKey = new Array[Byte](ownerBytes.length + key.length + 1) + System.arraycopy(ownerBytes, 0, theMapKey, 0, ownerBytes.length) + System.arraycopy(key, 0, theMapKey, ownerBytes.length, key.length) + theMapKey.update(theMapKey.length - 1, ownerLenghtByte) + theMapKey } - var refClient: StoreClient - var mapKeyClient: StoreClient - var mapValueClient: StoreClient + var refClient: StoreClient[String, Array[Byte]] = null + var mapKeyClient: StoreClient[String, SortedSet[Array[Byte]]] = null + var mapValueClient: StoreClient[Array[Byte], Array[Byte]] = null + + implicit val byteOrder = new Ordering[Array[Byte]] { + override def compare(x: Array[Byte], y: Array[Byte]) = ByteUtils.compare(x, y) + } def getRefStorageFor(name: String): Option[Array[Byte]] = { - val result: Array[Byte] = refClient.get(RefKey(name).key) + val result: Array[Byte] = refClient.getValue(name) result match { case null => None case _ => Some(result) @@ -49,29 +59,41 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } def insertRefStorageFor(name: String, element: Array[Byte]) = { - refClient.put(RefKey(name).key, element) + refClient.put(name, element) } def getMapStorageRangeFor(name: String, start: Option[Array[Byte]], finish: Option[Array[Byte]], count: Int): List[(Array[Byte], Array[Byte])] = { - + val allkeys: SortedSet[Array[Byte]] = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]) + val range = allkeys.rangeImpl(start, finish).take(count) + getKeyValues(range) } def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = { - val keys: Set[Array[Byte]] = mapKeyClient.getValue(name, new HashSet[Array[Byte]](0)) - val entries: ArrayBuffer[(Array[Byte], Array[Byte])] = new ArrayBuffer - keys.foreach { - entries += (_, mapValueClient.getValue(mapKey(name, _))) - } - entries.toList + val keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) + getKeyValues(keys) + } + + private def getKeyValues(keys: SortedSet[Array[Byte]]): List[(Array[Byte], Array[Byte])] = { + val all: Map[Array[Byte], Versioned[Array[Byte]]] = mapValueClient.getAll(JavaConversions.asIterable(keys)) + JavaConversions.asMap(all).foldLeft(new ArrayBuffer[(Array[Byte], Array[Byte])]) { + (buf, keyVal) => { + keyVal match { + case (key, versioned) => { + buf += key -> versioned.getValue + } + } + buf + } + }.toList } def getMapStorageSizeFor(name: String): Int = { - val keys: Set[Array[Byte]] = mapKeyClient.getValue(name, new HashSet[Array[Byte]](0)) + val keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) keys.size } def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = { - val result: Array[Byte] = mapValueClient.get(mapKey(name, key)) + val result: Array[Byte] = mapValueClient.getValue(mapKey(name, key)) result match { case null => None case _ => Some(result) @@ -79,7 +101,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } def removeMapStorageFor(name: String, key: Array[Byte]) = { - val keys: Set[Array[Byte]] = mapKeyClient.getValue(name, new HashSet[Array[Byte]](0)) + var keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) keys -= key mapKeyClient.put(name, keys) 
mapValueClient.delete(mapKey(name, key)) @@ -87,33 +109,35 @@ MapStorageBackend[Array[Byte], Array[Byte]] with def removeMapStorageFor(name: String) = { - val keys: Set[Array[Byte]] = mapKeyClient.getValue(name, new HashSet[Array[Byte]](0)) + val keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) keys.foreach { - mapValueClient.delete(mapKey(name, _)) + key => + mapValueClient.delete(mapKey(name, key)) } mapKeyClient.delete(name) } def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte]) = { - mapValueClient.put(mapKey(name, key)) - val keys: Set[Array[Byte]] = mapKeyClient.getValue(name, new HashSet[Array[Byte]](0)) + mapValueClient.put(mapKey(name, key), value) + var keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) keys += key mapKeyClient.put(name, keys) } def insertMapStorageEntriesFor(name: String, entries: List[(Array[Byte], Array[Byte])]) = { - val newKeys = new HashSet[Array[Byte]] - entries.foreach { - (key, value) => mapValueClient.put(mapKey(name, key), value) - newKeys += key + val newKeys = entries.map { + case (key, value) => { + mapValueClient.put(mapKey(name, key), value) + key + } } - val keys: Set[Array[Byte]] = mapKeyClient.getValue(name, new HashSet[Array[Byte]](0)) - keys += key + var keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) + keys ++= newKeys mapKeyClient.put(name, keys) } - def getVectorStorageSizeFor(name: String): Int = null + def getVectorStorageSizeFor(name: String): Int = 0 def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[Array[Byte]] = null From 5ad5a4df07bf5eac70cff953b034ada984a7d08f Mon Sep 17 00:00:00 2001 From: ticktock Date: Wed, 15 Sep 2010 16:25:23 -0400 Subject: [PATCH 04/52] finished ref map vector and some initial test scaffolding --- .../main/scala/VoldemortStorageBackend.scala | 165 ++++-- .../src/test/resources/cluster.xml | 0 .../src/test/resources/config/cluster.xml | 14 + .../test/resources/config/server.properties | 1 + .../test/resources/{ => config}/stores.xml | 41 +- .../src/test/resources/server.properties | 0 .../src/test/scala/EmbeddedVoldemort.scala | 33 ++ .../scala/VoldemortStorageBackendSuite.scala | 55 +- project/build/AkkaProject.scala | 549 ++++++++++-------- 9 files changed, 552 insertions(+), 306 deletions(-) delete mode 100644 akka-persistence/akka-persistence-voldemort/src/test/resources/cluster.xml create mode 100644 akka-persistence/akka-persistence-voldemort/src/test/resources/config/cluster.xml create mode 100644 akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties rename akka-persistence/akka-persistence-voldemort/src/test/resources/{ => config}/stores.xml (54%) delete mode 100644 akka-persistence/akka-persistence-voldemort/src/test/resources/server.properties create mode 100644 akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala index 6849aa09b2..6915a7bca8 100644 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala @@ -11,13 +11,14 @@ import se.scalablesolutions.akka.util.Helpers._ import se.scalablesolutions.akka.config.Config.config import voldemort.client._ -import collection.mutable.{Set, HashSet, 
ArrayBuffer} import java.lang.String import voldemort.utils.ByteUtils -import collection.immutable.{SortedSet, TreeSet} import voldemort.versioning.Versioned -import java.util.Map import collection.JavaConversions +import java.nio.ByteBuffer +import collection.immutable.{IndexedSeq, SortedSet, TreeSet} +import collection.mutable.{Map, Set, HashSet, ArrayBuffer} +import java.util.{Map => JMap} private[akka] object VoldemortStorageBackend extends @@ -25,26 +26,27 @@ MapStorageBackend[Array[Byte], Array[Byte]] with VectorStorageBackend[Array[Byte]] with RefStorageBackend[Array[Byte]] with Logging { - - /** - * Concat the owner+key+lenght of owner so owned data will be colocated - * Store the length of owner as last byte to work around the rare case - * where ownerbytes1 + keybytes1 == ownerbytes2 + keybytes2 but ownerbytes1 != ownerbytes2 - */ - private def mapKey(owner: String, key: Array[Byte]): Array[Byte] = { - val ownerBytes: Array[Byte] = owner.getBytes("UTF-8") - val ownerLenghtByte = ownerBytes.length.byteValue - val theMapKey = new Array[Byte](ownerBytes.length + key.length + 1) - System.arraycopy(ownerBytes, 0, theMapKey, 0, ownerBytes.length) - System.arraycopy(key, 0, theMapKey, ownerBytes.length, key.length) - theMapKey.update(theMapKey.length - 1, ownerLenghtByte) - theMapKey + val bootstrapUrl: String = config.getString("akka.storage.voldemort.bootstrap.url", "tcp://localhost:6666") + val refStore = config.getString("akka.storage.voldemort.store.ref", "Refs") + val mapKeyStore = config.getString("akka.storage.voldemort.store.map.key", "MapKeys") + val mapValueStore = config.getString("akka.storage.voldemort.store.map.value", "MapValues") + val vectorSizeStore = config.getString("akka.storage.voldemort.store.vector.size", "VectorSizes") + val vectorValueStore = config.getString("akka.storage.voldemort.store.vectore.value", "VectorValues") + val storeClientFactory = { + if (bootstrapUrl.startsWith("tcp")) { + new SocketStoreClientFactory(new ClientConfig().setBootstrapUrls(bootstrapUrl)) + } else if (bootstrapUrl.startsWith("http")) { + new HttpStoreClientFactory(new ClientConfig().setBootstrapUrls(bootstrapUrl)) + } else { + throw new IllegalArgumentException("Unknown boostrapUrl syntax" + bootstrapUrl) + } } - - var refClient: StoreClient[String, Array[Byte]] = null - var mapKeyClient: StoreClient[String, SortedSet[Array[Byte]]] = null - var mapValueClient: StoreClient[Array[Byte], Array[Byte]] = null - + var refClient: StoreClient[String, Array[Byte]] = storeClientFactory.getStoreClient(refStore) + var mapKeyClient: StoreClient[String, SortedSet[Array[Byte]]] = storeClientFactory.getStoreClient(mapKeyStore) + var mapValueClient: StoreClient[Array[Byte], Array[Byte]] = storeClientFactory.getStoreClient(mapValueStore) + var vectorSizeClient: StoreClient[String, Array[Byte]] = storeClientFactory.getStoreClient(vectorSizeStore) + var vectorValueClient: StoreClient[Array[Byte], Array[Byte]] = storeClientFactory.getStoreClient(vectorValueStore) + val underscoreBytesUTF8 = "_".getBytes("UTF-8") implicit val byteOrder = new Ordering[Array[Byte]] { override def compare(x: Array[Byte], y: Array[Byte]) = ByteUtils.compare(x, y) } @@ -74,8 +76,8 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } private def getKeyValues(keys: SortedSet[Array[Byte]]): List[(Array[Byte], Array[Byte])] = { - val all: Map[Array[Byte], Versioned[Array[Byte]]] = mapValueClient.getAll(JavaConversions.asIterable(keys)) - JavaConversions.asMap(all).foldLeft(new ArrayBuffer[(Array[Byte], Array[Byte])]) { + val 
all: JMap[Array[Byte], Versioned[Array[Byte]]] = mapValueClient.getAll(JavaConversions.asIterable(keys)) + JavaConversions.asMap(all).foldLeft(new ArrayBuffer[(Array[Byte], Array[Byte])](all.size)) { (buf, keyVal) => { keyVal match { case (key, versioned) => { @@ -93,7 +95,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = { - val result: Array[Byte] = mapValueClient.getValue(mapKey(name, key)) + val result: Array[Byte] = mapValueClient.getValue(getKey(name, key)) result match { case null => None case _ => Some(result) @@ -104,7 +106,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with var keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) keys -= key mapKeyClient.put(name, keys) - mapValueClient.delete(mapKey(name, key)) + mapValueClient.delete(getKey(name, key)) } @@ -112,13 +114,13 @@ MapStorageBackend[Array[Byte], Array[Byte]] with val keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) keys.foreach { key => - mapValueClient.delete(mapKey(name, key)) + mapValueClient.delete(getKey(name, key)) } mapKeyClient.delete(name) } def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte]) = { - mapValueClient.put(mapKey(name, key), value) + mapValueClient.put(getKey(name, key), value) var keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) keys += key mapKeyClient.put(name, keys) @@ -127,7 +129,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with def insertMapStorageEntriesFor(name: String, entries: List[(Array[Byte], Array[Byte])]) = { val newKeys = entries.map { case (key, value) => { - mapValueClient.put(mapKey(name, key), value) + mapValueClient.put(getKey(name, key), value) key } } @@ -137,17 +139,110 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } - def getVectorStorageSizeFor(name: String): Int = 0 + def getVectorStorageSizeFor(name: String): Int = { + IntSerializer.fromBytes(vectorSizeClient.getValue(name, IntSerializer.toBytes(0))) + } - def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[Array[Byte]] = null - def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] = null + def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[Array[Byte]] = { + val size = getVectorStorageSizeFor(name) + val st = start.getOrElse(0) + val cnt = + if (finish.isDefined) { + val f = finish.get + if (f >= st) (f - st) else count + } else { + count + } + val seq: IndexedSeq[Array[Byte]] = (st to st + cnt).map { + index => getVectorValueKey(name, index) + } - def updateVectorStorageEntryFor(name: String, index: Int, elem: Array[Byte]) = null + val all: JMap[Array[Byte], Versioned[Array[Byte]]] = vectorValueClient.getAll(JavaConversions.asIterable(seq)) - def insertVectorStorageEntriesFor(name: String, elements: List[Array[Byte]]) = null + val buf = new ArrayBuffer[Array[Byte]](seq.size) + seq.foreach { + key => { + val index = getIndexFromVectorValueKey(name, key) + var value: Array[Byte] = null + if (all.containsKey(key)) { + value = all.get(key).getValue + } else { + value = Array.empty[Byte] + } + buf.update(index, value) + } + } + buf.toList + } - def insertVectorStorageEntryFor(name: String, element: Array[Byte]) = null + def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] = { + vectorValueClient.getValue(getVectorValueKey(name, index), Array.empty[Byte]) + } + + def updateVectorStorageEntryFor(name: String, index: Int, elem: 
Array[Byte]) = { + val size = getVectorStorageSizeFor(name) + vectorValueClient.put(getVectorValueKey(name, index), elem) + if (size < index + 1) { + vectorSizeClient.put(name, IntSerializer.toBytes(index + 1)) + } + } + + def insertVectorStorageEntriesFor(name: String, elements: List[Array[Byte]]) = { + var size = getVectorStorageSizeFor(name) + elements.foreach { + element => + vectorValueClient.put(getVectorValueKey(name, size), element) + size += 1 + } + vectorSizeClient.put(name, IntSerializer.toBytes(size)) + } + + def insertVectorStorageEntryFor(name: String, element: Array[Byte]) = { + insertVectorStorageEntriesFor(name, List(element)) + } + + + /** + * Concat the ownerlenght+owner+key+ of owner so owned data will be colocated + * Store the length of owner as first byte to work around the rare case + * where ownerbytes1 + keybytes1 == ownerbytes2 + keybytes2 but ownerbytes1 != ownerbytes2 + */ + def getKey(owner: String, key: Array[Byte]): Array[Byte] = { + val ownerBytes: Array[Byte] = owner.getBytes("UTF-8") + val ownerLenghtBytes: Array[Byte] = IntSerializer.toBytes(owner.length) + val theKey = new Array[Byte](ownerLenghtBytes.length + ownerBytes.length + key.length) + System.arraycopy(ownerLenghtBytes, 0, theKey, 0, ownerLenghtBytes.length) + System.arraycopy(ownerBytes, 0, theKey, ownerLenghtBytes.length, ownerBytes.length) + System.arraycopy(key, 0, theKey, ownerLenghtBytes.length + ownerBytes.length, key.length) + theKey + } + + def getVectorValueKey(owner: String, index: Int): Array[Byte] = { + val indexbytes = IntSerializer.toBytes(index) + val theIndexKey = new Array[Byte](underscoreBytesUTF8.length + indexbytes.length) + System.arraycopy(underscoreBytesUTF8, 0, theIndexKey, 0, underscoreBytesUTF8.length) + System.arraycopy(indexbytes, 0, theIndexKey, underscoreBytesUTF8.length, indexbytes.length) + getKey(owner, theIndexKey) + } + + def getIndexFromVectorValueKey(owner: String, key: Array[Byte]): Int = { + val indexBytes = new Array[Byte](IntSerializer.bytesPerInt) + System.arraycopy(key, key.length - IntSerializer.bytesPerInt - 1, indexBytes, 0, IntSerializer.bytesPerInt) + IntSerializer.fromBytes(indexBytes) + } + + object IntSerializer { + val bytesPerInt = java.lang.Integer.SIZE / java.lang.Byte.SIZE + + def toBytes(i: Int) = ByteBuffer.wrap(new Array[Byte](bytesPerInt)).putInt(i).array() + + def fromBytes(bytes: Array[Byte]) = ByteBuffer.wrap(bytes).getInt() + + def toString(obj: Int) = obj.toString + + def fromString(str: String) = str.toInt + } } \ No newline at end of file diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/cluster.xml b/akka-persistence/akka-persistence-voldemort/src/test/resources/cluster.xml deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/cluster.xml b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/cluster.xml new file mode 100644 index 0000000000..dcf806b0ca --- /dev/null +++ b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/cluster.xml @@ -0,0 +1,14 @@ + + + akka-test + + + 0 + localhost + 8081 + 6666 + 6667 + + 0,1,2,3 + + diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties new file mode 100644 index 0000000000..1e6af91e2d --- /dev/null +++ b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties @@ -0,0 +1 @@ 
+node.id=0 diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/stores.xml b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml similarity index 54% rename from akka-persistence/akka-persistence-voldemort/src/test/resources/stores.xml rename to akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml index f031238e59..b0491ea9a4 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/resources/stores.xml +++ b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml @@ -6,7 +6,7 @@ 1 1 1 - bdb + memory client string @@ -23,10 +23,10 @@ 1 1 1 - bdb + memory client - + identity identity @@ -39,7 +39,40 @@ 1 1 1 - bdb + memory + client + + string + utf8 + + + java-serialization + + + + VectorValues + 1 + 1 + 1 + 1 + 1 + memory + client + + identity + + + identity + + + + VectorSizes + 1 + 1 + 1 + 1 + 1 + memory client string diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/server.properties b/akka-persistence/akka-persistence-voldemort/src/test/resources/server.properties deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala new file mode 100644 index 0000000000..5631542a78 --- /dev/null +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala @@ -0,0 +1,33 @@ +package se.scalablesolutions.akka.persistence.voldemort + +import org.scalatest.matchers.ShouldMatchers +import se.scalablesolutions.akka.util.UUID +import voldemort.server.{VoldemortServer, VoldemortConfig} +import org.scalatest.{Suite, BeforeAndAfterAll, FunSuite} +import org.junit.runner.RunWith +import org.scalatest.junit.JUnitRunner +import voldemort.utils.Utils +import java.io.File + +@RunWith(classOf[JUnitRunner]) +trait EmbeddedVoldemort extends BeforeAndAfterAll { + this: Suite => + var server: VoldemortServer = null + + override protected def beforeAll(): Unit = { + + try { + val dir = "./akka-persistence/akka-persistence-voldemort/src/test/resources" + val home = new File(dir) + val config = VoldemortConfig.loadFromVoldemortHome(home.getCanonicalPath) + server = new VoldemortServer(config) + server.start + } catch { + case e => e.printStackTrace + } + } + + override protected def afterAll(): Unit = { + server.stop + } +} \ No newline at end of file diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala index b11a4bba35..68f1ebed0f 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala @@ -2,16 +2,51 @@ package se.scalablesolutions.akka.persistence.voldemort import org.scalatest.FunSuite import org.scalatest.matchers.ShouldMatchers -import se.scalablesolutions.akka.util.UUID +import org.junit.runner.RunWith +import org.scalatest.junit.JUnitRunner +import se.scalablesolutions.akka.persistence.voldemort.VoldemortStorageBackend._ +import se.scalablesolutions.akka.util.{Logging, UUID} +import collection.immutable.TreeSet - -/** - * - */ - -class VoldemortStorageBackendSuite extends FunSuite with ShouldMatchers { - - test("UUID generation looks like"){ - 
System.out.println(UUID.newUuid.toString) +@RunWith(classOf[JUnitRunner]) +class VoldemortStorageBackendSuite extends FunSuite with ShouldMatchers with EmbeddedVoldemort with Logging { + test("that ref storage and retrieval works") { + refClient.put("testRef", "testRefValue".getBytes("UTF-8")) + new String(refClient.getValue("testRef", Array.empty[Byte]), "UTF-8") should be("testRefValue") } + + test("that map key storage and retrieval works") { + val mapKeys = new TreeSet[Array[Byte]] + "key1".getBytes + mapKeyClient.put("testMapKey", mapKeys) + val returned = mapKeyClient.getValue("testMapKey", new TreeSet[Array[Byte]]) + returned should equal(mapKeys) + } + + test("that map value storage and retrieval works") { + val key = "keyForTestingMapValueClient".getBytes("UTF-8") + val value = "value for testing map value client".getBytes("UTF-8") + mapValueClient.put(key, value) + mapValueClient.getValue(key) should equal(value) + } + + test("that vector size storage and retrieval works") { + val key = "vectorKey" + vectorSizeClient.put(key, IntSerializer.toBytes(17)) + vectorSizeClient.getValue(key) should equal(IntSerializer.toBytes(17)) + } + + test("that vector value storage and retrieval works") { + val key = "vectorValueKey" + val index = 3 + val value = "some bytes".getBytes("UTF-8") + val vecKey = getVectorValueKey(key, index) + try{ + val idx = getIndexFromVectorValueKey(key, vecKey) + vectorValueClient.put(vecKey, value) + vectorValueClient.get(vecKey) should equal(value) + } catch{ + case e => e.printStackTrace + } + } + } \ No newline at end of file diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index fe4fb19584..b74dfea6e2 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -1,4 +1,4 @@ - /*---------------------------------------------------------------------------\ +/*---------------------------------------------------------------------------\ | Copyright (C) 2009-2010 Scalable Solutions AB | \---------------------------------------------------------------------------*/ @@ -17,13 +17,14 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- override def compileOptions = super.compileOptions ++ - Seq("-deprecation", - "-Xmigration", - "-Xcheckinit", - "-Xstrict-warnings", - "-Xwarninit", - "-encoding", "utf8") - .map(x => CompileOption(x)) + Seq("-deprecation", + "-Xmigration", + "-Xcheckinit", + "-Xstrict-warnings", + "-Xwarninit", + "-encoding", "utf8") + .map(x => CompileOption(x)) + override def javaCompileOptions = JavaCompileOption("-Xlint:unchecked") :: super.javaCompileOptions.toList // ------------------------------------------------------------------------------------------------------------------- @@ -32,25 +33,28 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val deployPath = info.projectPath / "deploy" lazy val distPath = info.projectPath / "dist" + def distName = "%s_%s-%s.zip".format(name, buildScalaVersion, version) - lazy val dist = zipTask(allArtifacts, "dist", distName) dependsOn (`package`) describedAs("Zips up the distribution.") + + lazy val dist = zipTask(allArtifacts, "dist", distName) dependsOn (`package`) describedAs ("Zips up the distribution.") // ------------------------------------------------------------------------------------------------------------------- // All repositories *must* go here! 
See ModuleConfigurations below.
  // -------------------------------------------------------------------------------------------------------------------
  object Repositories {
-    lazy val AkkaRepo = MavenRepository("Akka Repository", "http://scalablesolutions.se/akka/repository")
-    lazy val CodehausRepo = MavenRepository("Codehaus Repo", "http://repository.codehaus.org")
-    lazy val EmbeddedRepo = MavenRepository("Embedded Repo", (info.projectPath / "embedded-repo").asURL.toString)
+    lazy val AkkaRepo = MavenRepository("Akka Repository", "http://scalablesolutions.se/akka/repository")
+    lazy val CodehausRepo = MavenRepository("Codehaus Repo", "http://repository.codehaus.org")
+    lazy val EmbeddedRepo = MavenRepository("Embedded Repo", (info.projectPath / "embedded-repo").asURL.toString)
     lazy val FusesourceSnapshotRepo = MavenRepository("Fusesource Snapshots", "http://repo.fusesource.com/nexus/content/repositories/snapshots")
-    lazy val GuiceyFruitRepo = MavenRepository("GuiceyFruit Repo", "http://guiceyfruit.googlecode.com/svn/repo/releases/")
-    lazy val JBossRepo = MavenRepository("JBoss Repo", "https://repository.jboss.org/nexus/content/groups/public/")
-    lazy val JavaNetRepo = MavenRepository("java.net Repo", "http://download.java.net/maven/2")
+    lazy val GuiceyFruitRepo = MavenRepository("GuiceyFruit Repo", "http://guiceyfruit.googlecode.com/svn/repo/releases/")
+    lazy val JBossRepo = MavenRepository("JBoss Repo", "https://repository.jboss.org/nexus/content/groups/public/")
+    lazy val JavaNetRepo = MavenRepository("java.net Repo", "http://download.java.net/maven/2")
     lazy val SonatypeSnapshotRepo = MavenRepository("Sonatype OSS Repo", "http://oss.sonatype.org/content/repositories/releases")
-    lazy val SunJDMKRepo = MavenRepository("Sun JDMK Repo", "http://wp5.e-taxonomy.eu/cdmlib/mavenrepo")
-    lazy val CasbahRepoReleases = MavenRepository("Casbah Release Repo", "http://repo.bumnetworks.com/releases")
-    lazy val ClojarsRepo = MavenRepository("Clojars Repo", "http://clojars.org/repo")
+    lazy val SunJDMKRepo = MavenRepository("Sun JDMK Repo", "http://wp5.e-taxonomy.eu/cdmlib/mavenrepo")
+    lazy val CasbahRepoReleases = MavenRepository("Casbah Release Repo", "http://repo.bumnetworks.com/releases")
+    lazy val ClojarsRepo = MavenRepository("Clojars Repo", "http://clojars.org/repo")
+    lazy val OracleRepo = MavenRepository("Oracle Repo", "http://download.oracle.com/maven")
   }

   // -------------------------------------------------------------------------------------------------------------------
@@ -61,44 +65,45 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
   // -------------------------------------------------------------------------------------------------------------------
   import Repositories._
-  lazy val atmosphereModuleConfig = ModuleConfiguration("org.atmosphere", SonatypeSnapshotRepo)
-  lazy val jettyModuleConfig = ModuleConfiguration("org.eclipse.jetty", sbt.DefaultMavenRepository)
+  lazy val atmosphereModuleConfig = ModuleConfiguration("org.atmosphere", SonatypeSnapshotRepo)
+  lazy val jettyModuleConfig = ModuleConfiguration("org.eclipse.jetty", sbt.DefaultMavenRepository)
   lazy val guiceyFruitModuleConfig = ModuleConfiguration("org.guiceyfruit", GuiceyFruitRepo)
//  lazy val hawtdispatchModuleConfig = ModuleConfiguration("org.fusesource.hawtdispatch", FusesourceSnapshotRepo)
-  lazy val jbossModuleConfig = ModuleConfiguration("org.jboss", JBossRepo)
-  lazy val jdmkModuleConfig = ModuleConfiguration("com.sun.jdmk", SunJDMKRepo)
-  lazy val jmsModuleConfig =
ModuleConfiguration("javax.jms", SunJDMKRepo) - lazy val jmxModuleConfig = ModuleConfiguration("com.sun.jmx", SunJDMKRepo) + lazy val jbossModuleConfig = ModuleConfiguration("org.jboss", JBossRepo) + lazy val jdmkModuleConfig = ModuleConfiguration("com.sun.jdmk", SunJDMKRepo) + lazy val jmsModuleConfig = ModuleConfiguration("javax.jms", SunJDMKRepo) + lazy val jmxModuleConfig = ModuleConfiguration("com.sun.jmx", SunJDMKRepo) lazy val jerseyContrModuleConfig = ModuleConfiguration("com.sun.jersey.contribs", JavaNetRepo) - lazy val jerseyModuleConfig = ModuleConfiguration("com.sun.jersey", JavaNetRepo) - lazy val jgroupsModuleConfig = ModuleConfiguration("jgroups", JBossRepo) - lazy val multiverseModuleConfig = ModuleConfiguration("org.multiverse", CodehausRepo) - lazy val nettyModuleConfig = ModuleConfiguration("org.jboss.netty", JBossRepo) - lazy val scalaTestModuleConfig = ModuleConfiguration("org.scalatest", ScalaToolsSnapshots) - lazy val logbackModuleConfig = ModuleConfiguration("ch.qos.logback",sbt.DefaultMavenRepository) - lazy val atomikosModuleConfig = ModuleConfiguration("com.atomikos",sbt.DefaultMavenRepository) - lazy val casbahRelease = ModuleConfiguration("com.novus",CasbahRepoReleases) - lazy val voldemortModuleConfig = ModuleConfiguration("voldemort", ClojarsRepo) - lazy val embeddedRepo = EmbeddedRepo // This is the only exception, because the embedded repo is fast! + lazy val jerseyModuleConfig = ModuleConfiguration("com.sun.jersey", JavaNetRepo) + lazy val jgroupsModuleConfig = ModuleConfiguration("jgroups", JBossRepo) + lazy val multiverseModuleConfig = ModuleConfiguration("org.multiverse", CodehausRepo) + lazy val nettyModuleConfig = ModuleConfiguration("org.jboss.netty", JBossRepo) + lazy val scalaTestModuleConfig = ModuleConfiguration("org.scalatest", ScalaToolsSnapshots) + lazy val logbackModuleConfig = ModuleConfiguration("ch.qos.logback", sbt.DefaultMavenRepository) + lazy val atomikosModuleConfig = ModuleConfiguration("com.atomikos", sbt.DefaultMavenRepository) + lazy val casbahRelease = ModuleConfiguration("com.novus", CasbahRepoReleases) + lazy val voldemortModuleConfig = ModuleConfiguration("voldemort", ClojarsRepo) + lazy val sleepycatModuleConfig = ModuleConfiguration("com.sleepycat", OracleRepo) + lazy val embeddedRepo = EmbeddedRepo // This is the only exception, because the embedded repo is fast! 
// ------------------------------------------------------------------------------------------------------------------- // Versions // ------------------------------------------------------------------------------------------------------------------- - lazy val ATMO_VERSION = "0.6.1" - lazy val CAMEL_VERSION = "2.4.0" - lazy val CASSANDRA_VERSION = "0.6.1" - lazy val DISPATCH_VERSION = "0.7.4" + lazy val ATMO_VERSION = "0.6.1" + lazy val CAMEL_VERSION = "2.4.0" + lazy val CASSANDRA_VERSION = "0.6.1" + lazy val DISPATCH_VERSION = "0.7.4" lazy val HAWT_DISPATCH_VERSION = "1.0" - lazy val JACKSON_VERSION = "1.2.1" - lazy val JERSEY_VERSION = "1.2" - lazy val MULTIVERSE_VERSION = "0.6.1" - lazy val SCALATEST_VERSION = "1.2-for-scala-2.8.0.final-SNAPSHOT" - lazy val LOGBACK_VERSION = "0.9.24" - lazy val SLF4J_VERSION = "1.6.0" - lazy val SPRING_VERSION = "3.0.3.RELEASE" - lazy val ASPECTWERKZ_VERSION = "2.2.1" - lazy val JETTY_VERSION = "7.1.4.v20100610" + lazy val JACKSON_VERSION = "1.2.1" + lazy val JERSEY_VERSION = "1.2" + lazy val MULTIVERSE_VERSION = "0.6.1" + lazy val SCALATEST_VERSION = "1.2-for-scala-2.8.0.final-SNAPSHOT" + lazy val LOGBACK_VERSION = "0.9.24" + lazy val SLF4J_VERSION = "1.6.0" + lazy val SPRING_VERSION = "3.0.3.RELEASE" + lazy val ASPECTWERKZ_VERSION = "2.2.1" + lazy val JETTY_VERSION = "7.1.4.v20100610" // ------------------------------------------------------------------------------------------------------------------- // Dependencies @@ -112,14 +117,14 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val aopalliance = "aopalliance" % "aopalliance" % "1.0" % "compile" - lazy val atmo = "org.atmosphere" % "atmosphere-annotations" % ATMO_VERSION % "compile" + lazy val atmo = "org.atmosphere" % "atmosphere-annotations" % ATMO_VERSION % "compile" lazy val atmo_jbossweb = "org.atmosphere" % "atmosphere-compat-jbossweb" % ATMO_VERSION % "compile" - lazy val atmo_jersey = "org.atmosphere" % "atmosphere-jersey" % ATMO_VERSION % "compile" - lazy val atmo_runtime = "org.atmosphere" % "atmosphere-runtime" % ATMO_VERSION % "compile" - lazy val atmo_tomcat = "org.atmosphere" % "atmosphere-compat-tomcat" % ATMO_VERSION % "compile" + lazy val atmo_jersey = "org.atmosphere" % "atmosphere-jersey" % ATMO_VERSION % "compile" + lazy val atmo_runtime = "org.atmosphere" % "atmosphere-runtime" % ATMO_VERSION % "compile" + lazy val atmo_tomcat = "org.atmosphere" % "atmosphere-compat-tomcat" % ATMO_VERSION % "compile" lazy val atmo_weblogic = "org.atmosphere" % "atmosphere-compat-weblogic" % ATMO_VERSION % "compile" - lazy val atomikos_transactions = "com.atomikos" % "transactions" % "3.2.3" % "compile" + lazy val atomikos_transactions = "com.atomikos" % "transactions" % "3.2.3" % "compile" lazy val atomikos_transactions_api = "com.atomikos" % "transactions-api" % "3.2.3" % "compile" lazy val atomikos_transactions_jta = "com.atomikos" % "transactions-jta" % "3.2.3" % "compile" @@ -138,9 +143,9 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val dispatch_http = "net.databinder" % "dispatch-http_2.8.0" % DISPATCH_VERSION % "compile" lazy val dispatch_json = "net.databinder" % "dispatch-json_2.8.0" % DISPATCH_VERSION % "compile" - lazy val jetty = "org.eclipse.jetty" % "jetty-server" % JETTY_VERSION % "compile" - lazy val jetty_util = "org.eclipse.jetty" % "jetty-util" % JETTY_VERSION % "compile" - lazy val jetty_xml = "org.eclipse.jetty" % "jetty-xml" % JETTY_VERSION % "compile" + lazy val jetty = "org.eclipse.jetty" % 
"jetty-server" % JETTY_VERSION % "compile" + lazy val jetty_util = "org.eclipse.jetty" % "jetty-util" % JETTY_VERSION % "compile" + lazy val jetty_xml = "org.eclipse.jetty" % "jetty-xml" % JETTY_VERSION % "compile" lazy val jetty_servlet = "org.eclipse.jetty" % "jetty-servlet" % JETTY_VERSION % "compile" lazy val guicey = "org.guiceyfruit" % "guice-all" % "2.0" % "compile" @@ -149,14 +154,14 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val hawtdispatch = "org.fusesource.hawtdispatch" % "hawtdispatch-scala" % HAWT_DISPATCH_VERSION % "compile" - lazy val jackson = "org.codehaus.jackson" % "jackson-mapper-asl" % JACKSON_VERSION % "compile" - lazy val jackson_core = "org.codehaus.jackson" % "jackson-core-asl" % JACKSON_VERSION % "compile" - lazy val jackson_core_asl = "org.codehaus.jackson" % "jackson-core-asl" % JACKSON_VERSION % "compile" + lazy val jackson = "org.codehaus.jackson" % "jackson-mapper-asl" % JACKSON_VERSION % "compile" + lazy val jackson_core = "org.codehaus.jackson" % "jackson-core-asl" % JACKSON_VERSION % "compile" + lazy val jackson_core_asl = "org.codehaus.jackson" % "jackson-core-asl" % JACKSON_VERSION % "compile" - lazy val jersey = "com.sun.jersey" % "jersey-core" % JERSEY_VERSION % "compile" - lazy val jersey_json = "com.sun.jersey" % "jersey-json" % JERSEY_VERSION % "compile" - lazy val jersey_server = "com.sun.jersey" % "jersey-server" % JERSEY_VERSION % "compile" - lazy val jersey_contrib = "com.sun.jersey.contribs" % "jersey-scala" % JERSEY_VERSION % "compile" + lazy val jersey = "com.sun.jersey" % "jersey-core" % JERSEY_VERSION % "compile" + lazy val jersey_json = "com.sun.jersey" % "jersey-json" % JERSEY_VERSION % "compile" + lazy val jersey_server = "com.sun.jersey" % "jersey-server" % JERSEY_VERSION % "compile" + lazy val jersey_contrib = "com.sun.jersey.contribs" % "jersey-scala" % JERSEY_VERSION % "compile" lazy val jgroups = "jgroups" % "jgroups" % "2.9.0.GA" % "compile" @@ -190,56 +195,64 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val sjson = "sjson.json" % "sjson" % "0.8-SNAPSHOT-2.8.0" % "compile" - lazy val slf4j = "org.slf4j" % "slf4j-api" % SLF4J_VERSION % "compile" + lazy val slf4j = "org.slf4j" % "slf4j-api" % SLF4J_VERSION % "compile" - lazy val logback = "ch.qos.logback" % "logback-classic" % LOGBACK_VERSION % "compile" + lazy val logback = "ch.qos.logback" % "logback-classic" % LOGBACK_VERSION % "compile" lazy val logback_core = "ch.qos.logback" % "logback-core" % LOGBACK_VERSION % "compile" - lazy val spring_beans = "org.springframework" % "spring-beans" % SPRING_VERSION % "compile" + lazy val spring_beans = "org.springframework" % "spring-beans" % SPRING_VERSION % "compile" lazy val spring_context = "org.springframework" % "spring-context" % SPRING_VERSION % "compile" lazy val stax_api = "javax.xml.stream" % "stax-api" % "1.0-2" % "compile" lazy val thrift = "com.facebook" % "thrift" % "r917130" % "compile" - lazy val voldemort = "voldemort" % "voldemort" % "0.81" % "compile" - lazy val voldemort_contrib = "voldemort" % "voldemort-contrib" % "0.81" % "compile" + lazy val voldemort = "voldemort" % "voldemort" % "0.81" % "compile" + lazy val voldemort_contrib = "voldemort" % "voldemort-contrib" % "0.81" % "compile" + lazy val voldemort_needs_log4j = "log4j" % "log4j" % "1.2.16" % "compile" - lazy val werkz = "org.codehaus.aspectwerkz" % "aspectwerkz-nodeps-jdk5" % ASPECTWERKZ_VERSION % "compile" - lazy val werkz_core = "org.codehaus.aspectwerkz" % "aspectwerkz-jdk5" % 
ASPECTWERKZ_VERSION % "compile" + lazy val werkz = "org.codehaus.aspectwerkz" % "aspectwerkz-nodeps-jdk5" % ASPECTWERKZ_VERSION % "compile" + lazy val werkz_core = "org.codehaus.aspectwerkz" % "aspectwerkz-jdk5" % ASPECTWERKZ_VERSION % "compile" // Test - lazy val camel_spring = "org.apache.camel" % "camel-spring" % CAMEL_VERSION % "test" - lazy val cassandra_clhm = "org.apache.cassandra" % "clhm-production" % CASSANDRA_VERSION % "test" - lazy val commons_coll = "commons-collections" % "commons-collections" % "3.2.1" % "test" - lazy val google_coll = "com.google.collections" % "google-collections" % "1.0" % "test" - lazy val high_scale = "org.apache.cassandra" % "high-scale-lib" % CASSANDRA_VERSION % "test" - lazy val testJetty = "org.eclipse.jetty" % "jetty-server" % JETTY_VERSION % "test" - lazy val testJettyWebApp= "org.eclipse.jetty" % "jetty-webapp" % JETTY_VERSION % "test" + lazy val camel_spring = "org.apache.camel" % "camel-spring" % CAMEL_VERSION % "test" + lazy val cassandra_clhm = "org.apache.cassandra" % "clhm-production" % CASSANDRA_VERSION % "test" + lazy val commons_coll = "commons-collections" % "commons-collections" % "3.2.1" % "test" + lazy val google_coll = "com.google.collections" % "google-collections" % "1.0" % "test" + lazy val high_scale = "org.apache.cassandra" % "high-scale-lib" % CASSANDRA_VERSION % "test" + lazy val testJetty = "org.eclipse.jetty" % "jetty-server" % JETTY_VERSION % "test" + lazy val testJettyWebApp = "org.eclipse.jetty" % "jetty-webapp" % JETTY_VERSION % "test" - lazy val junit = "junit" % "junit" % "4.5" % "test" - lazy val mockito = "org.mockito" % "mockito-all" % "1.8.1" % "test" - lazy val scalatest = "org.scalatest" % "scalatest" % SCALATEST_VERSION % "test" + lazy val junit = "junit" % "junit" % "4.5" % "test" + lazy val mockito = "org.mockito" % "mockito-all" % "1.8.1" % "test" + lazy val scalatest = "org.scalatest" % "scalatest" % SCALATEST_VERSION % "test" + + //voldemort testing /home/sclasen/projects/akka/akka-persistence-voldemort/src/test/resources/ + lazy val jdom = "org.jdom" % "jdom" % "1.1" % "test" + lazy val vold_jetty = "org.mortbay.jetty" % "jetty" % "6.1.18" % "test" + lazy val velocity = "org.apache.velocity" % "velocity" % "1.6.2" % "test" + lazy val bdb = "com.sleepycat" % "je" % "4.0.103" % "test" + lazy val dbcp = "commons-dbcp" % "commons-dbcp" % "1.2.2" % "test" } // ------------------------------------------------------------------------------------------------------------------- // Subprojects // ------------------------------------------------------------------------------------------------------------------- - lazy val akka_actor = project("akka-actor", "akka-actor", new AkkaActorProject(_)) + lazy val akka_actor = project("akka-actor", "akka-actor", new AkkaActorProject(_)) lazy val akka_typed_actor = project("akka-typed-actor", "akka-typed-actor", new AkkaTypedActorProject(_), akka_actor) - lazy val akka_remote = project("akka-remote", "akka-remote", new AkkaRemoteProject(_), akka_typed_actor) - lazy val akka_amqp = project("akka-amqp", "akka-amqp", new AkkaAMQPProject(_), akka_remote) - lazy val akka_http = project("akka-http", "akka-http", new AkkaHttpProject(_), akka_remote, akka_camel) - lazy val akka_camel = project("akka-camel", "akka-camel", new AkkaCamelProject(_), akka_remote) + lazy val akka_remote = project("akka-remote", "akka-remote", new AkkaRemoteProject(_), akka_typed_actor) + lazy val akka_amqp = project("akka-amqp", "akka-amqp", new AkkaAMQPProject(_), akka_remote) + lazy val akka_http = 
project("akka-http", "akka-http", new AkkaHttpProject(_), akka_remote, akka_camel) + lazy val akka_camel = project("akka-camel", "akka-camel", new AkkaCamelProject(_), akka_remote) lazy val akka_persistence = project("akka-persistence", "akka-persistence", new AkkaPersistenceParentProject(_)) - lazy val akka_spring = project("akka-spring", "akka-spring", new AkkaSpringProject(_), akka_remote, akka_camel) - lazy val akka_jta = project("akka-jta", "akka-jta", new AkkaJTAProject(_), akka_remote) - lazy val akka_kernel = project("akka-kernel", "akka-kernel", new AkkaKernelProject(_), - akka_remote, akka_jta, akka_http, akka_spring, akka_camel, akka_persistence, akka_amqp) - lazy val akka_osgi = project("akka-osgi", "akka-osgi", new AkkaOSGiParentProject(_)) - lazy val akka_samples = project("akka-samples", "akka-samples", new AkkaSamplesParentProject(_)) + lazy val akka_spring = project("akka-spring", "akka-spring", new AkkaSpringProject(_), akka_remote, akka_camel) + lazy val akka_jta = project("akka-jta", "akka-jta", new AkkaJTAProject(_), akka_remote) + lazy val akka_kernel = project("akka-kernel", "akka-kernel", new AkkaKernelProject(_), + akka_remote, akka_jta, akka_http, akka_spring, akka_camel, akka_persistence, akka_amqp) + lazy val akka_osgi = project("akka-osgi", "akka-osgi", new AkkaOSGiParentProject(_)) + lazy val akka_samples = project("akka-samples", "akka-samples", new AkkaSamplesParentProject(_)) // ------------------------------------------------------------------------------------------------------------------- // Miscellaneous @@ -253,37 +266,37 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { (IMPLEMENTATION_TITLE, "Akka"), (IMPLEMENTATION_URL, "http://akkasource.org"), (IMPLEMENTATION_VENDOR, "The Akka Project") - )).toList ::: - getMainClass(false).map(MainClass(_)).toList + )).toList ::: + getMainClass(false).map(MainClass(_)).toList // create a manifest with all akka jars and dependency jars on classpath override def manifestClassPath = Some(allArtifacts.getFiles - .filter(_.getName.endsWith(".jar")) - .filter(!_.getName.contains("servlet_2.4")) - .filter(!_.getName.contains("scala-library")) - .map("lib_managed/scala_%s/compile/".format(buildScalaVersion) + _.getName) - .mkString(" ") + - " config/" + - " scala-library.jar" + - " dist/akka-actor_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-typed-actor_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-remote_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-http_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-camel_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-amqp_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-persistence-common_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-persistence-redis_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-persistence-mongo_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-persistence-cassandra_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-kernel_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-spring_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-jta_%s-%s.jar".format(buildScalaVersion, version) + .filter(_.getName.endsWith(".jar")) + .filter(!_.getName.contains("servlet_2.4")) + .filter(!_.getName.contains("scala-library")) + .map("lib_managed/scala_%s/compile/".format(buildScalaVersion) + _.getName) + .mkString(" ") + + " config/" + + " scala-library.jar" + + " 
dist/akka-actor_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-typed-actor_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-remote_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-http_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-camel_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-amqp_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-persistence-common_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-persistence-redis_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-persistence-mongo_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-persistence-cassandra_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-kernel_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-spring_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-jta_%s-%s.jar".format(buildScalaVersion, version) ) //Exclude slf4j1.5.11 from the classpath, it's conflicting... override def fullClasspath(config: Configuration): PathFinder = { super.fullClasspath(config) --- - (super.fullClasspath(config) ** "slf4j*1.5.11.jar") + (super.fullClasspath(config) ** "slf4j*1.5.11.jar") } override def mainResources = super.mainResources +++ @@ -304,57 +317,60 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { //override def documentOptions = encodingUtf8.map(SimpleDocOption(_)) override def packageDocsJar = defaultJarPath("-docs.jar") - override def packageSrcJar= defaultJarPath("-sources.jar") + + override def packageSrcJar = defaultJarPath("-sources.jar") + override def packageToPublishActions = super.packageToPublishActions ++ Seq(packageDocs, packageSrc) override def pomExtra = 2009 - http://akkasource.org - - Scalable Solutions AB - http://scalablesolutions.se - - - - Apache 2 - http://www.apache.org/licenses/LICENSE-2.0.txt - repo - - + http://akkasource.org + + Scalable Solutions AB + http://scalablesolutions.se + + + + Apache 2 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + // publish to local mvn import Process._ lazy val publishLocalMvn = runMvnInstall + def runMvnInstall = task { for (absPath <- akkaArtifacts.getPaths) { val artifactRE = """(.*)/dist/(.*)-(.*).jar""".r val artifactRE(path, artifactId, artifactVersion) = absPath val command = "mvn install:install-file" + - " -Dfile=" + absPath + - " -DgroupId=se.scalablesolutions.akka" + - " -DartifactId=" + artifactId + - " -Dversion=" + version + - " -Dpackaging=jar -DgeneratePom=true" + " -Dfile=" + absPath + + " -DgroupId=se.scalablesolutions.akka" + + " -DartifactId=" + artifactId + + " -Dversion=" + version + + " -Dpackaging=jar -DgeneratePom=true" command ! 
log } None - } dependsOn(dist) describedAs("Run mvn install for artifacts in dist.") + } dependsOn (dist) describedAs ("Run mvn install for artifacts in dist.") // ------------------------------------------------------------------------------------------------------------------- // akka-actor subproject // ------------------------------------------------------------------------------------------------------------------- class AkkaActorProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val configgy = Dependencies.configgy - val hawtdispatch = Dependencies.hawtdispatch - val multiverse = Dependencies.multiverse - val jsr166x = Dependencies.jsr166x - val slf4j = Dependencies.slf4j - val logback = Dependencies.logback - val logback_core = Dependencies.logback_core + val configgy = Dependencies.configgy + val hawtdispatch = Dependencies.hawtdispatch + val multiverse = Dependencies.multiverse + val jsr166x = Dependencies.jsr166x + val slf4j = Dependencies.slf4j + val logback = Dependencies.logback + val logback_core = Dependencies.logback_core // testing - val junit = Dependencies.junit + val junit = Dependencies.junit val scalatest = Dependencies.scalatest } @@ -363,13 +379,13 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- class AkkaTypedActorProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val aopalliance = Dependencies.aopalliance - val werkz = Dependencies.werkz - val werkz_core = Dependencies.werkz_core - val guicey = Dependencies.guicey + val aopalliance = Dependencies.aopalliance + val werkz = Dependencies.werkz + val werkz_core = Dependencies.werkz_core + val guicey = Dependencies.guicey // testing - val junit = Dependencies.junit + val junit = Dependencies.junit val scalatest = Dependencies.scalatest } @@ -379,22 +395,22 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaRemoteProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { val commons_codec = Dependencies.commons_codec - val commons_io = Dependencies.commons_io + val commons_io = Dependencies.commons_io val dispatch_http = Dependencies.dispatch_http val dispatch_json = Dependencies.dispatch_json - val guicey = Dependencies.guicey - val h2_lzf = Dependencies.h2_lzf - val jackson = Dependencies.jackson - val jackson_core = Dependencies.jackson_core - val jgroups = Dependencies.jgroups - val jta_1_1 = Dependencies.jta_1_1 - val netty = Dependencies.netty - val protobuf = Dependencies.protobuf - val sbinary = Dependencies.sbinary - val sjson = Dependencies.sjson + val guicey = Dependencies.guicey + val h2_lzf = Dependencies.h2_lzf + val jackson = Dependencies.jackson + val jackson_core = Dependencies.jackson_core + val jgroups = Dependencies.jgroups + val jta_1_1 = Dependencies.jta_1_1 + val netty = Dependencies.netty + val protobuf = Dependencies.protobuf + val sbinary = Dependencies.sbinary + val sjson = Dependencies.sjson // testing - val junit = Dependencies.junit + val junit = Dependencies.junit val scalatest = Dependencies.scalatest override def bndImportPackage = "javax.transaction;version=1.1" :: super.bndImportPackage.toList @@ -406,13 +422,13 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaAMQPProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { val commons_io = Dependencies.commons_io - val rabbit = Dependencies.rabbit 
- val protobuf = Dependencies.protobuf + val rabbit = Dependencies.rabbit + val protobuf = Dependencies.protobuf // testing - val junit = Dependencies.junit + val junit = Dependencies.junit val multiverse = Dependencies.multiverse - val scalatest = Dependencies.scalatest + val scalatest = Dependencies.scalatest } // ------------------------------------------------------------------------------------------------------------------- @@ -420,28 +436,28 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- class AkkaHttpProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val annotation = Dependencies.annotation - val atmo = Dependencies.atmo - val atmo_jbossweb = Dependencies.atmo_jbossweb - val atmo_jersey = Dependencies.atmo_jersey - val atmo_runtime = Dependencies.atmo_runtime - val atmo_tomcat = Dependencies.atmo_tomcat - val atmo_weblogic = Dependencies.atmo_weblogic - val jetty = Dependencies.jetty - val jetty_util = Dependencies.jetty_util - val jetty_xml = Dependencies.jetty_xml - val jetty_servlet = Dependencies.jetty_servlet + val annotation = Dependencies.annotation + val atmo = Dependencies.atmo + val atmo_jbossweb = Dependencies.atmo_jbossweb + val atmo_jersey = Dependencies.atmo_jersey + val atmo_runtime = Dependencies.atmo_runtime + val atmo_tomcat = Dependencies.atmo_tomcat + val atmo_weblogic = Dependencies.atmo_weblogic + val jetty = Dependencies.jetty + val jetty_util = Dependencies.jetty_util + val jetty_xml = Dependencies.jetty_xml + val jetty_servlet = Dependencies.jetty_servlet val jackson_core_asl = Dependencies.jackson_core_asl - val jersey = Dependencies.jersey - val jersey_contrib = Dependencies.jersey_contrib - val jersey_json = Dependencies.jersey_json - val jersey_server = Dependencies.jersey_server - val jsr311 = Dependencies.jsr311 - val stax_api = Dependencies.stax_api + val jersey = Dependencies.jersey + val jersey_contrib = Dependencies.jersey_contrib + val jersey_json = Dependencies.jersey_json + val jersey_server = Dependencies.jersey_server + val jsr311 = Dependencies.jsr311 + val stax_api = Dependencies.stax_api // testing - val junit = Dependencies.junit - val mockito = Dependencies.mockito + val junit = Dependencies.junit + val mockito = Dependencies.mockito val scalatest = Dependencies.scalatest } @@ -476,7 +492,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaPersistenceCommonProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { val commons_pool = Dependencies.commons_pool - val thrift = Dependencies.thrift + val thrift = Dependencies.thrift } // ------------------------------------------------------------------------------------------------------------------- @@ -485,7 +501,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaRedisProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { val commons_codec = Dependencies.commons_codec - val redis = Dependencies.redis + val redis = Dependencies.redis override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil } @@ -506,30 +522,38 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- class AkkaCassandraProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { 
- val cassandra = Dependencies.cassandra + val cassandra = Dependencies.cassandra // testing val cassandra_clhm = Dependencies.cassandra_clhm - val commons_coll = Dependencies.commons_coll - val google_coll = Dependencies.google_coll - val high_scale = Dependencies.high_scale + val commons_coll = Dependencies.commons_coll + val google_coll = Dependencies.google_coll + val high_scale = Dependencies.high_scale override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil } - // ------------------------------------------------------------------------------------------------------------------- - // akka-persistence-voldemort subproject - // ------------------------------------------------------------------------------------------------------------------- + // ------------------------------------------------------------------------------------------------------------------- + // akka-persistence-voldemort subproject + // ------------------------------------------------------------------------------------------------------------------- - class AkkaVoldemortProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val voldemort = Dependencies.voldemort - val voldemort_contrib = Dependencies.voldemort_contrib + class AkkaVoldemortProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { + val voldemort = Dependencies.voldemort + val voldemort_contrib = Dependencies.voldemort_contrib + val voldemort_needs_log4j = Dependencies.voldemort_needs_log4j - //testing - val scalatest = Dependencies.scalatest - override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil - } + //testing + val scalatest = Dependencies.scalatest + val google_coll = Dependencies.google_coll + val jdom = Dependencies.jdom + val jetty = Dependencies.vold_jetty + val velocity = Dependencies.velocity + val bdb = Dependencies.bdb + val dbcp = Dependencies.dbcp + + override def testOptions = TestFilter((name: String) => name.endsWith("Suite")) :: Nil + } @@ -545,13 +569,13 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- class AkkaSpringProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val spring_beans = Dependencies.spring_beans + val spring_beans = Dependencies.spring_beans val spring_context = Dependencies.spring_context // testing val camel_spring = Dependencies.camel_spring - val junit = Dependencies.junit - val scalatest = Dependencies.scalatest + val junit = Dependencies.junit + val scalatest = Dependencies.scalatest } // ------------------------------------------------------------------------------------------------------------------- @@ -559,7 +583,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- class AkkaJTAProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val atomikos_transactions = Dependencies.atomikos_transactions + val atomikos_transactions = Dependencies.atomikos_transactions val atomikos_transactions_api = Dependencies.atomikos_transactions_api val atomikos_transactions_jta = Dependencies.atomikos_transactions_jta //val jta_1_1 = Dependencies.jta_1_1 @@ -575,15 +599,18 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { new AkkaOSGiDependenciesBundleProject(_), akka_kernel, 
akka_jta) // akka_kernel does not depend on akka_jta (why?) therefore we list akka_jta here lazy val akka_osgi_assembly = project("akka-osgi-assembly", "akka-osgi-assembly", new AkkaOSGiAssemblyProject(_), akka_osgi_dependencies_bundle, akka_remote, akka_amqp, akka_http, - akka_camel, akka_spring, akka_jta, akka_persistence.akka_persistence_common, - akka_persistence.akka_persistence_redis, akka_persistence.akka_persistence_mongo, - akka_persistence.akka_persistence_cassandra) + akka_camel, akka_spring, akka_jta, akka_persistence.akka_persistence_common, + akka_persistence.akka_persistence_redis, akka_persistence.akka_persistence_mongo, + akka_persistence.akka_persistence_cassandra) } class AkkaOSGiDependenciesBundleProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with BNDPlugin { override def bndClasspath = compileClasspath + override def bndPrivatePackage = Seq("") + override def bndImportPackage = Seq("*;resolution:=optional") + override def bndExportPackage = Seq( "org.aopalliance.*;version=1.0.0", @@ -611,36 +638,36 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { val scala_bundle = "com.weiglewilczek.scala-lang-osgi" % "scala-library" % buildScalaVersion % "compile" intransitive // Camel bundles - val camel_core = Dependencies.camel_core.intransitive + val camel_core = Dependencies.camel_core.intransitive val fusesource_commonman = "org.fusesource.commonman" % "commons-management" % "1.0" intransitive // Spring bundles - val spring_beans = Dependencies.spring_beans.intransitive - val spring_context = Dependencies.spring_context.intransitive - val spring_aop = "org.springframework" % "spring-aop" % SPRING_VERSION % "compile" intransitive - val spring_asm = "org.springframework" % "spring-asm" % SPRING_VERSION % "compile" intransitive - val spring_core = "org.springframework" % "spring-core" % SPRING_VERSION % "compile" intransitive + val spring_beans = Dependencies.spring_beans.intransitive + val spring_context = Dependencies.spring_context.intransitive + val spring_aop = "org.springframework" % "spring-aop" % SPRING_VERSION % "compile" intransitive + val spring_asm = "org.springframework" % "spring-asm" % SPRING_VERSION % "compile" intransitive + val spring_core = "org.springframework" % "spring-core" % SPRING_VERSION % "compile" intransitive val spring_expression = "org.springframework" % "spring-expression" % SPRING_VERSION % "compile" intransitive - val spring_jms = "org.springframework" % "spring-jms" % SPRING_VERSION % "compile" intransitive - val spring_tx = "org.springframework" % "spring-tx" % SPRING_VERSION % "compile" intransitive + val spring_jms = "org.springframework" % "spring-jms" % SPRING_VERSION % "compile" intransitive + val spring_tx = "org.springframework" % "spring-tx" % SPRING_VERSION % "compile" intransitive - val commons_codec = Dependencies.commons_codec.intransitive - val commons_io = Dependencies.commons_io.intransitive - val commons_pool = Dependencies.commons_pool.intransitive - val guicey = Dependencies.guicey.intransitive - val jackson = Dependencies.jackson.intransitive - val jackson_core = Dependencies.jackson_core.intransitive - val jsr311 = Dependencies.jsr311.intransitive - val jta_1_1 = Dependencies.jta_1_1.intransitive - val netty = Dependencies.netty.intransitive - val commons_fileupload = "commons-fileupload" % "commons-fileupload" % "1.2.1" % "compile" intransitive - val jms_1_1 = "org.apache.geronimo.specs" % "geronimo-jms_1.1_spec" % "1.1.1" % "compile" intransitive - val joda = "joda-time" % 
"joda-time" % "1.6" intransitive + val commons_codec = Dependencies.commons_codec.intransitive + val commons_io = Dependencies.commons_io.intransitive + val commons_pool = Dependencies.commons_pool.intransitive + val guicey = Dependencies.guicey.intransitive + val jackson = Dependencies.jackson.intransitive + val jackson_core = Dependencies.jackson_core.intransitive + val jsr311 = Dependencies.jsr311.intransitive + val jta_1_1 = Dependencies.jta_1_1.intransitive + val netty = Dependencies.netty.intransitive + val commons_fileupload = "commons-fileupload" % "commons-fileupload" % "1.2.1" % "compile" intransitive + val jms_1_1 = "org.apache.geronimo.specs" % "geronimo-jms_1.1_spec" % "1.1.1" % "compile" intransitive + val joda = "joda-time" % "joda-time" % "1.6" intransitive override def packageAction = task { val libs: Seq[Path] = managedClasspath(config("compile")).get.toSeq - val prjs: Seq[Path] = info.dependencies.toSeq.asInstanceOf[Seq[DefaultProject]] map { _.jarPath } + val prjs: Seq[Path] = info.dependencies.toSeq.asInstanceOf[Seq[DefaultProject]] map {_.jarPath} val all = libs ++ prjs val destination = outputPath / "bundles" FileUtilities.copyFlat(all, destination, log) @@ -691,7 +718,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { - + @@ -702,8 +729,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaSampleSecurityProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) { val commons_codec = Dependencies.commons_codec - val jsr250 = Dependencies.jsr250 - val jsr311 = Dependencies.jsr311 + val jsr250 = Dependencies.jsr250 + val jsr311 = Dependencies.jsr311 } class AkkaSampleOSGiProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with BNDPlugin { @@ -740,63 +767,71 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- def removeDupEntries(paths: PathFinder) = - Path.lazyPathFinder { - val mapped = paths.get map { p => (p.relativePath, p) } - (Map() ++ mapped).values.toList - } + Path.lazyPathFinder { + val mapped = paths.get map {p => (p.relativePath, p)} + (Map() ++ mapped).values.toList + } def allArtifacts = { Path.fromFile(buildScalaInstance.libraryJar) +++ - (removeDupEntries(runClasspath filter ClasspathUtilities.isArchive) +++ - ((outputPath ##) / defaultJarName) +++ - mainResources +++ - mainDependencies.scalaJars +++ - descendents(info.projectPath / "scripts", "run_akka.sh") +++ - descendents(info.projectPath / "scripts", "akka-init-script.sh") +++ - descendents(info.projectPath / "dist", "*.jar") +++ - descendents(info.projectPath / "deploy", "*.jar") +++ - descendents(path("lib") ##, "*.jar") +++ - descendents(configurationPath(Configurations.Compile) ##, "*.jar")) - .filter(jar => // remove redundant libs - !jar.toString.endsWith("stax-api-1.0.1.jar") || - !jar.toString.endsWith("scala-library-2.7.7.jar") - ) + (removeDupEntries(runClasspath filter ClasspathUtilities.isArchive) +++ + ((outputPath ##) / defaultJarName) +++ + mainResources +++ + mainDependencies.scalaJars +++ + descendents(info.projectPath / "scripts", "run_akka.sh") +++ + descendents(info.projectPath / "scripts", "akka-init-script.sh") +++ + descendents(info.projectPath / "dist", "*.jar") +++ + descendents(info.projectPath / "deploy", "*.jar") +++ + descendents(path("lib") ##, "*.jar") +++ + descendents(configurationPath(Configurations.Compile) ##, "*.jar")) + 
.filter(jar => // remove redundant libs + !jar.toString.endsWith("stax-api-1.0.1.jar") || + !jar.toString.endsWith("scala-library-2.7.7.jar") + ) } - def akkaArtifacts = descendents(info.projectPath / "dist", "*" + buildScalaVersion + "-" + version + ".jar") + def akkaArtifacts = descendents(info.projectPath / "dist", "*" + buildScalaVersion + "-" + version + ".jar") // ------------------------------------------------------------ class AkkaDefaultProject(info: ProjectInfo, val deployPath: Path) extends DefaultProject(info) with DeployProject with OSGiProject { lazy val sourceArtifact = Artifact(this.artifactID, "sources", "jar", Some("sources"), Nil, None) lazy val docsArtifact = Artifact(this.artifactID, "docs", "jar", Some("docs"), Nil, None) + override def runClasspath = super.runClasspath +++ (AkkaParentProject.this.info.projectPath / "config") + override def testClasspath = super.testClasspath +++ (AkkaParentProject.this.info.projectPath / "config") + override def packageDocsJar = this.defaultJarPath("-docs.jar") - override def packageSrcJar = this.defaultJarPath("-sources.jar") + + override def packageSrcJar = this.defaultJarPath("-sources.jar") + override def packageToPublishActions = super.packageToPublishActions ++ Seq(this.packageDocs, this.packageSrc) } } -trait DeployProject { self: BasicScalaProject => +trait DeployProject { + self: BasicScalaProject => // defines where the deployTask copies jars to def deployPath: Path - lazy val dist = deployTask(jarPath, packageDocsJar, packageSrcJar, deployPath, true, true, true) dependsOn( - `package`, packageDocs, packageSrc) describedAs("Deploying") + lazy val dist = deployTask(jarPath, packageDocsJar, packageSrcJar, deployPath, true, true, true) dependsOn ( + `package`, packageDocs, packageSrc) describedAs ("Deploying") + def deployTask(jar: Path, docs: Path, src: Path, toDir: Path, genJar: Boolean, genDocs: Boolean, genSource: Boolean) = task { def gen(jar: Path, toDir: Path, flag: Boolean, msg: String): Option[String] = - if (flag) { - log.info(msg + " " + jar) - FileUtilities.copyFile(jar, toDir / jar.name, log) - } else None + if (flag) { + log.info(msg + " " + jar) + FileUtilities.copyFile(jar, toDir / jar.name, log) + } else None gen(jar, toDir, genJar, "Deploying bits") orElse - gen(docs, toDir, genDocs, "Deploying docs") orElse - gen(src, toDir, genSource, "Deploying sources") + gen(docs, toDir, genDocs, "Deploying docs") orElse + gen(src, toDir, genSource, "Deploying sources") } } -trait OSGiProject extends BNDPlugin { self: DefaultProject => +trait OSGiProject extends BNDPlugin { + self: DefaultProject => override def bndExportPackage = Seq("se.scalablesolutions.akka.*;version=%s".format(projectVersion.value)) } From f8f4b26d948a43ca3f5eb8c9942d4f3a0c9945e0 Mon Sep 17 00:00:00 2001 From: ticktock Date: Wed, 15 Sep 2010 19:37:21 -0400 Subject: [PATCH 05/52] switched voldemort to log4j-over-slf4j --- project/build/AkkaProject.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index b74dfea6e2..baafa848f8 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -209,7 +209,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val voldemort = "voldemort" % "voldemort" % "0.81" % "compile" lazy val voldemort_contrib = "voldemort" % "voldemort-contrib" % "0.81" % "compile" - lazy val voldemort_needs_log4j = "log4j" % "log4j" % "1.2.16" % "compile" + lazy val voldemort_needs_log4j = 
"org.slf4j" % "log4j-over-slf4j" % SLF4J_VERSION % "compile" lazy val werkz = "org.codehaus.aspectwerkz" % "aspectwerkz-nodeps-jdk5" % ASPECTWERKZ_VERSION % "compile" lazy val werkz_core = "org.codehaus.aspectwerkz" % "aspectwerkz-jdk5" % ASPECTWERKZ_VERSION % "compile" From e8c88b5aa3585ddf09eee6b666f6eda79ede879c Mon Sep 17 00:00:00 2001 From: ticktock Date: Wed, 15 Sep 2010 19:37:54 -0400 Subject: [PATCH 06/52] Initial tests working with bdb backed voldemort, --- .../main/scala/VoldemortStorageBackend.scala | 3 +- .../src/test/resources/config/stores.xml | 10 ++-- .../src/test/scala/EmbeddedVoldemort.scala | 14 +++-- .../scala/VoldemortStorageBackendSuite.scala | 56 +++++++++++++------ 4 files changed, 54 insertions(+), 29 deletions(-) diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala index 6915a7bca8..f0912a411d 100644 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala @@ -21,6 +21,7 @@ import collection.mutable.{Map, Set, HashSet, ArrayBuffer} import java.util.{Map => JMap} + private[akka] object VoldemortStorageBackend extends MapStorageBackend[Array[Byte], Array[Byte]] with VectorStorageBackend[Array[Byte]] with @@ -229,7 +230,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with def getIndexFromVectorValueKey(owner: String, key: Array[Byte]): Int = { val indexBytes = new Array[Byte](IntSerializer.bytesPerInt) - System.arraycopy(key, key.length - IntSerializer.bytesPerInt - 1, indexBytes, 0, IntSerializer.bytesPerInt) + System.arraycopy(key, key.length - IntSerializer.bytesPerInt , indexBytes, 0, IntSerializer.bytesPerInt) IntSerializer.fromBytes(indexBytes) } diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml index b0491ea9a4..78f1b1385a 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml +++ b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml @@ -6,7 +6,7 @@ 1 1 1 - memory + bdb client string @@ -23,7 +23,7 @@ 1 1 1 - memory + bdb client identity @@ -39,7 +39,7 @@ 1 1 1 - memory + bdb client string @@ -56,7 +56,7 @@ 1 1 1 - memory + bdb client identity @@ -72,7 +72,7 @@ 1 1 1 - memory + bdb client string diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala index 5631542a78..395825152e 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala @@ -1,29 +1,33 @@ package se.scalablesolutions.akka.persistence.voldemort import org.scalatest.matchers.ShouldMatchers -import se.scalablesolutions.akka.util.UUID import voldemort.server.{VoldemortServer, VoldemortConfig} import org.scalatest.{Suite, BeforeAndAfterAll, FunSuite} import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import voldemort.utils.Utils import java.io.File +import se.scalablesolutions.akka.util.{Logging, UUID} @RunWith(classOf[JUnitRunner]) -trait EmbeddedVoldemort extends BeforeAndAfterAll { +trait EmbeddedVoldemort extends BeforeAndAfterAll with Logging { this: Suite => var 
server: VoldemortServer = null override protected def beforeAll(): Unit = { try { - val dir = "./akka-persistence/akka-persistence-voldemort/src/test/resources" + val dir = "./akka-persistence/akka-persistence-voldemort/target/scala_2.8.0/test-resources" val home = new File(dir) + log.info("Creating Voldemort Config") val config = VoldemortConfig.loadFromVoldemortHome(home.getCanonicalPath) + log.info("Starting Voldemort") server = new VoldemortServer(config) - server.start + server.start + log.info("Started") } catch { - case e => e.printStackTrace + case e => log.error(e, "Error Starting Voldemort") + throw e } } diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala index 68f1ebed0f..d589e79f0c 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala @@ -7,46 +7,66 @@ import org.scalatest.junit.JUnitRunner import se.scalablesolutions.akka.persistence.voldemort.VoldemortStorageBackend._ import se.scalablesolutions.akka.util.{Logging, UUID} import collection.immutable.TreeSet +import VoldemortStorageBackendSuite._ @RunWith(classOf[JUnitRunner]) class VoldemortStorageBackendSuite extends FunSuite with ShouldMatchers with EmbeddedVoldemort with Logging { test("that ref storage and retrieval works") { - refClient.put("testRef", "testRefValue".getBytes("UTF-8")) - new String(refClient.getValue("testRef", Array.empty[Byte]), "UTF-8") should be("testRefValue") + val key = "testRef" + val value = "testRefValue" + val valueBytes = bytes(value) + refClient.delete(key) + refClient.getValue(key, empty) should be(empty) + refClient.put(key, valueBytes) + refClient.getValue(key) should be(valueBytes) } test("that map key storage and retrieval works") { - val mapKeys = new TreeSet[Array[Byte]] + "key1".getBytes - mapKeyClient.put("testMapKey", mapKeys) - val returned = mapKeyClient.getValue("testMapKey", new TreeSet[Array[Byte]]) - returned should equal(mapKeys) + val key = "testmapKey" + val mapKeys = new TreeSet[Array[Byte]] + bytes("key1") + mapKeyClient.delete(key) + mapKeyClient.getValue(key, emptySet) should equal(emptySet) + mapKeyClient.put(key, mapKeys) + mapKeyClient.getValue(key, emptySet) should equal(mapKeys) + } test("that map value storage and retrieval works") { - val key = "keyForTestingMapValueClient".getBytes("UTF-8") - val value = "value for testing map value client".getBytes("UTF-8") + val key = bytes("keyForTestingMapValueClient") + val value = bytes("value for testing map value client") mapValueClient.put(key, value) - mapValueClient.getValue(key) should equal(value) + mapValueClient.getValue(key, empty) should equal(value) } test("that vector size storage and retrieval works") { val key = "vectorKey" - vectorSizeClient.put(key, IntSerializer.toBytes(17)) - vectorSizeClient.getValue(key) should equal(IntSerializer.toBytes(17)) + val size = IntSerializer.toBytes(17) + vectorSizeClient.delete(key) + vectorSizeClient.getValue(key, empty) should equal(empty) + vectorSizeClient.put(key, size) + vectorSizeClient.getValue(key) should equal(size) } test("that vector value storage and retrieval works") { val key = "vectorValueKey" val index = 3 - val value = "some bytes".getBytes("UTF-8") + val value = bytes("some bytes") val vecKey = getVectorValueKey(key, index) - try{ - val idx = 
getIndexFromVectorValueKey(key, vecKey) + getIndexFromVectorValueKey(key, vecKey) should be(index) + vectorValueClient.delete(vecKey) + vectorValueClient.getValue(vecKey, empty) should equal(empty) vectorValueClient.put(vecKey, value) - vectorValueClient.get(vecKey) should equal(value) - } catch{ - case e => e.printStackTrace - } + vectorValueClient.getValue(vecKey) should equal(value) + } + +} + +object VoldemortStorageBackendSuite { + val empty = Array.empty[Byte] + val emptySet = new TreeSet[Array[Byte]] + + def bytes(value: String): Array[Byte] = { + value.getBytes("UTF-8") } } \ No newline at end of file From 0fd957ae32e52d4689f70aae1f5a0ce7fd46bd3c Mon Sep 17 00:00:00 2001 From: ticktock Date: Wed, 15 Sep 2010 20:20:56 -0400 Subject: [PATCH 07/52] more tests, working on map api --- .../main/scala/VoldemortStorageBackend.scala | 16 +++++----- .../scala/VoldemortStorageBackendSuite.scala | 30 +++++++++++++++++++ 2 files changed, 38 insertions(+), 8 deletions(-) diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala index f0912a411d..fb40614adc 100644 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala @@ -21,7 +21,6 @@ import collection.mutable.{Map, Set, HashSet, ArrayBuffer} import java.util.{Map => JMap} - private[akka] object VoldemortStorageBackend extends MapStorageBackend[Array[Byte], Array[Byte]] with VectorStorageBackend[Array[Byte]] with @@ -78,16 +77,17 @@ MapStorageBackend[Array[Byte], Array[Byte]] with private def getKeyValues(keys: SortedSet[Array[Byte]]): List[(Array[Byte], Array[Byte])] = { val all: JMap[Array[Byte], Versioned[Array[Byte]]] = mapValueClient.getAll(JavaConversions.asIterable(keys)) - JavaConversions.asMap(all).foldLeft(new ArrayBuffer[(Array[Byte], Array[Byte])](all.size)) { - (buf, keyVal) => { - keyVal match { - case (key, versioned) => { + val buf = new ArrayBuffer[(Array[Byte], Array[Byte])](all.size) + JavaConversions.asMap(all).foreach { + (entry) => { + entry match { + case (key: Array[Byte], versioned: Versioned[Array[Byte]]) => { buf += key -> versioned.getValue } } - buf } - }.toList + } + buf.toList } def getMapStorageSizeFor(name: String): Int = { @@ -230,7 +230,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with def getIndexFromVectorValueKey(owner: String, key: Array[Byte]): Int = { val indexBytes = new Array[Byte](IntSerializer.bytesPerInt) - System.arraycopy(key, key.length - IntSerializer.bytesPerInt , indexBytes, 0, IntSerializer.bytesPerInt) + System.arraycopy(key, key.length - IntSerializer.bytesPerInt, indexBytes, 0, IntSerializer.bytesPerInt) IntSerializer.fromBytes(indexBytes) } diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala index d589e79f0c..0a2deb6463 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala @@ -21,6 +21,16 @@ class VoldemortStorageBackendSuite extends FunSuite with ShouldMatchers with Emb refClient.getValue(key) should be(valueBytes) } + test("PersistentRef apis function as expected") { + val key = "apiTestRef" + val value = 
"apiTestRefValue" + val valueBytes = bytes(value) + refClient.delete(key) + getRefStorageFor(key) should be(None) + insertRefStorageFor(key, valueBytes) + getRefStorageFor(key).get should equal(valueBytes) + } + test("that map key storage and retrieval works") { val key = "testmapKey" val mapKeys = new TreeSet[Array[Byte]] + bytes("key1") @@ -38,6 +48,26 @@ class VoldemortStorageBackendSuite extends FunSuite with ShouldMatchers with Emb mapValueClient.getValue(key, empty) should equal(value) } + + test("PersistentMap apis function as expected") { + val name = "theMap" + val key = bytes("mapkey") + val value = bytes("mapValue") + removeMapStorageFor(name,key) + removeMapStorageFor(name) + getMapStorageEntryFor(name,key) should be (None) + getMapStorageSizeFor(name) should be (0) + getMapStorageFor(name).length should be(0) + getMapStorageRangeFor(name,None,None,100).length should be (0) + insertMapStorageEntryFor(name,key,value) + getMapStorageEntryFor(name,key).get should equal(value) + getMapStorageSizeFor(name) should be (1) + getMapStorageFor(name).length should be(1) + getMapStorageRangeFor(name,None,None,100).length should be (1) + + + } + test("that vector size storage and retrieval works") { val key = "vectorKey" val size = IntSerializer.toBytes(17) From cb0bc2d6394725b0688e4677e2f286a9311ac968 Mon Sep 17 00:00:00 2001 From: ticktock Date: Wed, 15 Sep 2010 22:39:34 -0400 Subject: [PATCH 08/52] tests of PersistentRef,Map,Vector StorageBackend working --- .../main/scala/VoldemortStorageBackend.scala | 30 +++++---- .../scala/VoldemortStorageBackendSuite.scala | 64 ++++++++++++++++--- 2 files changed, 72 insertions(+), 22 deletions(-) diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala index fb40614adc..d0efd7347e 100644 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala @@ -67,16 +67,20 @@ MapStorageBackend[Array[Byte], Array[Byte]] with def getMapStorageRangeFor(name: String, start: Option[Array[Byte]], finish: Option[Array[Byte]], count: Int): List[(Array[Byte], Array[Byte])] = { val allkeys: SortedSet[Array[Byte]] = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]) val range = allkeys.rangeImpl(start, finish).take(count) - getKeyValues(range) + getKeyValues(name, range) } def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = { val keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) - getKeyValues(keys) + getKeyValues(name, keys) } - private def getKeyValues(keys: SortedSet[Array[Byte]]): List[(Array[Byte], Array[Byte])] = { - val all: JMap[Array[Byte], Versioned[Array[Byte]]] = mapValueClient.getAll(JavaConversions.asIterable(keys)) + private def getKeyValues(name: String, keys: SortedSet[Array[Byte]]): List[(Array[Byte], Array[Byte])] = { + val all: JMap[Array[Byte], Versioned[Array[Byte]]] = + mapValueClient.getAll(JavaConversions.asIterable(keys.map { + mapKey => getKey(name, mapKey) + })) + val buf = new ArrayBuffer[(Array[Byte], Array[Byte])](all.size) JavaConversions.asMap(all).foreach { (entry) => { @@ -155,26 +159,26 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } else { count } - val seq: IndexedSeq[Array[Byte]] = (st to st + cnt).map { + val seq: IndexedSeq[Array[Byte]] = (st until st + cnt).map { index => getVectorValueKey(name, index) } val all: 
JMap[Array[Byte], Versioned[Array[Byte]]] = vectorValueClient.getAll(JavaConversions.asIterable(seq)) - val buf = new ArrayBuffer[Array[Byte]](seq.size) + var storage = new ArrayBuffer[Array[Byte]](seq.size) + storage = storage.padTo(seq.size, Array.empty[Byte]) + var idx = 0; seq.foreach { key => { - val index = getIndexFromVectorValueKey(name, key) - var value: Array[Byte] = null if (all.containsKey(key)) { - value = all.get(key).getValue - } else { - value = Array.empty[Byte] + storage.update(idx, all.get(key).getValue) } - buf.update(index, value) + idx += 1 } } - buf.toList + log.info("StorageSize:" + storage.size) + log.info("SeqSize:" + seq.size) + storage.toList } diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala index 0a2deb6463..8906daa5fb 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala @@ -53,18 +53,33 @@ class VoldemortStorageBackendSuite extends FunSuite with ShouldMatchers with Emb val name = "theMap" val key = bytes("mapkey") val value = bytes("mapValue") - removeMapStorageFor(name,key) + removeMapStorageFor(name, key) removeMapStorageFor(name) - getMapStorageEntryFor(name,key) should be (None) - getMapStorageSizeFor(name) should be (0) + getMapStorageEntryFor(name, key) should be(None) + getMapStorageSizeFor(name) should be(0) getMapStorageFor(name).length should be(0) - getMapStorageRangeFor(name,None,None,100).length should be (0) - insertMapStorageEntryFor(name,key,value) - getMapStorageEntryFor(name,key).get should equal(value) - getMapStorageSizeFor(name) should be (1) - getMapStorageFor(name).length should be(1) - getMapStorageRangeFor(name,None,None,100).length should be (1) + getMapStorageRangeFor(name, None, None, 100).length should be(0) + insertMapStorageEntryFor(name, key, value) + + getMapStorageEntryFor(name, key).get should equal(value) + getMapStorageSizeFor(name) should be(1) + getMapStorageFor(name).length should be(1) + getMapStorageRangeFor(name, None, None, 100).length should be(1) + + removeMapStorageFor(name, key) + removeMapStorageFor(name) + getMapStorageEntryFor(name, key) should be(None) + getMapStorageSizeFor(name) should be(0) + getMapStorageFor(name).length should be(0) + getMapStorageRangeFor(name, None, None, 100).length should be(0) + + insertMapStorageEntriesFor(name, List(key -> value)) + + getMapStorageEntryFor(name, key).get should equal(value) + getMapStorageSizeFor(name) should be(1) + getMapStorageFor(name).length should be(1) + getMapStorageRangeFor(name, None, None, 100).length should be(1) } @@ -89,6 +104,37 @@ class VoldemortStorageBackendSuite extends FunSuite with ShouldMatchers with Emb vectorValueClient.getValue(vecKey) should equal(value) } + test("PersistentVector apis function as expected") { + val key = "vectorApiKey" + val value = bytes("Some bytes we want to store in a vector") + val updatedValue = bytes("Some updated bytes we want to store in a vector") + vectorSizeClient.delete(key) + vectorValueClient.delete(getVectorValueKey(key, 0)) + vectorValueClient.delete(getVectorValueKey(key, 1)) + getVectorStorageEntryFor(key, 0) should be(empty) + getVectorStorageEntryFor(key, 1) should be(empty) + getVectorStorageRangeFor(key, None, None, 1).head should be(empty) + + insertVectorStorageEntryFor(key, value) + //again + 
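+    // the second insert should append at the next free index (1), leaving both
+    // index 0 and index 1 holding `value` and the stored vector size at 2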
insertVectorStorageEntryFor(key, value) + + getVectorStorageEntryFor(key, 0) should be(value) + getVectorStorageEntryFor(key, 1) should be(value) + getVectorStorageRangeFor(key, None, None, 1).head should be(value) + getVectorStorageRangeFor(key, Some(1), None, 1).head should be(value) + getVectorStorageSizeFor(key) should be(2) + + updateVectorStorageEntryFor(key, 1, updatedValue) + + getVectorStorageEntryFor(key, 0) should be(value) + getVectorStorageEntryFor(key, 1) should be(updatedValue) + getVectorStorageRangeFor(key, None, None, 1).head should be(value) + getVectorStorageRangeFor(key, Some(1), None, 1).head should be(updatedValue) + getVectorStorageSizeFor(key) should be(2) + + } + } object VoldemortStorageBackendSuite { From beee516b3dcad8a63d14d0ca6ee8a5d9ff7fa3f4 Mon Sep 17 00:00:00 2001 From: ticktock Date: Thu, 16 Sep 2010 11:47:35 -0400 Subject: [PATCH 09/52] sorted set hand serialization and working actor test --- .../main/scala/VoldemortStorageBackend.scala | 73 ++++++-- .../src/test/resources/config/stores.xml | 2 +- .../scala/VoldemortPersistentActorSuite.scala | 176 ++++++++++++++++++ .../scala/VoldemortStorageBackendSuite.scala | 7 +- 4 files changed, 240 insertions(+), 18 deletions(-) create mode 100644 akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala index d0efd7347e..77fd7acedb 100644 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala @@ -42,7 +42,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } } var refClient: StoreClient[String, Array[Byte]] = storeClientFactory.getStoreClient(refStore) - var mapKeyClient: StoreClient[String, SortedSet[Array[Byte]]] = storeClientFactory.getStoreClient(mapKeyStore) + var mapKeyClient: StoreClient[String, Array[Byte]] = storeClientFactory.getStoreClient(mapKeyStore) var mapValueClient: StoreClient[Array[Byte], Array[Byte]] = storeClientFactory.getStoreClient(mapValueStore) var vectorSizeClient: StoreClient[String, Array[Byte]] = storeClientFactory.getStoreClient(vectorSizeStore) var vectorValueClient: StoreClient[Array[Byte], Array[Byte]] = storeClientFactory.getStoreClient(vectorValueStore) @@ -65,13 +65,13 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } def getMapStorageRangeFor(name: String, start: Option[Array[Byte]], finish: Option[Array[Byte]], count: Int): List[(Array[Byte], Array[Byte])] = { - val allkeys: SortedSet[Array[Byte]] = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]) + val allkeys: SortedSet[Array[Byte]] = getMapKeys(name) val range = allkeys.rangeImpl(start, finish).take(count) getKeyValues(name, range) } def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = { - val keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) + val keys = getMapKeys(name) getKeyValues(name, keys) } @@ -95,7 +95,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } def getMapStorageSizeFor(name: String): Int = { - val keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) + val keys = getMapKeys(name) keys.size } @@ -108,15 +108,15 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } def removeMapStorageFor(name: String, key: Array[Byte]) = { - var keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) + 
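+    // removal is two writes: drop `key` from the persisted key-set for `name`,
+    // write the set back, then delete the value stored under the combined key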
var keys = getMapKeys(name) keys -= key - mapKeyClient.put(name, keys) + putMapKeys(name, keys) mapValueClient.delete(getKey(name, key)) } def removeMapStorageFor(name: String) = { - val keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) + val keys = getMapKeys(name) keys.foreach { key => mapValueClient.delete(getKey(name, key)) @@ -126,9 +126,9 @@ MapStorageBackend[Array[Byte], Array[Byte]] with def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte]) = { mapValueClient.put(getKey(name, key), value) - var keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) + var keys = getMapKeys(name) keys += key - mapKeyClient.put(name, keys) + putMapKeys(name, keys) } def insertMapStorageEntriesFor(name: String, entries: List[(Array[Byte], Array[Byte])]) = { @@ -138,9 +138,17 @@ MapStorageBackend[Array[Byte], Array[Byte]] with key } } - var keys = mapKeyClient.getValue(name, new TreeSet[Array[Byte]]()) + var keys = getMapKeys(name) keys ++= newKeys - mapKeyClient.put(name, keys) + putMapKeys(name, keys) + } + + def putMapKeys(name: String, keys: SortedSet[Array[Byte]]) = { + mapKeyClient.put(name, SortedSetSerializer.toBytes(keys)) + } + + def getMapKeys(name: String): SortedSet[Array[Byte]] = { + SortedSetSerializer.fromBytes(mapKeyClient.getValue(name, Array.empty[Byte])) } @@ -176,8 +184,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with idx += 1 } } - log.info("StorageSize:" + storage.size) - log.info("SeqSize:" + seq.size) + storage.toList } @@ -250,4 +257,44 @@ MapStorageBackend[Array[Byte], Array[Byte]] with def fromString(str: String) = str.toInt } + object SortedSetSerializer { + def toBytes(set: SortedSet[Array[Byte]]): Array[Byte] = { + val length = set.foldLeft(0) { + (total, bytes) => { + total + bytes.length + IntSerializer.bytesPerInt + } + } + val allBytes = new Array[Byte](length) + val written = set.foldLeft(0) { + (total, bytes) => { + val sizeBytes = IntSerializer.toBytes(bytes.length) + System.arraycopy(sizeBytes, 0, allBytes, total, sizeBytes.length) + System.arraycopy(bytes, 0, allBytes, total + sizeBytes.length, bytes.length) + total + sizeBytes.length + bytes.length + } + } + require(length == written, "Bytes Written Did not equal Calculated Length, written %d, length %d".format(written, length)) + allBytes + } + + def fromBytes(bytes: Array[Byte]): SortedSet[Array[Byte]] = { + var set = new TreeSet[Array[Byte]] + if (bytes.length > IntSerializer.bytesPerInt) { + var pos = 0 + while (pos < bytes.length) { + val lengthBytes = new Array[Byte](IntSerializer.bytesPerInt) + System.arraycopy(bytes, pos, lengthBytes, 0, IntSerializer.bytesPerInt) + pos += IntSerializer.bytesPerInt + val length = IntSerializer.fromBytes(lengthBytes) + val item = new Array[Byte](length) + System.arraycopy(bytes, pos, item, 0, length) + set = set + item + pos += length + } + } + set + } + + } + } \ No newline at end of file diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml index 78f1b1385a..f2dd6ac099 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml +++ b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml @@ -46,7 +46,7 @@ utf8 - java-serialization + identity diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala 
b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala new file mode 100644 index 0000000000..ae575e1e96 --- /dev/null +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala @@ -0,0 +1,176 @@ +package se.scalablesolutions.akka.persistence.voldemort + +import org.scalatest.Spec +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterEach +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.actor.{Transactor, Actor, ActorRef} +import Actor._ +import BankAccountActor._ + + +case class Balance(accountNo: String) +case class Debit(accountNo: String, amount: Int, failer: ActorRef) +case class MultiDebit(accountNo: String, amounts: List[Int], failer: ActorRef) +case class Credit(accountNo: String, amount: Int) +case class Log(start: Int, finish: Int) +case object LogSize + +object BankAccountActor { + val state = "accountState" + val tx = "txnLog" +} + +class BankAccountActor extends Transactor { + private lazy val accountState = VoldemortStorage.newMap(state) + private lazy val txnLog = VoldemortStorage.newVector(tx) + + import sjson.json.DefaultProtocol._ + import sjson.json.JsonSerialization._ + + def receive: Receive = { + // check balance + case Balance(accountNo) => + txnLog.add(("Balance:" + accountNo).getBytes) + self.reply( + accountState.get(accountNo.getBytes) + .map(frombinary[Int](_)) + .getOrElse(0)) + + // debit amount: can fail + case Debit(accountNo, amount, failer) => + txnLog.add(("Debit:" + accountNo + " " + amount).getBytes) + val m = accountState.get(accountNo.getBytes) + .map(frombinary[Int](_)) + .getOrElse(0) + + accountState.put(accountNo.getBytes, tobinary(m - amount)) + if (amount > m) failer !! "Failure" + + self.reply(m - amount) + + // many debits: can fail + // demonstrates true rollback even if multiple puts have been done + case MultiDebit(accountNo, amounts, failer) => + val sum = amounts.foldRight(0)(_ + _) + txnLog.add(("MultiDebit:" + accountNo + " " + sum).getBytes) + + val m = accountState.get(accountNo.getBytes) + .map(frombinary[Int](_)) + .getOrElse(0) + + var cbal = m + amounts.foreach { + amount => + accountState.put(accountNo.getBytes, tobinary(m - amount)) + cbal = cbal - amount + if (cbal < 0) failer !! 
"Failure" + } + + self.reply(m - sum) + + // credit amount + case Credit(accountNo, amount) => + txnLog.add(("Credit:" + accountNo + " " + amount).getBytes) + val m = accountState.get(accountNo.getBytes) + .map(frombinary[Int](_)) + .getOrElse(0) + + accountState.put(accountNo.getBytes, tobinary(m + amount)) + + self.reply(m + amount) + + case LogSize => + self.reply(txnLog.length) + + case Log(start, finish) => + self.reply(txnLog.slice(start, finish).map(new String(_))) + } +} + +@serializable class PersistentFailerActor extends Transactor { + def receive = { + case "Failure" => + throw new RuntimeException("Expected exception; to test fault-tolerance") + } +} + +@RunWith(classOf[JUnitRunner]) +class VoldemortPersistentActorSuite extends +Spec with + ShouldMatchers with + BeforeAndAfterEach with EmbeddedVoldemort { + import VoldemortStorageBackend._ + + + override def beforeEach { + removeMapStorageFor(state) + var size = getVectorStorageSizeFor(tx) + (0 to size).foreach { + index => { + vectorValueClient.delete(getVectorValueKey(tx, index)) + } + } + vectorSizeClient.delete(tx) + } + + override def afterEach { + beforeEach + } + + describe("successful debit") { + it("should debit successfully") { + val bactor = actorOf[BankAccountActor] + bactor.start + val failer = actorOf[PersistentFailerActor] + failer.start + bactor !! Credit("a-123", 5000) + bactor !! Debit("a-123", 3000, failer) + + (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(2000) + + bactor !! Credit("a-123", 7000) + (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(9000) + + bactor !! Debit("a-123", 8000, failer) + (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(1000) + + (bactor !! LogSize).get.asInstanceOf[Int] should equal(7) + (bactor !! Log(0, 7)).get.asInstanceOf[Iterable[String]].size should equal(7) + } + } + + describe("unsuccessful debit") { + it("debit should fail") { + val bactor = actorOf[BankAccountActor] + bactor.start + val failer = actorOf[PersistentFailerActor] + failer.start + bactor !! Credit("a-123", 5000) + (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) + evaluating { + bactor !! Debit("a-123", 7000, failer) + } should produce[Exception] + (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) + (bactor !! LogSize).get.asInstanceOf[Int] should equal(3) + } + } + + describe("unsuccessful multidebit") { + it("multidebit should fail") { + val bactor = actorOf[BankAccountActor] + bactor.start + val failer = actorOf[PersistentFailerActor] + failer.start + bactor !! Credit("a-123", 5000) + (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) + evaluating { + bactor !! MultiDebit("a-123", List(1000, 2000, 4000), failer) + } should produce[Exception] + (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) + (bactor !! 
LogSize).get.asInstanceOf[Int] should equal(3) + } + } +} diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala index 8906daa5fb..419bd05555 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala @@ -35,10 +35,9 @@ class VoldemortStorageBackendSuite extends FunSuite with ShouldMatchers with Emb val key = "testmapKey" val mapKeys = new TreeSet[Array[Byte]] + bytes("key1") mapKeyClient.delete(key) - mapKeyClient.getValue(key, emptySet) should equal(emptySet) - mapKeyClient.put(key, mapKeys) - mapKeyClient.getValue(key, emptySet) should equal(mapKeys) - + mapKeyClient.getValue(key, SortedSetSerializer.toBytes(emptySet)) should equal(SortedSetSerializer.toBytes(emptySet)) + putMapKeys(key, mapKeys) + getMapKeys(key) should equal(mapKeys) } test("that map value storage and retrieval works") { From 8464fd5251cec964d250fa8f19c3ce6203990b9f Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Fri, 17 Sep 2010 16:04:25 +0200 Subject: [PATCH 10/52] Aaaaalmost there... --- .../src/main/scala/actor/ActorRef.scala | 28 +- .../src/main/scala/actor/ActorRegistry.scala | 4 +- .../src/main/scala/actor/Implicits.scala | 5 + .../src/main/scala/dispatch/Dispatchers.scala | 5 +- .../main/scala/dispatch/MessageHandling.scala | 4 +- akka-actor/src/main/scala/stm/Ref.scala | 4 +- .../src/main/scala/stm/TransactionalMap.scala | 4 +- .../main/scala/stm/TransactionalVector.scala | 4 +- .../src/main/scala/util/AkkaException.scala | 5 +- .../main/scala/util/ReflectiveAccess.scala | 14 +- akka-actor/src/main/scala/util/UUID.scala | 9 - ...rBasedEventDrivenDispatcherActorSpec.scala | 4 +- .../src/main/scala/ConsumerPublisher.scala | 8 +- .../main/scala/component/ActorComponent.scala | 12 +- .../src/main/scala/CassandraStorage.scala | 8 +- .../src/main/scala/MongoStorage.scala | 8 +- .../src/main/scala/RedisStorage.scala | 12 +- .../akka/remote/protocol/RemoteProtocol.java | 515 +++++++++++------- .../src/main/protocol/RemoteProtocol.proto | 6 +- .../src/main/scala/remote/RemoteClient.scala | 35 +- .../src/main/scala/remote/RemoteServer.scala | 23 +- .../serialization/SerializationProtocol.scala | 19 +- .../src/test/scala/ticket/Ticket434Spec.scala | 6 +- .../src/main/scala/actor/TypedActor.scala | 2 +- 24 files changed, 448 insertions(+), 296 deletions(-) delete mode 100644 akka-actor/src/main/scala/util/UUID.scala diff --git a/akka-actor/src/main/scala/actor/ActorRef.scala b/akka-actor/src/main/scala/actor/ActorRef.scala index 4905e62670..597e6bb1d1 100644 --- a/akka-actor/src/main/scala/actor/ActorRef.scala +++ b/akka-actor/src/main/scala/actor/ActorRef.scala @@ -67,7 +67,7 @@ trait ActorRef extends java.lang.Comparable[ActorRef] { scalaRef: ScalaActorRef => // Only mutable for RemoteServer in order to maintain identity across nodes - @volatile protected[akka] var _uuid = UUID.newUuid.toString + @volatile protected[akka] var _uuid = newUuid @volatile protected[this] var _isRunning = false @volatile protected[this] var _isShutDown = false @volatile protected[akka] var _isBeingRestarted = false @@ -86,7 +86,7 @@ trait ActorRef extends * that you can use a custom name to be able to retrieve the "correct" persisted state * upon restart, remote restart etc. 
*/ - @BeanProperty @volatile var id: String = _uuid + @BeanProperty @volatile var id: String = _uuid.toString /** * User overridable callback/setting. @@ -204,7 +204,7 @@ trait ActorRef extends /** * Comparison only takes uuid into account. */ - def compareTo(other: ActorRef) = this.uuid.compareTo(other.uuid) + def compareTo(other: ActorRef) = this.uuid compareTo other.uuid /** * Returns the uuid for the actor. @@ -249,7 +249,7 @@ trait ActorRef extends /** * Only for internal use. UUID is effectively final. */ - protected[akka] def uuid_=(uid: String) = _uuid = uid + protected[akka] def uuid_=(uid: Uuid) = _uuid = uid /** * Akka Java API @@ -605,9 +605,9 @@ trait ActorRef extends protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit - protected[akka] def registerSupervisorAsRemoteActor: Option[String] + protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid] - protected[akka] def linkedActors: JMap[String, ActorRef] + protected[akka] def linkedActors: JMap[Uuid, ActorRef] protected[akka] def linkedActorsAsList: List[ActorRef] @@ -648,7 +648,7 @@ class LocalActorRef private[akka]( extends ActorRef with ScalaActorRef { @volatile private[akka] var _remoteAddress: Option[InetSocketAddress] = None // only mutable to maintain identity across nodes - @volatile private[akka] var _linkedActors: Option[ConcurrentHashMap[String, ActorRef]] = None + @volatile private[akka] var _linkedActors: Option[ConcurrentHashMap[Uuid, ActorRef]] = None @volatile private[akka] var _supervisor: Option[ActorRef] = None @volatile private var isInInitialization = false @volatile private var runActorInitialization = false @@ -670,7 +670,7 @@ class LocalActorRef private[akka]( private[akka] def this(factory: () => Actor) = this(Right(Some(factory))) // used only for deserialization - private[akka] def this(__uuid: String, + private[akka] def this(__uuid: Uuid, __id: String, __hostname: String, __port: Int, @@ -1096,7 +1096,7 @@ class LocalActorRef private[akka]( } } - protected[akka] def registerSupervisorAsRemoteActor: Option[String] = guard.withGuard { + protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid] = guard.withGuard { ensureRemotingEnabled if (_supervisor.isDefined) { remoteAddress.foreach(address => RemoteClientModule.registerSupervisorForActor(address, this)) @@ -1104,9 +1104,9 @@ class LocalActorRef private[akka]( } else None } - protected[akka] def linkedActors: JMap[String, ActorRef] = guard.withGuard { + protected[akka] def linkedActors: JMap[Uuid, ActorRef] = guard.withGuard { if (_linkedActors.isEmpty) { - val actors = new ConcurrentHashMap[String, ActorRef] + val actors = new ConcurrentHashMap[Uuid, ActorRef] _linkedActors = Some(actors) actors } else _linkedActors.get @@ -1391,7 +1391,7 @@ private[akka] case class RemoteActorRef private[akka] ( */ def actorClassName: String = className - protected[akka] def registerSupervisorAsRemoteActor: Option[String] = None + protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid] = None val remoteAddress: Option[InetSocketAddress] = Some(new InetSocketAddress(hostname, port)) @@ -1420,7 +1420,7 @@ private[akka] case class RemoteActorRef private[akka] ( protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit = unsupported protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = unsupported protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], 
withinTimeRange: Option[Int]): Unit = unsupported - protected[akka] def linkedActors: JMap[String, ActorRef] = unsupported + protected[akka] def linkedActors: JMap[Uuid, ActorRef] = unsupported protected[akka] def linkedActorsAsList: List[ActorRef] = unsupported protected[akka] def invoke(messageHandle: MessageInvocation): Unit = unsupported protected[akka] def remoteAddress_=(addr: Option[InetSocketAddress]): Unit = unsupported @@ -1443,7 +1443,7 @@ trait ActorRefShared { /** * Returns the uuid for the actor. */ - def uuid: String + def uuid: Uuid /** * Shuts down and removes all linked actors. diff --git a/akka-actor/src/main/scala/actor/ActorRegistry.scala b/akka-actor/src/main/scala/actor/ActorRegistry.scala index 51bbfd3477..e425451470 100644 --- a/akka-actor/src/main/scala/actor/ActorRegistry.scala +++ b/akka-actor/src/main/scala/actor/ActorRegistry.scala @@ -35,7 +35,7 @@ case class ActorUnregistered(actor: ActorRef) extends ActorRegistryEvent * @author Jonas Bonér */ object ActorRegistry extends ListenerManagement { - private val actorsByUUID = new ConcurrentHashMap[String, ActorRef] + private val actorsByUUID = new ConcurrentHashMap[Uuid, ActorRef] private val actorsById = new Index[String,ActorRef] /** @@ -112,7 +112,7 @@ object ActorRegistry extends ListenerManagement { /** * Finds the actor that has a specific UUID. */ - def actorFor(uuid: String): Option[ActorRef] = Option(actorsByUUID get uuid) + def actorFor(uuid: Uuid): Option[ActorRef] = Option(actorsByUUID get uuid) /** * Registers an actor in the ActorRegistry. diff --git a/akka-actor/src/main/scala/actor/Implicits.scala b/akka-actor/src/main/scala/actor/Implicits.scala index 604e5189b9..f228d45186 100644 --- a/akka-actor/src/main/scala/actor/Implicits.scala +++ b/akka-actor/src/main/scala/actor/Implicits.scala @@ -12,4 +12,9 @@ package object actor { implicit def scala2ActorRef(ref: ScalaActorRef): ActorRef = ref.asInstanceOf[ActorRef] + + type Uuid = com.eaio.uuid.UUID + def newUuid(): Uuid = new Uuid() + def uuidFrom(time: Long, clockSeqAndNode: Long): Uuid = new Uuid(time,clockSeqAndNode) + def uuidFrom(uuid: String) = new Uuid(uuid) } diff --git a/akka-actor/src/main/scala/dispatch/Dispatchers.scala b/akka-actor/src/main/scala/dispatch/Dispatchers.scala index 7e7904ec29..834a356954 100644 --- a/akka-actor/src/main/scala/dispatch/Dispatchers.scala +++ b/akka-actor/src/main/scala/dispatch/Dispatchers.scala @@ -9,7 +9,8 @@ import se.scalablesolutions.akka.config.Config.config import net.lag.configgy.ConfigMap import java.util.concurrent.ThreadPoolExecutor.{AbortPolicy, CallerRunsPolicy, DiscardOldestPolicy, DiscardPolicy} import java.util.concurrent.TimeUnit -import se.scalablesolutions.akka.util.{Duration, Logging, UUID} +import se.scalablesolutions.akka.util.{Duration, Logging} +import se.scalablesolutions.akka.actor.newUuid /** * Scala API. Dispatcher factory. 
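
The newUuid imported above comes from the actor package object introduced in this patch, which also aliases Uuid to com.eaio.uuid.UUID and wraps its constructors as uuidFrom. A minimal round-trip sketch of those helpers; the getTime and getClockSeqAndNode accessors are assumptions about the eaio library, not something this patch shows:

    import se.scalablesolutions.akka.actor.{Uuid, newUuid, uuidFrom}

    object UuidRoundTrip {
      def main(args: Array[String]) {
        val original: Uuid = newUuid()
        // canonical hex form, parsed back by the String overload of uuidFrom
        assert(uuidFrom(original.toString) == original)
        // two-long form, matching the uuidFrom(time, clockSeqAndNode) overload;
        // getTime/getClockSeqAndNode are assumed accessors on the eaio class
        assert(uuidFrom(original.getTime, original.getClockSeqAndNode) == original)
      }
    }

Since Uuid is now a class rather than a String, call sites that need text, such as the ActorRef.id default, go through toString explicitly.
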
@@ -171,7 +172,7 @@ object Dispatchers extends Logging { * Throws: IllegalArgumentException if the value of "type" is not valid */ def from(cfg: ConfigMap): Option[MessageDispatcher] = { - lazy val name = cfg.getString("name", UUID.newUuid.toString) + lazy val name = cfg.getString("name", newUuid.toString) def threadPoolConfig(b: ThreadPoolBuilder) { b.configureIfPossible( builder => { diff --git a/akka-actor/src/main/scala/dispatch/MessageHandling.scala b/akka-actor/src/main/scala/dispatch/MessageHandling.scala index 25a02f2603..dd96583dcb 100644 --- a/akka-actor/src/main/scala/dispatch/MessageHandling.scala +++ b/akka-actor/src/main/scala/dispatch/MessageHandling.scala @@ -10,7 +10,7 @@ import org.multiverse.commitbarriers.CountDownCommitBarrier import se.scalablesolutions.akka.AkkaException import java.util.{Queue, List} import java.util.concurrent._ -import concurrent.forkjoin.LinkedTransferQueue +import se.scalablesolutions.akka.actor.Uuid import se.scalablesolutions.akka.util.{SimpleLock, Duration, HashCode, Logging} /** @@ -119,7 +119,7 @@ class DefaultBoundedMessageQueue(capacity: Int, pushTimeOut: Option[Duration], b * @author Jonas Bonér */ trait MessageDispatcher extends Logging { - protected val uuids = new ConcurrentSkipListSet[String] + protected val uuids = new ConcurrentSkipListSet[Uuid] def dispatch(invocation: MessageInvocation) diff --git a/akka-actor/src/main/scala/stm/Ref.scala b/akka-actor/src/main/scala/stm/Ref.scala index b0ae15c957..fc81dbafa9 100644 --- a/akka-actor/src/main/scala/stm/Ref.scala +++ b/akka-actor/src/main/scala/stm/Ref.scala @@ -4,7 +4,7 @@ package se.scalablesolutions.akka.stm -import se.scalablesolutions.akka.util.UUID +import se.scalablesolutions.akka.actor.{newUuid, Uuid} import org.multiverse.transactional.refs.BasicRef @@ -34,7 +34,7 @@ class Ref[T](initialValue: T) extends BasicRef[T](initialValue) with Transaction def this() = this(null.asInstanceOf[T]) - val uuid = UUID.newUuid.toString + val uuid = newUuid.toString def swap(elem: T) = set(elem) diff --git a/akka-actor/src/main/scala/stm/TransactionalMap.scala b/akka-actor/src/main/scala/stm/TransactionalMap.scala index d45396ad25..457774f755 100644 --- a/akka-actor/src/main/scala/stm/TransactionalMap.scala +++ b/akka-actor/src/main/scala/stm/TransactionalMap.scala @@ -6,7 +6,7 @@ package se.scalablesolutions.akka.stm import scala.collection.immutable.HashMap -import se.scalablesolutions.akka.util.UUID +import se.scalablesolutions.akka.actor.{newUuid} import org.multiverse.api.ThreadLocalTransaction.getThreadLocalTransaction @@ -24,7 +24,7 @@ object TransactionalMap { class TransactionalMap[K, V](initialValue: HashMap[K, V]) extends Transactional with scala.collection.mutable.Map[K, V] { def this() = this(HashMap[K, V]()) - val uuid = UUID.newUuid.toString + val uuid = newUuid.toString private[this] val ref = Ref(initialValue) diff --git a/akka-actor/src/main/scala/stm/TransactionalVector.scala b/akka-actor/src/main/scala/stm/TransactionalVector.scala index 2beeeecef0..3004e97db8 100644 --- a/akka-actor/src/main/scala/stm/TransactionalVector.scala +++ b/akka-actor/src/main/scala/stm/TransactionalVector.scala @@ -6,7 +6,7 @@ package se.scalablesolutions.akka.stm import scala.collection.immutable.Vector -import se.scalablesolutions.akka.util.UUID +import se.scalablesolutions.akka.actor.newUuid import org.multiverse.api.ThreadLocalTransaction.getThreadLocalTransaction @@ -24,7 +24,7 @@ object TransactionalVector { class TransactionalVector[T](initialValue: Vector[T]) extends Transactional with 
IndexedSeq[T] { def this() = this(Vector[T]()) - val uuid = UUID.newUuid.toString + val uuid = newUuid.toString private[this] val ref = Ref(initialValue) diff --git a/akka-actor/src/main/scala/util/AkkaException.scala b/akka-actor/src/main/scala/util/AkkaException.scala index 3e28c17390..0eb06549b4 100644 --- a/akka-actor/src/main/scala/util/AkkaException.scala +++ b/akka-actor/src/main/scala/util/AkkaException.scala @@ -4,7 +4,8 @@ package se.scalablesolutions.akka -import se.scalablesolutions.akka.util.{UUID, Logging} +import se.scalablesolutions.akka.util.Logging +import se.scalablesolutions.akka.actor.newUuid import java.io.{StringWriter, PrintWriter} import java.net.{InetAddress, UnknownHostException} @@ -23,7 +24,7 @@ import java.net.{InetAddress, UnknownHostException} import AkkaException._ val exceptionName = getClass.getName - val uuid = "%s_%s".format(hostname, UUID.newUuid.toString) + val uuid = "%s_%s".format(hostname, newUuid) override val toString = "%s\n\t[%s]\n\t%s\n\t%s".format(exceptionName, uuid, message, stackTrace) diff --git a/akka-actor/src/main/scala/util/ReflectiveAccess.scala b/akka-actor/src/main/scala/util/ReflectiveAccess.scala index abccd5d9b0..e5daf2ca5a 100644 --- a/akka-actor/src/main/scala/util/ReflectiveAccess.scala +++ b/akka-actor/src/main/scala/util/ReflectiveAccess.scala @@ -7,7 +7,7 @@ package se.scalablesolutions.akka.util import se.scalablesolutions.akka.actor.{ActorRef, IllegalActorStateException, ActorType} import se.scalablesolutions.akka.dispatch.{Future, CompletableFuture} import se.scalablesolutions.akka.config.{Config, ModuleNotAvailableException} - +import se.scalablesolutions.akka.actor.Uuid import java.net.InetSocketAddress import se.scalablesolutions.akka.stm.Transaction import se.scalablesolutions.akka.AkkaException @@ -51,8 +51,8 @@ object ReflectiveAccess { } type RemoteClientObject = { - def register(hostname: String, port: Int, uuid: String): Unit - def unregister(hostname: String, port: Int, uuid: String): Unit + def register(hostname: String, port: Int, uuid: Uuid): Unit + def unregister(hostname: String, port: Int, uuid: Uuid): Unit def clientFor(address: InetSocketAddress): RemoteClient def clientFor(hostname: String, port: Int, loader: Option[ClassLoader]): RemoteClient } @@ -65,12 +65,12 @@ object ReflectiveAccess { val remoteClientObjectInstance: Option[RemoteClientObject] = getObject("se.scalablesolutions.akka.remote.RemoteClient$") - def register(address: InetSocketAddress, uuid: String) = { + def register(address: InetSocketAddress, uuid: Uuid) = { ensureRemotingEnabled remoteClientObjectInstance.get.register(address.getHostName, address.getPort, uuid) } - def unregister(address: InetSocketAddress, uuid: String) = { + def unregister(address: InetSocketAddress, uuid: Uuid) = { ensureRemotingEnabled remoteClientObjectInstance.get.unregister(address.getHostName, address.getPort, uuid) } @@ -112,7 +112,7 @@ object ReflectiveAccess { val PORT = Config.config.getInt("akka.remote.server.port", 9999) type RemoteServerObject = { - def registerActor(address: InetSocketAddress, uuid: String, actor: ActorRef): Unit + def registerActor(address: InetSocketAddress, uuid: Uuid, actor: ActorRef): Unit def registerTypedActor(address: InetSocketAddress, name: String, typedActor: AnyRef): Unit } @@ -126,7 +126,7 @@ object ReflectiveAccess { val remoteNodeObjectInstance: Option[RemoteNodeObject] = getObject("se.scalablesolutions.akka.remote.RemoteNode$") - def registerActor(address: InetSocketAddress, uuid: String, actorRef: ActorRef) = { + 
def registerActor(address: InetSocketAddress, uuid: Uuid, actorRef: ActorRef) = { ensureRemotingEnabled remoteServerObjectInstance.get.registerActor(address, uuid, actorRef) } diff --git a/akka-actor/src/main/scala/util/UUID.scala b/akka-actor/src/main/scala/util/UUID.scala deleted file mode 100644 index 548f014e58..0000000000 --- a/akka-actor/src/main/scala/util/UUID.scala +++ /dev/null @@ -1,9 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.util - -object UUID { - def newUuid = new com.eaio.uuid.UUID() -} \ No newline at end of file diff --git a/akka-actor/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala b/akka-actor/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala index 3935bc9b0b..95931df98b 100644 --- a/akka-actor/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala +++ b/akka-actor/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala @@ -10,7 +10,7 @@ import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger} object ExecutorBasedEventDrivenDispatcherActorSpec { class TestActor extends Actor { - self.dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher(self.uuid) + self.dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher(self.uuid.toString) def receive = { case "Hello" => self.reply("World") @@ -23,7 +23,7 @@ object ExecutorBasedEventDrivenDispatcherActorSpec { val oneWay = new CountDownLatch(1) } class OneWayTestActor extends Actor { - self.dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher(self.uuid) + self.dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher(self.uuid.toString) def receive = { case "OneWay" => OneWayTestActor.oneWay.countDown } diff --git a/akka-camel/src/main/scala/ConsumerPublisher.scala b/akka-camel/src/main/scala/ConsumerPublisher.scala index c0b64021af..472d7d6dad 100644 --- a/akka-camel/src/main/scala/ConsumerPublisher.scala +++ b/akka-camel/src/main/scala/ConsumerPublisher.scala @@ -31,7 +31,7 @@ private[camel] object ConsumerPublisher extends Logging { * Stops route to the already un-registered consumer actor. */ def handleConsumerUnregistered(event: ConsumerUnregistered) { - CamelContextManager.context.stopRoute(event.uuid) + CamelContextManager.context.stopRoute(event.uuid.toString) log.info("unpublished actor %s from endpoint %s" format (event.actorRef, event.uri)) } @@ -143,7 +143,7 @@ private[camel] abstract class ConsumerRoute(endpointUri: String, id: String) ext * * @author Martin Krasser */ -private[camel] class ConsumerActorRoute(endpointUri: String, uuid: String, blocking: Boolean) extends ConsumerRoute(endpointUri, uuid) { +private[camel] class ConsumerActorRoute(endpointUri: String, uuid: Uuid, blocking: Boolean) extends ConsumerRoute(endpointUri, uuid.toString) { protected override def targetUri = "actor:uuid:%s?blocking=%s" format (uuid, blocking) } @@ -229,7 +229,7 @@ private[camel] sealed trait ConsumerEvent * * @author Martin Krasser */ -private[camel] case class ConsumerRegistered(actorRef: ActorRef, uri: String, uuid: String, blocking: Boolean) extends ConsumerEvent +private[camel] case class ConsumerRegistered(actorRef: ActorRef, uri: String, uuid: Uuid, blocking: Boolean) extends ConsumerEvent /** * Event indicating that a consumer actor has been unregistered from the actor registry. 
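
Switching these events to the Uuid type only holds together if a Uuid survives a print/parse round trip: ConsumerActorRoute above interpolates it into the actor endpoint URI, and ActorComponent in the next hunk recovers it with uuidFrom. A small sketch under that assumption; the stripPrefix/takeWhile pre-processing is a stand-in for Camel's own URI handling, for illustration only:

    import se.scalablesolutions.akka.actor.{newUuid, uuidFrom}

    object EndpointUriRoundTrip {
      def main(args: Array[String]) {
        val uuid = newUuid()
        // endpoint URI in the shape ConsumerActorRoute.targetUri produces
        val target = "actor:uuid:%s?blocking=%s" format (uuid, false)
        // crude stand-in for Camel's scheme/query handling
        val path = target.stripPrefix("actor:").takeWhile(_ != '?')
        path.split(":").toList match {
          case "uuid" :: s :: Nil => assert(uuidFrom(s) == uuid, "round trip failed")
          case other => throw new IllegalArgumentException("unexpected path: " + other)
        }
      }
    }
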
@@ -240,7 +240,7 @@ private[camel] case class ConsumerRegistered(actorRef: ActorRef, uri: String, uu * * @author Martin Krasser */ -private[camel] case class ConsumerUnregistered(actorRef: ActorRef, uri: String, uuid: String) extends ConsumerEvent +private[camel] case class ConsumerUnregistered(actorRef: ActorRef, uri: String, uuid: Uuid) extends ConsumerEvent /** * Event indicating that an typed actor proxy has been created for a typed actor. For each @consume diff --git a/akka-camel/src/main/scala/component/ActorComponent.scala b/akka-camel/src/main/scala/component/ActorComponent.scala index 6c1c5902fa..ccd4c63f0c 100644 --- a/akka-camel/src/main/scala/component/ActorComponent.scala +++ b/akka-camel/src/main/scala/component/ActorComponent.scala @@ -18,7 +18,7 @@ import se.scalablesolutions.akka.camel.{Failure, CamelMessageConversion, Message import CamelMessageConversion.toExchangeAdapter import se.scalablesolutions.akka.dispatch.{CompletableFuture, MessageInvocation, MessageDispatcher} import se.scalablesolutions.akka.stm.TransactionConfig -import se.scalablesolutions.akka.actor.{ScalaActorRef, ActorRegistry, Actor, ActorRef} +import se.scalablesolutions.akka.actor.{ScalaActorRef, ActorRegistry, Actor, ActorRef, Uuid, uuidFrom} import se.scalablesolutions.akka.AkkaException import scala.reflect.BeanProperty @@ -37,11 +37,11 @@ class ActorComponent extends DefaultComponent { new ActorEndpoint(uri, this, idAndUuid._1, idAndUuid._2) } - private def idAndUuidPair(remaining: String): Tuple2[Option[String], Option[String]] = { + private def idAndUuidPair(remaining: String): Tuple2[Option[String], Option[Uuid]] = { remaining split ":" toList match { case id :: Nil => (Some(id), None) case "id" :: id :: Nil => (Some(id), None) - case "uuid" :: uuid :: Nil => (None, Some(uuid)) + case "uuid" :: uuid :: Nil => (None, Some(uuidFrom(uuid))) case _ => throw new IllegalArgumentException( "invalid path format: %s - should be or id: or uuid:" format remaining) } @@ -64,7 +64,7 @@ class ActorComponent extends DefaultComponent { class ActorEndpoint(uri: String, comp: ActorComponent, val id: Option[String], - val uuid: Option[String]) extends DefaultEndpoint(uri, comp) { + val uuid: Option[Uuid]) extends DefaultEndpoint(uri, comp) { /** * Blocking of caller thread during two-way message exchanges with consumer actors. 
This is set @@ -151,7 +151,7 @@ class ActorProducer(val ep: ActorEndpoint) extends DefaultProducer(ep) with Asyn case actors => Some(actors(0)) } - private def targetByUuid(uuid: String) = ActorRegistry.actorFor(uuid) + private def targetByUuid(uuid: Uuid) = ActorRegistry.actorFor(uuid) } /** @@ -250,7 +250,7 @@ private[akka] class AsyncCallbackAdapter(exchange: Exchange, callback: AsyncCall protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = unsupported protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = unsupported protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit = unsupported - protected[akka] def linkedActors: JavaMap[String, ActorRef] = unsupported + protected[akka] def linkedActors: JavaMap[Uuid, ActorRef] = unsupported protected[akka] def linkedActorsAsList: List[ActorRef] = unsupported protected[akka] def invoke(messageHandle: MessageInvocation): Unit = unsupported protected[akka] def remoteAddress_=(addr: Option[InetSocketAddress]): Unit = unsupported diff --git a/akka-persistence/akka-persistence-cassandra/src/main/scala/CassandraStorage.scala b/akka-persistence/akka-persistence-cassandra/src/main/scala/CassandraStorage.scala index 0c6f239ef7..b3867b9492 100644 --- a/akka-persistence/akka-persistence-cassandra/src/main/scala/CassandraStorage.scala +++ b/akka-persistence/akka-persistence-cassandra/src/main/scala/CassandraStorage.scala @@ -4,16 +4,16 @@ package se.scalablesolutions.akka.persistence.cassandra -import se.scalablesolutions.akka.util.UUID import se.scalablesolutions.akka.stm._ import se.scalablesolutions.akka.persistence.common._ +import se.scalablesolutions.akka.actor.{newUuid} object CassandraStorage extends Storage { type ElementType = Array[Byte] - def newMap: PersistentMap[ElementType, ElementType] = newMap(UUID.newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(UUID.newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(UUID.newUuid.toString) + def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) + def newVector: PersistentVector[ElementType] = newVector(newUuid.toString) + def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) def getVector(id: String): PersistentVector[ElementType] = newVector(id) diff --git a/akka-persistence/akka-persistence-mongo/src/main/scala/MongoStorage.scala b/akka-persistence/akka-persistence-mongo/src/main/scala/MongoStorage.scala index 83e47e3ba5..6bd99ee3e1 100644 --- a/akka-persistence/akka-persistence-mongo/src/main/scala/MongoStorage.scala +++ b/akka-persistence/akka-persistence-mongo/src/main/scala/MongoStorage.scala @@ -6,14 +6,14 @@ package se.scalablesolutions.akka.persistence.mongo import se.scalablesolutions.akka.stm._ import se.scalablesolutions.akka.persistence.common._ -import se.scalablesolutions.akka.util.UUID +import se.scalablesolutions.akka.actor.{newUuid} object MongoStorage extends Storage { type ElementType = Array[Byte] - def newMap: PersistentMap[ElementType, ElementType] = newMap(UUID.newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(UUID.newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(UUID.newUuid.toString) + def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) + def newVector: PersistentVector[ElementType] = 
newVector(newUuid.toString) + def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) def getVector(id: String): PersistentVector[ElementType] = newVector(id) diff --git a/akka-persistence/akka-persistence-redis/src/main/scala/RedisStorage.scala b/akka-persistence/akka-persistence-redis/src/main/scala/RedisStorage.scala index 1eca775567..9055b6717c 100644 --- a/akka-persistence/akka-persistence-redis/src/main/scala/RedisStorage.scala +++ b/akka-persistence/akka-persistence-redis/src/main/scala/RedisStorage.scala @@ -4,18 +4,18 @@ package se.scalablesolutions.akka.persistence.redis -import se.scalablesolutions.akka.util.UUID +import se.scalablesolutions.akka.actor.{newUuid} import se.scalablesolutions.akka.stm._ import se.scalablesolutions.akka.persistence.common._ object RedisStorage extends Storage { type ElementType = Array[Byte] - def newMap: PersistentMap[ElementType, ElementType] = newMap(UUID.newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(UUID.newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(UUID.newUuid.toString) - override def newQueue: PersistentQueue[ElementType] = newQueue(UUID.newUuid.toString) - override def newSortedSet: PersistentSortedSet[ElementType] = newSortedSet(UUID.newUuid.toString) + def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) + def newVector: PersistentVector[ElementType] = newVector(newUuid.toString) + def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) + override def newQueue: PersistentQueue[ElementType] = newQueue(newUuid.toString) + override def newSortedSet: PersistentSortedSet[ElementType] = newSortedSet(newUuid.toString) def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) def getVector(id: String): PersistentVector[ElementType] = newVector(id) diff --git a/akka-remote/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java b/akka-remote/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java index ab05937f61..bc7e21a870 100644 --- a/akka-remote/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java +++ b/akka-remote/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java @@ -244,12 +244,12 @@ public final class RemoteProtocol { return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_RemoteActorRefProtocol_fieldAccessorTable; } - // required string uuid = 1; - public static final int UUID_FIELD_NUMBER = 1; - private boolean hasUuid; - private java.lang.String uuid_ = ""; - public boolean hasUuid() { return hasUuid; } - public java.lang.String getUuid() { return uuid_; } + // required string classOrServiceName = 1; + public static final int CLASSORSERVICENAME_FIELD_NUMBER = 1; + private boolean hasClassOrServiceName; + private java.lang.String classOrServiceName_ = ""; + public boolean hasClassOrServiceName() { return hasClassOrServiceName; } + public java.lang.String getClassOrServiceName() { return classOrServiceName_; } // required string actorClassname = 2; public static final int ACTORCLASSNAME_FIELD_NUMBER = 2; @@ -276,7 +276,7 @@ public final class RemoteProtocol { homeAddress_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.getDefaultInstance(); } public final boolean isInitialized() { - if (!hasUuid) return false; + if (!hasClassOrServiceName) return false; if (!hasActorClassname) return false; if (!hasHomeAddress) 
return false; if (!getHomeAddress().isInitialized()) return false; @@ -286,8 +286,8 @@ public final class RemoteProtocol { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - if (hasUuid()) { - output.writeString(1, getUuid()); + if (hasClassOrServiceName()) { + output.writeString(1, getClassOrServiceName()); } if (hasActorClassname()) { output.writeString(2, getActorClassname()); @@ -307,9 +307,9 @@ public final class RemoteProtocol { if (size != -1) return size; size = 0; - if (hasUuid()) { + if (hasClassOrServiceName()) { size += com.google.protobuf.CodedOutputStream - .computeStringSize(1, getUuid()); + .computeStringSize(1, getClassOrServiceName()); } if (hasActorClassname()) { size += com.google.protobuf.CodedOutputStream @@ -481,8 +481,8 @@ public final class RemoteProtocol { public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol other) { if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDefaultInstance()) return this; - if (other.hasUuid()) { - setUuid(other.getUuid()); + if (other.hasClassOrServiceName()) { + setClassOrServiceName(other.getClassOrServiceName()); } if (other.hasActorClassname()) { setActorClassname(other.getActorClassname()); @@ -519,7 +519,7 @@ public final class RemoteProtocol { break; } case 10: { - setUuid(input.readString()); + setClassOrServiceName(input.readString()); break; } case 18: { @@ -544,24 +544,24 @@ public final class RemoteProtocol { } - // required string uuid = 1; - public boolean hasUuid() { - return result.hasUuid(); + // required string classOrServiceName = 1; + public boolean hasClassOrServiceName() { + return result.hasClassOrServiceName(); } - public java.lang.String getUuid() { - return result.getUuid(); + public java.lang.String getClassOrServiceName() { + return result.getClassOrServiceName(); } - public Builder setUuid(java.lang.String value) { + public Builder setClassOrServiceName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - result.hasUuid = true; - result.uuid_ = value; + result.hasClassOrServiceName = true; + result.classOrServiceName_ = value; return this; } - public Builder clearUuid() { - result.hasUuid = false; - result.uuid_ = getDefaultInstance().getUuid(); + public Builder clearClassOrServiceName() { + result.hasClassOrServiceName = false; + result.classOrServiceName_ = getDefaultInstance().getClassOrServiceName(); return this; } @@ -680,12 +680,12 @@ public final class RemoteProtocol { return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_SerializedActorRefProtocol_fieldAccessorTable; } - // required string uuid = 1; + // required .UuidProtocol uuid = 1; public static final int UUID_FIELD_NUMBER = 1; private boolean hasUuid; - private java.lang.String uuid_ = ""; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol uuid_; public boolean hasUuid() { return hasUuid; } - public java.lang.String getUuid() { return uuid_; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol getUuid() { return uuid_; } // required string id = 2; public static final int ID_FIELD_NUMBER = 2; @@ -777,6 +777,7 @@ public final class RemoteProtocol { } private void initFields() { + uuid_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance(); originalAddress_ = 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.getDefaultInstance(); lifeCycle_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.getDefaultInstance(); supervisor_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDefaultInstance(); @@ -786,6 +787,7 @@ public final class RemoteProtocol { if (!hasId) return false; if (!hasActorClassname) return false; if (!hasOriginalAddress) return false; + if (!getUuid().isInitialized()) return false; if (!getOriginalAddress().isInitialized()) return false; if (hasLifeCycle()) { if (!getLifeCycle().isInitialized()) return false; @@ -803,7 +805,7 @@ public final class RemoteProtocol { throws java.io.IOException { getSerializedSize(); if (hasUuid()) { - output.writeString(1, getUuid()); + output.writeMessage(1, getUuid()); } if (hasId()) { output.writeString(2, getId()); @@ -852,7 +854,7 @@ public final class RemoteProtocol { size = 0; if (hasUuid()) { size += com.google.protobuf.CodedOutputStream - .computeStringSize(1, getUuid()); + .computeMessageSize(1, getUuid()); } if (hasId()) { size += com.google.protobuf.CodedOutputStream @@ -1065,7 +1067,7 @@ public final class RemoteProtocol { public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol other) { if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol.getDefaultInstance()) return this; if (other.hasUuid()) { - setUuid(other.getUuid()); + mergeUuid(other.getUuid()); } if (other.hasId()) { setId(other.getId()); @@ -1132,7 +1134,12 @@ public final class RemoteProtocol { break; } case 10: { - setUuid(input.readString()); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.newBuilder(); + if (hasUuid()) { + subBuilder.mergeFrom(getUuid()); + } + input.readMessage(subBuilder, extensionRegistry); + setUuid(subBuilder.buildPartial()); break; } case 18: { @@ -1205,24 +1212,40 @@ public final class RemoteProtocol { } - // required string uuid = 1; + // required .UuidProtocol uuid = 1; public boolean hasUuid() { return result.hasUuid(); } - public java.lang.String getUuid() { + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol getUuid() { return result.getUuid(); } - public Builder setUuid(java.lang.String value) { + public Builder setUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol value) { if (value == null) { - throw new NullPointerException(); - } - result.hasUuid = true; + throw new NullPointerException(); + } + result.hasUuid = true; result.uuid_ = value; return this; } + public Builder setUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.Builder builderForValue) { + result.hasUuid = true; + result.uuid_ = builderForValue.build(); + return this; + } + public Builder mergeUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol value) { + if (result.hasUuid() && + result.uuid_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance()) { + result.uuid_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.newBuilder(result.uuid_).mergeFrom(value).buildPartial(); + } else { + result.uuid_ = value; + } + result.hasUuid = true; + return this; + } public Builder clearUuid() { result.hasUuid = false; - result.uuid_ = getDefaultInstance().getUuid(); + result.uuid_ = 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance(); return this; } @@ -1966,12 +1989,12 @@ public final class RemoteProtocol { return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_ActorInfoProtocol_fieldAccessorTable; } - // required string uuid = 1; + // required .UuidProtocol uuid = 1; public static final int UUID_FIELD_NUMBER = 1; private boolean hasUuid; - private java.lang.String uuid_ = ""; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol uuid_; public boolean hasUuid() { return hasUuid; } - public java.lang.String getUuid() { return uuid_; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol getUuid() { return uuid_; } // required string target = 2; public static final int TARGET_FIELD_NUMBER = 2; @@ -2009,6 +2032,7 @@ public final class RemoteProtocol { public java.lang.String getId() { return id_; } private void initFields() { + uuid_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance(); actorType_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType.SCALA_ACTOR; typedActorInfo_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDefaultInstance(); } @@ -2017,6 +2041,7 @@ public final class RemoteProtocol { if (!hasTarget) return false; if (!hasTimeout) return false; if (!hasActorType) return false; + if (!getUuid().isInitialized()) return false; if (hasTypedActorInfo()) { if (!getTypedActorInfo().isInitialized()) return false; } @@ -2027,7 +2052,7 @@ public final class RemoteProtocol { throws java.io.IOException { getSerializedSize(); if (hasUuid()) { - output.writeString(1, getUuid()); + output.writeMessage(1, getUuid()); } if (hasTarget()) { output.writeString(2, getTarget()); @@ -2055,7 +2080,7 @@ public final class RemoteProtocol { size = 0; if (hasUuid()) { size += com.google.protobuf.CodedOutputStream - .computeStringSize(1, getUuid()); + .computeMessageSize(1, getUuid()); } if (hasTarget()) { size += com.google.protobuf.CodedOutputStream @@ -2236,7 +2261,7 @@ public final class RemoteProtocol { public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol other) { if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDefaultInstance()) return this; if (other.hasUuid()) { - setUuid(other.getUuid()); + mergeUuid(other.getUuid()); } if (other.hasTarget()) { setTarget(other.getTarget()); @@ -2279,7 +2304,12 @@ public final class RemoteProtocol { break; } case 10: { - setUuid(input.readString()); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.newBuilder(); + if (hasUuid()) { + subBuilder.mergeFrom(getUuid()); + } + input.readMessage(subBuilder, extensionRegistry); + setUuid(subBuilder.buildPartial()); break; } case 18: { @@ -2318,24 +2348,40 @@ public final class RemoteProtocol { } - // required string uuid = 1; + // required .UuidProtocol uuid = 1; public boolean hasUuid() { return result.hasUuid(); } - public java.lang.String getUuid() { + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol getUuid() { return result.getUuid(); } - public Builder setUuid(java.lang.String value) { + public Builder setUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol value) { if (value == null) { - throw new NullPointerException(); - } - 
result.hasUuid = true; + throw new NullPointerException(); + } + result.hasUuid = true; result.uuid_ = value; return this; } + public Builder setUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.Builder builderForValue) { + result.hasUuid = true; + result.uuid_ = builderForValue.build(); + return this; + } + public Builder mergeUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol value) { + if (result.hasUuid() && + result.uuid_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance()) { + result.uuid_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.newBuilder(result.uuid_).mergeFrom(value).buildPartial(); + } else { + result.uuid_ = value; + } + result.hasUuid = true; + return this; + } public Builder clearUuid() { result.hasUuid = false; - result.uuid_ = getDefaultInstance().getUuid(); + result.uuid_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance(); return this; } @@ -2827,12 +2873,12 @@ public final class RemoteProtocol { return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_RemoteRequestProtocol_fieldAccessorTable; } - // required uint64 id = 1; - public static final int ID_FIELD_NUMBER = 1; - private boolean hasId; - private long id_ = 0L; - public boolean hasId() { return hasId; } - public long getId() { return id_; } + // required .UuidProtocol uuid = 1; + public static final int UUID_FIELD_NUMBER = 1; + private boolean hasUuid; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol uuid_; + public boolean hasUuid() { return hasUuid; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol getUuid() { return uuid_; } // required .MessageProtocol message = 2; public static final int MESSAGE_FIELD_NUMBER = 2; @@ -2855,12 +2901,12 @@ public final class RemoteProtocol { public boolean hasIsOneWay() { return hasIsOneWay; } public boolean getIsOneWay() { return isOneWay_; } - // optional string supervisorUuid = 5; + // optional .UuidProtocol supervisorUuid = 5; public static final int SUPERVISORUUID_FIELD_NUMBER = 5; private boolean hasSupervisorUuid; - private java.lang.String supervisorUuid_ = ""; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol supervisorUuid_; public boolean hasSupervisorUuid() { return hasSupervisorUuid; } - public java.lang.String getSupervisorUuid() { return supervisorUuid_; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol getSupervisorUuid() { return supervisorUuid_; } // optional .RemoteActorRefProtocol sender = 6; public static final int SENDER_FIELD_NUMBER = 6; @@ -2882,17 +2928,23 @@ public final class RemoteProtocol { } private void initFields() { + uuid_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance(); message_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.getDefaultInstance(); actorInfo_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDefaultInstance(); + supervisorUuid_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance(); sender_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDefaultInstance(); } public final boolean isInitialized() { - if (!hasId) return false; + if (!hasUuid) return false; if (!hasMessage) return false; if (!hasActorInfo) return false; if (!hasIsOneWay) return false; + 
if (!getUuid().isInitialized()) return false; if (!getMessage().isInitialized()) return false; if (!getActorInfo().isInitialized()) return false; + if (hasSupervisorUuid()) { + if (!getSupervisorUuid().isInitialized()) return false; + } if (hasSender()) { if (!getSender().isInitialized()) return false; } @@ -2905,8 +2957,8 @@ public final class RemoteProtocol { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - if (hasId()) { - output.writeUInt64(1, getId()); + if (hasUuid()) { + output.writeMessage(1, getUuid()); } if (hasMessage()) { output.writeMessage(2, getMessage()); @@ -2918,7 +2970,7 @@ public final class RemoteProtocol { output.writeBool(4, getIsOneWay()); } if (hasSupervisorUuid()) { - output.writeString(5, getSupervisorUuid()); + output.writeMessage(5, getSupervisorUuid()); } if (hasSender()) { output.writeMessage(6, getSender()); @@ -2935,9 +2987,9 @@ public final class RemoteProtocol { if (size != -1) return size; size = 0; - if (hasId()) { + if (hasUuid()) { size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, getId()); + .computeMessageSize(1, getUuid()); } if (hasMessage()) { size += com.google.protobuf.CodedOutputStream @@ -2953,7 +3005,7 @@ public final class RemoteProtocol { } if (hasSupervisorUuid()) { size += com.google.protobuf.CodedOutputStream - .computeStringSize(5, getSupervisorUuid()); + .computeMessageSize(5, getSupervisorUuid()); } if (hasSender()) { size += com.google.protobuf.CodedOutputStream @@ -3125,8 +3177,8 @@ public final class RemoteProtocol { public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol other) { if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.getDefaultInstance()) return this; - if (other.hasId()) { - setId(other.getId()); + if (other.hasUuid()) { + mergeUuid(other.getUuid()); } if (other.hasMessage()) { mergeMessage(other.getMessage()); @@ -3138,7 +3190,7 @@ public final class RemoteProtocol { setIsOneWay(other.getIsOneWay()); } if (other.hasSupervisorUuid()) { - setSupervisorUuid(other.getSupervisorUuid()); + mergeSupervisorUuid(other.getSupervisorUuid()); } if (other.hasSender()) { mergeSender(other.getSender()); @@ -3174,8 +3226,13 @@ public final class RemoteProtocol { } break; } - case 8: { - setId(input.readUInt64()); + case 10: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.newBuilder(); + if (hasUuid()) { + subBuilder.mergeFrom(getUuid()); + } + input.readMessage(subBuilder, extensionRegistry); + setUuid(subBuilder.buildPartial()); break; } case 18: { @@ -3201,7 +3258,12 @@ public final class RemoteProtocol { break; } case 42: { - setSupervisorUuid(input.readString()); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.newBuilder(); + if (hasSupervisorUuid()) { + subBuilder.mergeFrom(getSupervisorUuid()); + } + input.readMessage(subBuilder, extensionRegistry); + setSupervisorUuid(subBuilder.buildPartial()); break; } case 50: { @@ -3224,21 +3286,40 @@ public final class RemoteProtocol { } - // required uint64 id = 1; - public boolean hasId() { - return result.hasId(); + // required .UuidProtocol uuid = 1; + public boolean hasUuid() { + return result.hasUuid(); } - public long getId() { - return result.getId(); + public 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol getUuid() { + return result.getUuid(); } - public Builder setId(long value) { - result.hasId = true; - result.id_ = value; + public Builder setUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasUuid = true; + result.uuid_ = value; return this; } - public Builder clearId() { - result.hasId = false; - result.id_ = 0L; + public Builder setUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.Builder builderForValue) { + result.hasUuid = true; + result.uuid_ = builderForValue.build(); + return this; + } + public Builder mergeUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol value) { + if (result.hasUuid() && + result.uuid_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance()) { + result.uuid_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.newBuilder(result.uuid_).mergeFrom(value).buildPartial(); + } else { + result.uuid_ = value; + } + result.hasUuid = true; + return this; + } + public Builder clearUuid() { + result.hasUuid = false; + result.uuid_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance(); return this; } @@ -3334,24 +3415,40 @@ public final class RemoteProtocol { return this; } - // optional string supervisorUuid = 5; + // optional .UuidProtocol supervisorUuid = 5; public boolean hasSupervisorUuid() { return result.hasSupervisorUuid(); } - public java.lang.String getSupervisorUuid() { + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol getSupervisorUuid() { return result.getSupervisorUuid(); } - public Builder setSupervisorUuid(java.lang.String value) { + public Builder setSupervisorUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol value) { if (value == null) { - throw new NullPointerException(); - } - result.hasSupervisorUuid = true; + throw new NullPointerException(); + } + result.hasSupervisorUuid = true; result.supervisorUuid_ = value; return this; } + public Builder setSupervisorUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.Builder builderForValue) { + result.hasSupervisorUuid = true; + result.supervisorUuid_ = builderForValue.build(); + return this; + } + public Builder mergeSupervisorUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol value) { + if (result.hasSupervisorUuid() && + result.supervisorUuid_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance()) { + result.supervisorUuid_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.newBuilder(result.supervisorUuid_).mergeFrom(value).buildPartial(); + } else { + result.supervisorUuid_ = value; + } + result.hasSupervisorUuid = true; + return this; + } public Builder clearSupervisorUuid() { result.hasSupervisorUuid = false; - result.supervisorUuid_ = getDefaultInstance().getSupervisorUuid(); + result.supervisorUuid_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance(); return this; } @@ -3482,12 +3579,12 @@ public final class RemoteProtocol { return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_RemoteReplyProtocol_fieldAccessorTable; } - // required uint64 id = 1; - public static final int ID_FIELD_NUMBER = 1; - private boolean hasId; - private long id_ = 0L; - public 
boolean hasId() { return hasId; } - public long getId() { return id_; } + // required .UuidProtocol uuid = 1; + public static final int UUID_FIELD_NUMBER = 1; + private boolean hasUuid; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol uuid_; + public boolean hasUuid() { return hasUuid; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol getUuid() { return uuid_; } // optional .MessageProtocol message = 2; public static final int MESSAGE_FIELD_NUMBER = 2; @@ -3503,12 +3600,12 @@ public final class RemoteProtocol { public boolean hasException() { return hasException; } public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol getException() { return exception_; } - // optional string supervisorUuid = 4; + // optional .UuidProtocol supervisorUuid = 4; public static final int SUPERVISORUUID_FIELD_NUMBER = 4; private boolean hasSupervisorUuid; - private java.lang.String supervisorUuid_ = ""; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol supervisorUuid_; public boolean hasSupervisorUuid() { return hasSupervisorUuid; } - public java.lang.String getSupervisorUuid() { return supervisorUuid_; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol getSupervisorUuid() { return supervisorUuid_; } // required bool isActor = 5; public static final int ISACTOR_FIELD_NUMBER = 5; @@ -3537,19 +3634,25 @@ public final class RemoteProtocol { } private void initFields() { + uuid_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance(); message_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.getDefaultInstance(); exception_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.getDefaultInstance(); + supervisorUuid_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance(); } public final boolean isInitialized() { - if (!hasId) return false; + if (!hasUuid) return false; if (!hasIsActor) return false; if (!hasIsSuccessful) return false; + if (!getUuid().isInitialized()) return false; if (hasMessage()) { if (!getMessage().isInitialized()) return false; } if (hasException()) { if (!getException().isInitialized()) return false; } + if (hasSupervisorUuid()) { + if (!getSupervisorUuid().isInitialized()) return false; + } for (se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MetadataEntryProtocol element : getMetadataList()) { if (!element.isInitialized()) return false; } @@ -3559,8 +3662,8 @@ public final class RemoteProtocol { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - if (hasId()) { - output.writeUInt64(1, getId()); + if (hasUuid()) { + output.writeMessage(1, getUuid()); } if (hasMessage()) { output.writeMessage(2, getMessage()); @@ -3569,7 +3672,7 @@ public final class RemoteProtocol { output.writeMessage(3, getException()); } if (hasSupervisorUuid()) { - output.writeString(4, getSupervisorUuid()); + output.writeMessage(4, getSupervisorUuid()); } if (hasIsActor()) { output.writeBool(5, getIsActor()); @@ -3589,9 +3692,9 @@ public final class RemoteProtocol { if (size != -1) return size; size = 0; - if (hasId()) { + if (hasUuid()) { size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, getId()); + .computeMessageSize(1, getUuid()); } if (hasMessage()) { size += com.google.protobuf.CodedOutputStream @@ -3603,7 +3706,7 @@ public final class RemoteProtocol { } 
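    // Wire-format note: field 1 changed from a varint uint64 (tag
    // (1 << 3) | 0 = 8, sized with computeUInt64Size) to an embedded message
    // (tag (1 << 3) | 2 = 10, sized with computeMessageSize), which is why the
    // parser case for it moves from 8 to 10 below. supervisorUuid keeps its
    // tag (34 here, 42 in RemoteRequestProtocol) because string and message
    // fields share the length-delimited wire type.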
if (hasSupervisorUuid()) { size += com.google.protobuf.CodedOutputStream - .computeStringSize(4, getSupervisorUuid()); + .computeMessageSize(4, getSupervisorUuid()); } if (hasIsActor()) { size += com.google.protobuf.CodedOutputStream @@ -3779,8 +3882,8 @@ public final class RemoteProtocol { public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol other) { if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol.getDefaultInstance()) return this; - if (other.hasId()) { - setId(other.getId()); + if (other.hasUuid()) { + mergeUuid(other.getUuid()); } if (other.hasMessage()) { mergeMessage(other.getMessage()); @@ -3789,7 +3892,7 @@ public final class RemoteProtocol { mergeException(other.getException()); } if (other.hasSupervisorUuid()) { - setSupervisorUuid(other.getSupervisorUuid()); + mergeSupervisorUuid(other.getSupervisorUuid()); } if (other.hasIsActor()) { setIsActor(other.getIsActor()); @@ -3828,8 +3931,13 @@ public final class RemoteProtocol { } break; } - case 8: { - setId(input.readUInt64()); + case 10: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.newBuilder(); + if (hasUuid()) { + subBuilder.mergeFrom(getUuid()); + } + input.readMessage(subBuilder, extensionRegistry); + setUuid(subBuilder.buildPartial()); break; } case 18: { @@ -3851,7 +3959,12 @@ public final class RemoteProtocol { break; } case 34: { - setSupervisorUuid(input.readString()); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.newBuilder(); + if (hasSupervisorUuid()) { + subBuilder.mergeFrom(getSupervisorUuid()); + } + input.readMessage(subBuilder, extensionRegistry); + setSupervisorUuid(subBuilder.buildPartial()); break; } case 40: { @@ -3873,21 +3986,40 @@ public final class RemoteProtocol { } - // required uint64 id = 1; - public boolean hasId() { - return result.hasId(); + // required .UuidProtocol uuid = 1; + public boolean hasUuid() { + return result.hasUuid(); } - public long getId() { - return result.getId(); + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol getUuid() { + return result.getUuid(); } - public Builder setId(long value) { - result.hasId = true; - result.id_ = value; + public Builder setUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasUuid = true; + result.uuid_ = value; return this; } - public Builder clearId() { - result.hasId = false; - result.id_ = 0L; + public Builder setUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.Builder builderForValue) { + result.hasUuid = true; + result.uuid_ = builderForValue.build(); + return this; + } + public Builder mergeUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol value) { + if (result.hasUuid() && + result.uuid_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance()) { + result.uuid_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.newBuilder(result.uuid_).mergeFrom(value).buildPartial(); + } else { + result.uuid_ = value; + } + result.hasUuid = true; + return this; + } + public Builder clearUuid() { + result.hasUuid = false; + result.uuid_ = 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance(); return this; } @@ -3965,24 +4097,40 @@ public final class RemoteProtocol { return this; } - // optional string supervisorUuid = 4; + // optional .UuidProtocol supervisorUuid = 4; public boolean hasSupervisorUuid() { return result.hasSupervisorUuid(); } - public java.lang.String getSupervisorUuid() { + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol getSupervisorUuid() { return result.getSupervisorUuid(); } - public Builder setSupervisorUuid(java.lang.String value) { + public Builder setSupervisorUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol value) { if (value == null) { - throw new NullPointerException(); - } - result.hasSupervisorUuid = true; + throw new NullPointerException(); + } + result.hasSupervisorUuid = true; result.supervisorUuid_ = value; return this; } + public Builder setSupervisorUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.Builder builderForValue) { + result.hasSupervisorUuid = true; + result.supervisorUuid_ = builderForValue.build(); + return this; + } + public Builder mergeSupervisorUuid(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol value) { + if (result.hasSupervisorUuid() && + result.supervisorUuid_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance()) { + result.supervisorUuid_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.newBuilder(result.supervisorUuid_).mergeFrom(value).buildPartial(); + } else { + result.supervisorUuid_ = value; + } + result.hasSupervisorUuid = true; + return this; + } public Builder clearSupervisorUuid() { result.hasSupervisorUuid = false; - result.supervisorUuid_ = getDefaultInstance().getSupervisorUuid(); + result.supervisorUuid_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.UuidProtocol.getDefaultInstance(); return this; } @@ -5764,52 +5912,55 @@ public final class RemoteProtocol { descriptor; static { java.lang.String[] descriptorData = { - "\n\024RemoteProtocol.proto\"v\n\026RemoteActorRef" + - "Protocol\022\014\n\004uuid\030\001 \002(\t\022\026\n\016actorClassname" + - "\030\002 \002(\t\022%\n\013homeAddress\030\003 \002(\0132\020.AddressPro" + - "tocol\022\017\n\007timeout\030\004 \001(\004\"\200\003\n\032SerializedAct" + - "orRefProtocol\022\014\n\004uuid\030\001 \002(\t\022\n\n\002id\030\002 \002(\t\022" + - "\026\n\016actorClassname\030\003 \002(\t\022)\n\017originalAddre" + - "ss\030\004 \002(\0132\020.AddressProtocol\022\025\n\ractorInsta" + - "nce\030\005 \001(\014\022\033\n\023serializerClassname\030\006 \001(\t\022\024" + - "\n\014isTransactor\030\007 \001(\010\022\017\n\007timeout\030\010 \001(\004\022\026\n" + - "\016receiveTimeout\030\t \001(\004\022%\n\tlifeCycle\030\n \001(\013", - "2\022.LifeCycleProtocol\022+\n\nsupervisor\030\013 \001(\013" + - "2\027.RemoteActorRefProtocol\022\024\n\014hotswapStac" + - "k\030\014 \001(\014\022(\n\010messages\030\r \003(\0132\026.RemoteReques" + - "tProtocol\"r\n\017MessageProtocol\0225\n\023serializ" + - "ationScheme\030\001 \002(\0162\030.SerializationSchemeT" + - "ype\022\017\n\007message\030\002 \002(\014\022\027\n\017messageManifest\030" + - "\003 \001(\014\"\236\001\n\021ActorInfoProtocol\022\014\n\004uuid\030\001 \002(" + - "\t\022\016\n\006target\030\002 \002(\t\022\017\n\007timeout\030\003 \002(\004\022\035\n\tac" + - "torType\030\004 \002(\0162\n.ActorType\022/\n\016typedActorI" + - "nfo\030\005 
\001(\0132\027.TypedActorInfoProtocol\022\n\n\002id", - "\030\006 \001(\t\";\n\026TypedActorInfoProtocol\022\021\n\tinte" + - "rface\030\001 \002(\t\022\016\n\006method\030\002 \002(\t\"\352\001\n\025RemoteRe" + - "questProtocol\022\n\n\002id\030\001 \002(\004\022!\n\007message\030\002 \002" + - "(\0132\020.MessageProtocol\022%\n\tactorInfo\030\003 \002(\0132" + - "\022.ActorInfoProtocol\022\020\n\010isOneWay\030\004 \002(\010\022\026\n" + - "\016supervisorUuid\030\005 \001(\t\022\'\n\006sender\030\006 \001(\0132\027." + - "RemoteActorRefProtocol\022(\n\010metadata\030\007 \003(\013" + - "2\026.MetadataEntryProtocol\"\324\001\n\023RemoteReply" + - "Protocol\022\n\n\002id\030\001 \002(\004\022!\n\007message\030\002 \001(\0132\020." + - "MessageProtocol\022%\n\texception\030\003 \001(\0132\022.Exc", - "eptionProtocol\022\026\n\016supervisorUuid\030\004 \001(\t\022\017" + - "\n\007isActor\030\005 \002(\010\022\024\n\014isSuccessful\030\006 \002(\010\022(\n" + - "\010metadata\030\007 \003(\0132\026.MetadataEntryProtocol\"" + - ")\n\014UuidProtocol\022\014\n\004high\030\001 \002(\004\022\013\n\003low\030\002 \002" + - "(\004\"3\n\025MetadataEntryProtocol\022\013\n\003key\030\001 \002(\t" + - "\022\r\n\005value\030\002 \002(\014\"6\n\021LifeCycleProtocol\022!\n\t" + - "lifeCycle\030\001 \002(\0162\016.LifeCycleType\"1\n\017Addre" + - "ssProtocol\022\020\n\010hostname\030\001 \002(\t\022\014\n\004port\030\002 \002" + - "(\r\"7\n\021ExceptionProtocol\022\021\n\tclassname\030\001 \002" + - "(\t\022\017\n\007message\030\002 \002(\t*=\n\tActorType\022\017\n\013SCAL", - "A_ACTOR\020\001\022\016\n\nJAVA_ACTOR\020\002\022\017\n\013TYPED_ACTOR" + - "\020\003*]\n\027SerializationSchemeType\022\010\n\004JAVA\020\001\022" + - "\013\n\007SBINARY\020\002\022\016\n\nSCALA_JSON\020\003\022\r\n\tJAVA_JSO" + - "N\020\004\022\014\n\010PROTOBUF\020\005*-\n\rLifeCycleType\022\r\n\tPE" + - "RMANENT\020\001\022\r\n\tTEMPORARY\020\002B-\n)se.scalables" + - "olutions.akka.remote.protocolH\001" + "\n\024RemoteProtocol.proto\"\204\001\n\026RemoteActorRe" + + "fProtocol\022\032\n\022classOrServiceName\030\001 \002(\t\022\026\n" + + "\016actorClassname\030\002 \002(\t\022%\n\013homeAddress\030\003 \002" + + "(\0132\020.AddressProtocol\022\017\n\007timeout\030\004 \001(\004\"\217\003" + + "\n\032SerializedActorRefProtocol\022\033\n\004uuid\030\001 \002" + + "(\0132\r.UuidProtocol\022\n\n\002id\030\002 \002(\t\022\026\n\016actorCl" + + "assname\030\003 \002(\t\022)\n\017originalAddress\030\004 \002(\0132\020" + + ".AddressProtocol\022\025\n\ractorInstance\030\005 \001(\014\022" + + "\033\n\023serializerClassname\030\006 \001(\t\022\024\n\014isTransa" + + "ctor\030\007 \001(\010\022\017\n\007timeout\030\010 \001(\004\022\026\n\016receiveTi", + "meout\030\t \001(\004\022%\n\tlifeCycle\030\n \001(\0132\022.LifeCyc" + + "leProtocol\022+\n\nsupervisor\030\013 \001(\0132\027.RemoteA" + + "ctorRefProtocol\022\024\n\014hotswapStack\030\014 \001(\014\022(\n" + + "\010messages\030\r \003(\0132\026.RemoteRequestProtocol\"" + + "r\n\017MessageProtocol\0225\n\023serializationSchem" + + "e\030\001 \002(\0162\030.SerializationSchemeType\022\017\n\007mes" + + "sage\030\002 \002(\014\022\027\n\017messageManifest\030\003 \001(\014\"\255\001\n\021" + + "ActorInfoProtocol\022\033\n\004uuid\030\001 \002(\0132\r.UuidPr" + + "otocol\022\016\n\006target\030\002 \002(\t\022\017\n\007timeout\030\003 \002(\004\022" + + "\035\n\tactorType\030\004 \002(\0162\n.ActorType\022/\n\016typedA", + "ctorInfo\030\005 \001(\0132\027.TypedActorInfoProtocol\022" + + 
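      // The recurring byte run \022\033\n\004uuid\030\001 \002(\0132\r.UuidProtocol in the
      // regenerated descriptor text is a serialized FieldDescriptorProto:
      // name "uuid", number 1, label LABEL_REQUIRED (2), type TYPE_MESSAGE (11),
      // type_name ".UuidProtocol" (13 bytes). It marks each former scalar
      // uuid/id field re-declared as an embedded UuidProtocol message.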
"\n\n\002id\030\006 \001(\t\";\n\026TypedActorInfoProtocol\022\021\n" + + "\tinterface\030\001 \002(\t\022\016\n\006method\030\002 \002(\t\"\212\002\n\025Rem" + + "oteRequestProtocol\022\033\n\004uuid\030\001 \002(\0132\r.UuidP" + + "rotocol\022!\n\007message\030\002 \002(\0132\020.MessageProtoc" + + "ol\022%\n\tactorInfo\030\003 \002(\0132\022.ActorInfoProtoco" + + "l\022\020\n\010isOneWay\030\004 \002(\010\022%\n\016supervisorUuid\030\005 " + + "\001(\0132\r.UuidProtocol\022\'\n\006sender\030\006 \001(\0132\027.Rem" + + "oteActorRefProtocol\022(\n\010metadata\030\007 \003(\0132\026." + + "MetadataEntryProtocol\"\364\001\n\023RemoteReplyPro", + "tocol\022\033\n\004uuid\030\001 \002(\0132\r.UuidProtocol\022!\n\007me" + + "ssage\030\002 \001(\0132\020.MessageProtocol\022%\n\texcepti" + + "on\030\003 \001(\0132\022.ExceptionProtocol\022%\n\016supervis" + + "orUuid\030\004 \001(\0132\r.UuidProtocol\022\017\n\007isActor\030\005" + + " \002(\010\022\024\n\014isSuccessful\030\006 \002(\010\022(\n\010metadata\030\007" + + " \003(\0132\026.MetadataEntryProtocol\")\n\014UuidProt" + + "ocol\022\014\n\004high\030\001 \002(\004\022\013\n\003low\030\002 \002(\004\"3\n\025Metad" + + "ataEntryProtocol\022\013\n\003key\030\001 \002(\t\022\r\n\005value\030\002" + + " \002(\014\"6\n\021LifeCycleProtocol\022!\n\tlifeCycle\030\001" + + " \002(\0162\016.LifeCycleType\"1\n\017AddressProtocol\022", + "\020\n\010hostname\030\001 \002(\t\022\014\n\004port\030\002 \002(\r\"7\n\021Excep" + + "tionProtocol\022\021\n\tclassname\030\001 \002(\t\022\017\n\007messa" + + "ge\030\002 \002(\t*=\n\tActorType\022\017\n\013SCALA_ACTOR\020\001\022\016" + + "\n\nJAVA_ACTOR\020\002\022\017\n\013TYPED_ACTOR\020\003*]\n\027Seria" + + "lizationSchemeType\022\010\n\004JAVA\020\001\022\013\n\007SBINARY\020" + + "\002\022\016\n\nSCALA_JSON\020\003\022\r\n\tJAVA_JSON\020\004\022\014\n\010PROT" + + "OBUF\020\005*-\n\rLifeCycleType\022\r\n\tPERMANENT\020\001\022\r" + + "\n\tTEMPORARY\020\002B-\n)se.scalablesolutions.ak" + + "ka.remote.protocolH\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -5821,7 +5972,7 @@ public final class RemoteProtocol { internal_static_RemoteActorRefProtocol_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RemoteActorRefProtocol_descriptor, - new java.lang.String[] { "Uuid", "ActorClassname", "HomeAddress", "Timeout", }, + new java.lang.String[] { "ClassOrServiceName", "ActorClassname", "HomeAddress", "Timeout", }, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.class, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.Builder.class); internal_static_SerializedActorRefProtocol_descriptor = @@ -5861,7 +6012,7 @@ public final class RemoteProtocol { internal_static_RemoteRequestProtocol_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RemoteRequestProtocol_descriptor, - new java.lang.String[] { "Id", "Message", "ActorInfo", "IsOneWay", "SupervisorUuid", "Sender", "Metadata", }, + new java.lang.String[] { "Uuid", "Message", "ActorInfo", "IsOneWay", "SupervisorUuid", "Sender", "Metadata", }, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.class, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.Builder.class); internal_static_RemoteReplyProtocol_descriptor = @@ -5869,7 
+6020,7 @@ public final class RemoteProtocol { internal_static_RemoteReplyProtocol_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RemoteReplyProtocol_descriptor, - new java.lang.String[] { "Id", "Message", "Exception", "SupervisorUuid", "IsActor", "IsSuccessful", "Metadata", }, + new java.lang.String[] { "Uuid", "Message", "Exception", "SupervisorUuid", "IsActor", "IsSuccessful", "Metadata", }, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol.class, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol.Builder.class); internal_static_UuidProtocol_descriptor = diff --git a/akka-remote/src/main/protocol/RemoteProtocol.proto b/akka-remote/src/main/protocol/RemoteProtocol.proto index 4f2fa5c2dd..55c3ae35fd 100644 --- a/akka-remote/src/main/protocol/RemoteProtocol.proto +++ b/akka-remote/src/main/protocol/RemoteProtocol.proto @@ -16,7 +16,7 @@ option optimize_for = SPEED; * on the original node. */ message RemoteActorRefProtocol { - required UuidProtocol uuid = 1; + required string classOrServiceName = 1; required string actorClassname = 2; required AddressProtocol homeAddress = 3; optional uint64 timeout = 4; @@ -80,7 +80,7 @@ message RemoteRequestProtocol { required MessageProtocol message = 2; required ActorInfoProtocol actorInfo = 3; required bool isOneWay = 4; - optional string supervisorUuid = 5; + optional UuidProtocol supervisorUuid = 5; optional RemoteActorRefProtocol sender = 6; repeated MetadataEntryProtocol metadata = 7; } @@ -92,7 +92,7 @@ message RemoteReplyProtocol { required UuidProtocol uuid = 1; optional MessageProtocol message = 2; optional ExceptionProtocol exception = 3; - optional string supervisorUuid = 4; + optional UuidProtocol supervisorUuid = 4; required bool isActor = 5; required bool isSuccessful = 6; repeated MetadataEntryProtocol metadata = 7; diff --git a/akka-remote/src/main/scala/remote/RemoteClient.scala b/akka-remote/src/main/scala/remote/RemoteClient.scala index 26cc275956..bb9714bc61 100644 --- a/akka-remote/src/main/scala/remote/RemoteClient.scala +++ b/akka-remote/src/main/scala/remote/RemoteClient.scala @@ -7,12 +7,12 @@ package se.scalablesolutions.akka.remote import se.scalablesolutions.akka.remote.protocol.RemoteProtocol.{ActorType => ActorTypeProtocol, _} import se.scalablesolutions.akka.actor.{Exit, Actor, ActorRef, ActorType, RemoteActorRef, IllegalActorStateException} import se.scalablesolutions.akka.dispatch.{DefaultCompletableFuture, CompletableFuture} -import se.scalablesolutions.akka.util.{ListenerManagement, UUID, Logging, Duration} +import se.scalablesolutions.akka.util.{ListenerManagement, Logging, Duration} +import se.scalablesolutions.akka.actor.{Uuid,newUuid,uuidFrom} import se.scalablesolutions.akka.config.Config._ import se.scalablesolutions.akka.serialization.RemoteActorSerialization._ import se.scalablesolutions.akka.AkkaException import Actor._ - import org.jboss.netty.channel._ import group.DefaultChannelGroup import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory @@ -63,7 +63,7 @@ object RemoteClient extends Logging { val RECONNECT_DELAY = Duration(config.getInt("akka.remote.client.reconnect-delay", 5), TIME_UNIT) private val remoteClients = new HashMap[String, RemoteClient] - private val remoteActors = new HashMap[RemoteServer.Address, HashSet[String]] + private val remoteActors = new HashMap[RemoteServer.Address, HashSet[Uuid]] def actorFor(classNameOrServiceId: String, hostname: String, port: Int): ActorRef = 
actorFor(classNameOrServiceId, classNameOrServiceId, 5000L, hostname, port, None) @@ -162,21 +162,21 @@ object RemoteClient extends Logging { remoteClients.clear } - def register(hostname: String, port: Int, uuid: String) = synchronized { + def register(hostname: String, port: Int, uuid: Uuid) = synchronized { actorsFor(RemoteServer.Address(hostname, port)) += uuid } - private[akka] def unregister(hostname: String, port: Int, uuid: String) = synchronized { + private[akka] def unregister(hostname: String, port: Int, uuid: Uuid) = synchronized { val set = actorsFor(RemoteServer.Address(hostname, port)) set -= uuid if (set.isEmpty) shutdownClientFor(new InetSocketAddress(hostname, port)) } - private[akka] def actorsFor(remoteServerAddress: RemoteServer.Address): HashSet[String] = { + private[akka] def actorsFor(remoteServerAddress: RemoteServer.Address): HashSet[Uuid] = { val set = remoteActors.get(remoteServerAddress) if (set.isDefined && (set.get ne null)) set.get else { - val remoteActorSet = new HashSet[String] + val remoteActorSet = new HashSet[Uuid] remoteActors.put(remoteServerAddress, remoteActorSet) remoteActorSet } @@ -194,8 +194,8 @@ class RemoteClient private[akka] ( val name = "RemoteClient@" + hostname + "::" + port //FIXME Should these be clear:ed on postStop? - private val futures = new ConcurrentHashMap[Long, CompletableFuture[_]] - private val supervisors = new ConcurrentHashMap[String, ActorRef] + private val futures = new ConcurrentHashMap[Uuid, CompletableFuture[_]] + private val supervisors = new ConcurrentHashMap[Uuid, ActorRef] private val remoteAddress = new InetSocketAddress(hostname, port) @@ -287,7 +287,7 @@ class RemoteClient private[akka] ( futures.synchronized { val futureResult = if (senderFuture.isDefined) senderFuture.get else new DefaultCompletableFuture[T](request.getActorInfo.getTimeout) - futures.put(request.getId, futureResult) + futures.put(uuidFrom(request.getUuid.getHigh,request.getUuid.getLow), futureResult) connection.getChannel.write(request) Some(futureResult) } @@ -330,8 +330,8 @@ class RemoteClient private[akka] ( */ class RemoteClientPipelineFactory( name: String, - futures: ConcurrentMap[Long, CompletableFuture[_]], - supervisors: ConcurrentMap[String, ActorRef], + futures: ConcurrentMap[Uuid, CompletableFuture[_]], + supervisors: ConcurrentMap[Uuid, ActorRef], bootstrap: ClientBootstrap, remoteAddress: SocketAddress, timer: HashedWheelTimer, @@ -370,8 +370,8 @@ class RemoteClientPipelineFactory( @ChannelHandler.Sharable class RemoteClientHandler( val name: String, - val futures: ConcurrentMap[Long, CompletableFuture[_]], - val supervisors: ConcurrentMap[String, ActorRef], + val futures: ConcurrentMap[Uuid, CompletableFuture[_]], + val supervisors: ConcurrentMap[Uuid, ActorRef], val bootstrap: ClientBootstrap, val remoteAddress: SocketAddress, val timer: HashedWheelTimer, @@ -391,14 +391,15 @@ class RemoteClientHandler( val result = event.getMessage if (result.isInstanceOf[RemoteReplyProtocol]) { val reply = result.asInstanceOf[RemoteReplyProtocol] + val replyUuid = uuidFrom(reply.getUuid.getHigh,reply.getUuid.getLow) log.debug("Remote client received RemoteReplyProtocol[\n%s]", reply.toString) - val future = futures.get(reply.getId).asInstanceOf[CompletableFuture[Any]] + val future = futures.get(replyUuid).asInstanceOf[CompletableFuture[Any]] if (reply.getIsSuccessful) { val message = MessageSerializer.deserialize(reply.getMessage) future.completeWithResult(message) } else { if (reply.hasSupervisorUuid()) { - val supervisorUuid = 
reply.getSupervisorUuid + val supervisorUuid = uuidFrom(reply.getSupervisorUuid.getHigh,reply.getSupervisorUuid.getLow) if (!supervisors.containsKey(supervisorUuid)) throw new IllegalActorStateException( "Expected a registered supervisor for UUID [" + supervisorUuid + "] but none was found") val supervisedActor = supervisors.get(supervisorUuid) @@ -408,7 +409,7 @@ class RemoteClientHandler( } future.completeWithException(parseException(reply, client.loader)) } - futures.remove(reply.getId) + futures remove replyUuid } else { val exception = new RemoteClientException("Unknown message received in remote client handler: " + result, client) client.notifyListeners(RemoteClientError(exception, client)) diff --git a/akka-remote/src/main/scala/remote/RemoteServer.scala b/akka-remote/src/main/scala/remote/RemoteServer.scala index c1f25b6d4f..a9841baf8c 100644 --- a/akka-remote/src/main/scala/remote/RemoteServer.scala +++ b/akka-remote/src/main/scala/remote/RemoteServer.scala @@ -11,6 +11,7 @@ import java.util.{Map => JMap} import se.scalablesolutions.akka.actor.{ Actor, TypedActor, ActorRef, IllegalActorStateException, RemoteActorSystemMessage} +import se.scalablesolutions.akka.actor.{Uuid,uuidFrom} import se.scalablesolutions.akka.actor.Actor._ import se.scalablesolutions.akka.util._ import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ @@ -122,19 +123,19 @@ object RemoteServer { } private class RemoteActorSet { - private[RemoteServer] val actors = new ConcurrentHashMap[String, ActorRef] - private[RemoteServer] val typedActors = new ConcurrentHashMap[String, AnyRef] + private[RemoteServer] val actors = new ConcurrentHashMap[Object, ActorRef] + private[RemoteServer] val typedActors = new ConcurrentHashMap[Object, AnyRef] } private val guard = new ReadWriteGuard private val remoteActorSets = Map[Address, RemoteActorSet]() private val remoteServers = Map[Address, RemoteServer]() - private[akka] def registerActor(address: InetSocketAddress, uuid: String, actor: ActorRef) = guard.withWriteGuard { + private[akka] def registerActor(address: InetSocketAddress, uuid: Uuid, actor: ActorRef) = guard.withWriteGuard { actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).actors.put(uuid, actor) } - private[akka] def registerTypedActor(address: InetSocketAddress, uuid: String, typedActor: AnyRef) = guard.withWriteGuard { + private[akka] def registerTypedActor(address: InetSocketAddress, uuid: Uuid, typedActor: AnyRef) = guard.withWriteGuard { actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).typedActors.put(uuid, typedActor) } @@ -504,7 +505,7 @@ class RemoteServerHandler( override def onComplete(result: AnyRef) { log.debug("Returning result from actor invocation [%s]", result) val replyBuilder = RemoteReplyProtocol.newBuilder - .setId(request.getId) + .setUuid(request.getUuid) .setMessage(MessageSerializer.serialize(result)) .setIsSuccessful(true) .setIsActor(true) @@ -548,7 +549,7 @@ class RemoteServerHandler( val result = messageReceiver.invoke(typedActor, args: _*) log.debug("Returning result from remote typed actor invocation [%s]", result) val replyBuilder = RemoteReplyProtocol.newBuilder - .setId(request.getId) + .setUuid(request.getUuid) .setMessage(MessageSerializer.serialize(result)) .setIsSuccessful(true) .setIsActor(false) @@ -569,7 +570,7 @@ class RemoteServerHandler( * Find a registered actor by ID (default) or UUID. * Actors are registered by id apart from registering during serialization see SerializationProtocol. 
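   * The Uuid argument is reconstructed from its wire representation at the
   * call sites, e.g. in createActor below:
   * {{{
   * val uuid = uuidFrom(actorInfo.getUuid.getHigh, actorInfo.getUuid.getLow)
   * }}}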
*/ - private def findActorByIdOrUuid(id: String, uuid: String) : ActorRef = { + private def findActorByIdOrUuid(id: String, uuid: Uuid) : ActorRef = { val registeredActors = server.actors() var actorRefOrNull = registeredActors get id if (actorRefOrNull eq null) { @@ -582,7 +583,7 @@ class RemoteServerHandler( * Find a registered typed actor by ID (default) or UUID. * Actors are registered by id apart from registering during serialization see SerializationProtocol. */ - private def findTypedActorByIdOrUUid(id: String, uuid: String) : AnyRef = { + private def findTypedActorByIdOrUUid(id: String, uuid: Uuid) : AnyRef = { val registeredActors = server.typedActors() var actorRefOrNull = registeredActors get id if (actorRefOrNull eq null) { @@ -599,7 +600,7 @@ class RemoteServerHandler( * Does not start the actor. */ private def createActor(actorInfo: ActorInfoProtocol): ActorRef = { - val uuid = actorInfo.getUuid + val uuid = uuidFrom(actorInfo.getUuid.getHigh,actorInfo.getUuid.getLow) val id = actorInfo.getId val name = actorInfo.getTarget @@ -629,7 +630,7 @@ class RemoteServerHandler( } private def createTypedActor(actorInfo: ActorInfoProtocol): AnyRef = { - val uuid = actorInfo.getUuid + val uuid = uuidFrom(actorInfo.getUuid.getHigh,actorInfo.getUuid.getLow) val id = actorInfo.getId val typedActorOrNull = findTypedActorByIdOrUUid(id, uuid) @@ -664,7 +665,7 @@ class RemoteServerHandler( val actorInfo = request.getActorInfo log.error(e, "Could not invoke remote typed actor [%s :: %s]", actorInfo.getTypedActorInfo.getMethod, actorInfo.getTarget) val replyBuilder = RemoteReplyProtocol.newBuilder - .setId(request.getId) + .setUuid(request.getUuid) .setException(ExceptionProtocol.newBuilder.setClassname(e.getClass.getName).setMessage(e.getMessage).build) .setIsSuccessful(false) .setIsActor(isActor) diff --git a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala index bc1aa9052f..e94565ad97 100644 --- a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala +++ b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala @@ -14,9 +14,8 @@ import se.scalablesolutions.akka.remote.protocol.RemoteProtocol.{ActorType => Ac import ActorTypeProtocol._ import se.scalablesolutions.akka.config.{AllForOneStrategy, OneForOneStrategy, FaultHandlingStrategy} import se.scalablesolutions.akka.config.ScalaConfig._ - +import se.scalablesolutions.akka.actor.{uuidFrom,newUuid} import com.google.protobuf.ByteString -import se.scalablesolutions.akka.util.UUID /** * Type class definition for Actor Serialization @@ -109,7 +108,7 @@ object ActorSerialization { .build val builder = SerializedActorRefProtocol.newBuilder - .setUuid(actorRef.uuid) + .setUuid(UuidProtocol.newBuilder.setHigh(actorRef.uuid.getTime).setLow(actorRef.uuid.getClockSeqAndNode).build) .setId(actorRef.id) .setActorClassname(actorRef.actorClass.getName) .setOriginalAddress(originalAddress) @@ -167,7 +166,7 @@ object ActorSerialization { } val ar = new LocalActorRef( - protocol.getUuid, + uuidFrom(protocol.getUuid.getHigh,protocol.getUuid.getLow), protocol.getId, protocol.getOriginalAddress.getHostname, protocol.getOriginalAddress.getPort, @@ -208,7 +207,7 @@ object RemoteActorSerialization { private[akka] def fromProtobufToRemoteActorRef(protocol: RemoteActorRefProtocol, loader: Option[ClassLoader]): ActorRef = { Actor.log.debug("Deserializing RemoteActorRefProtocol to RemoteActorRef:\n" + protocol) RemoteActorRef( - protocol.getUuid, + 
protocol.getClassOrServiceName, protocol.getActorClassname, protocol.getHomeAddress.getHostname, protocol.getHomeAddress.getPort, @@ -232,7 +231,7 @@ object RemoteActorSerialization { } RemoteActorRefProtocol.newBuilder - .setUuid(uuid) + .setClassOrServiceName(id) .setActorClassname(actorClass.getName) .setHomeAddress(AddressProtocol.newBuilder.setHostname(host).setPort(port).build) .setTimeout(timeout) @@ -250,7 +249,7 @@ object RemoteActorSerialization { import actorRef._ val actorInfoBuilder = ActorInfoProtocol.newBuilder - .setUuid(uuid) + .setUuid(UuidProtocol.newBuilder.setHigh(uuid.getTime).setLow(uuid.getClockSeqAndNode).build) .setId(actorRef.id) .setTarget(actorClassName) .setTimeout(timeout) @@ -270,16 +269,16 @@ object RemoteActorSerialization { val actorInfo = actorInfoBuilder.build val requestBuilder = RemoteRequestProtocol.newBuilder - .setId(UUID.newUuid) + .setUuid(UuidProtocol.newBuilder.setHigh(uuid.getTime).setLow(uuid.getClockSeqAndNode).build) .setMessage(MessageSerializer.serialize(message)) .setActorInfo(actorInfo) .setIsOneWay(isOneWay) val id = registerSupervisorAsRemoteActor - if (id.isDefined) requestBuilder.setSupervisorUuid(id.get) + if (id.isDefined) requestBuilder.setSupervisorUuid(UuidProtocol.newBuilder.setHigh(id.get.getTime).setLow(id.get.getClockSeqAndNode).build) senderOption.foreach { sender => - RemoteServer.getOrCreateServer(sender.homeAddress).register(sender.uuid, sender) + RemoteServer.getOrCreateServer(sender.homeAddress).register(sender.uuid.toString, sender) requestBuilder.setSender(toRemoteActorRefProtocol(sender)) } requestBuilder diff --git a/akka-remote/src/test/scala/ticket/Ticket434Spec.scala b/akka-remote/src/test/scala/ticket/Ticket434Spec.scala index b27c17cfe5..404b8fe22f 100644 --- a/akka-remote/src/test/scala/ticket/Ticket434Spec.scala +++ b/akka-remote/src/test/scala/ticket/Ticket434Spec.scala @@ -6,6 +6,7 @@ package se.scalablesolutions.akka.actor.ticket import org.scalatest.Spec import org.scalatest.matchers.ShouldMatchers import se.scalablesolutions.akka.actor.Actor._ +import se.scalablesolutions.akka.actor.{Uuid,newUuid,uuidFrom} import se.scalablesolutions.akka.actor.remote.ServerInitiatedRemoteActorSpec.RemoteActorSpecActorUnidirectional import java.util.concurrent.TimeUnit import se.scalablesolutions.akka.remote.{RemoteClient, RemoteServer} @@ -32,14 +33,15 @@ class Ticket434Spec extends Spec with ShouldMatchers { describe("The ActorInfoProtocol") { it("should be possible to set the acor id and uuuid") { + val uuid = newUuid val actorInfoBuilder = ActorInfoProtocol.newBuilder - .setUuid("unique-id") + .setUuid(UuidProtocol.newBuilder.setHigh(uuid.getTime).setLow(uuid.getClockSeqAndNode).build) .setId("some-id") .setTarget("actorClassName") .setTimeout(5000L) .setActorType(ActorType.SCALA_ACTOR) val actorInfo = actorInfoBuilder.build - assert(actorInfo.getUuid === "unique-id") + assert(uuidFrom(actorInfo.getUuid.getHigh,actorInfo.getUuid.getLow) === uuid) assert(actorInfo.getId === "some-id") } } diff --git a/akka-typed-actor/src/main/scala/actor/TypedActor.scala b/akka-typed-actor/src/main/scala/actor/TypedActor.scala index c3457cb43b..2ae01a5670 100644 --- a/akka-typed-actor/src/main/scala/actor/TypedActor.scala +++ b/akka-typed-actor/src/main/scala/actor/TypedActor.scala @@ -622,7 +622,7 @@ private[akka] abstract class ActorAspect { protected var typedActor: TypedActor = _ protected var actorRef: ActorRef = _ protected var timeout: Long = _ - protected var uuid: String = _ + protected var uuid: Uuid = _ protected var 
remoteAddress: Option[InetSocketAddress] = _ protected def localDispatch(joinPoint: JoinPoint): AnyRef = { From a6cc67a1df72ad85f05dc6f874d0182949fc02e8 Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Sun, 19 Sep 2010 17:02:15 +0200 Subject: [PATCH 11/52] Its a wrap! --- akka-actor/src/main/scala/actor/Implicits.scala | 4 +++- akka-actor/src/main/scala/util/Logging.scala | 2 +- .../main/scala/component/ActorComponent.scala | 17 +++++++---------- .../scala/component/ActorComponentTest.scala | 11 +++++++---- .../src/main/scala/remote/RemoteServer.scala | 8 ++++---- .../serialization/SerializationProtocol.scala | 2 +- 6 files changed, 23 insertions(+), 21 deletions(-) diff --git a/akka-actor/src/main/scala/actor/Implicits.scala b/akka-actor/src/main/scala/actor/Implicits.scala index f228d45186..9992cd36a1 100644 --- a/akka-actor/src/main/scala/actor/Implicits.scala +++ b/akka-actor/src/main/scala/actor/Implicits.scala @@ -16,5 +16,7 @@ package object actor { type Uuid = com.eaio.uuid.UUID def newUuid(): Uuid = new Uuid() def uuidFrom(time: Long, clockSeqAndNode: Long): Uuid = new Uuid(time,clockSeqAndNode) - def uuidFrom(uuid: String) = new Uuid(uuid) + def uuidFrom(uuid: String): Uuid = { + new Uuid(uuid) + } } diff --git a/akka-actor/src/main/scala/util/Logging.scala b/akka-actor/src/main/scala/util/Logging.scala index b6ddaaa16a..8d2e64be58 100644 --- a/akka-actor/src/main/scala/util/Logging.scala +++ b/akka-actor/src/main/scala/util/Logging.scala @@ -111,7 +111,7 @@ class Logger(val logger: SLFLogger) { warning(message(fmt,arg,argN:_*)) } - def warn(fmt: => String, arg: Any, argN: Any*) = warning(fmt, arg, argN) + def warn(fmt: => String, arg: Any, argN: Any*) = warning(fmt, arg, argN:_*) def warning(msg: => String) { if (warning_?) logger warn msg diff --git a/akka-camel/src/main/scala/component/ActorComponent.scala b/akka-camel/src/main/scala/component/ActorComponent.scala index ccd4c63f0c..fefbeffbb6 100644 --- a/akka-camel/src/main/scala/component/ActorComponent.scala +++ b/akka-camel/src/main/scala/component/ActorComponent.scala @@ -33,18 +33,15 @@ import scala.reflect.BeanProperty */ class ActorComponent extends DefaultComponent { def createEndpoint(uri: String, remaining: String, parameters: JavaMap[String, Object]): ActorEndpoint = { - val idAndUuid = idAndUuidPair(remaining) - new ActorEndpoint(uri, this, idAndUuid._1, idAndUuid._2) + val (id,uuid) = idAndUuidPair(remaining) + new ActorEndpoint(uri, this, id, uuid) } - private def idAndUuidPair(remaining: String): Tuple2[Option[String], Option[Uuid]] = { - remaining split ":" toList match { - case id :: Nil => (Some(id), None) - case "id" :: id :: Nil => (Some(id), None) - case "uuid" :: uuid :: Nil => (None, Some(uuidFrom(uuid))) - case _ => throw new IllegalArgumentException( - "invalid path format: %s - should be or id: or uuid:" format remaining) - } + private def idAndUuidPair(remaining: String): Tuple2[Option[String],Option[Uuid]] = remaining match { + case null => throw new IllegalArgumentException("invalid path format: %s - should be or id: or uuid:" format remaining) + case id if id startsWith "id:" => (Some(id substring 3),None) + case uuid if uuid startsWith "uuid:" => (None,Some(uuidFrom(uuid substring 5))) + case id => (Some(id),None) } } diff --git a/akka-camel/src/test/scala/component/ActorComponentTest.scala b/akka-camel/src/test/scala/component/ActorComponentTest.scala index e27e8c5875..f35e8b3885 100644 --- a/akka-camel/src/test/scala/component/ActorComponentTest.scala +++ 
b/akka-camel/src/test/scala/component/ActorComponentTest.scala @@ -4,10 +4,13 @@ import org.apache.camel.{Endpoint, AsyncProcessor} import org.apache.camel.impl.DefaultCamelContext import org.junit._ import org.scalatest.junit.JUnitSuite +import se.scalablesolutions.akka.actor.uuidFrom class ActorComponentTest extends JUnitSuite { val component: ActorComponent = ActorComponentTest.actorComponent + def testUUID = uuidFrom("93da8c80-c3fd-11df-abed-60334b120057") + @Test def shouldCreateEndpointWithIdDefined = { val ep1: ActorEndpoint = component.createEndpoint("actor:abc").asInstanceOf[ActorEndpoint] val ep2: ActorEndpoint = component.createEndpoint("actor:id:abc").asInstanceOf[ActorEndpoint] @@ -20,15 +23,15 @@ class ActorComponentTest extends JUnitSuite { } @Test def shouldCreateEndpointWithUuidDefined = { - val ep: ActorEndpoint = component.createEndpoint("actor:uuid:abc").asInstanceOf[ActorEndpoint] - assert(ep.uuid === Some("abc")) + val ep: ActorEndpoint = component.createEndpoint("actor:uuid:" + testUUID).asInstanceOf[ActorEndpoint] + assert(ep.uuid === Some(testUUID)) assert(ep.id === None) assert(!ep.blocking) } @Test def shouldCreateEndpointWithBlockingSet = { - val ep: ActorEndpoint = component.createEndpoint("actor:uuid:abc?blocking=true").asInstanceOf[ActorEndpoint] - assert(ep.uuid === Some("abc")) + val ep: ActorEndpoint = component.createEndpoint("actor:uuid:"+testUUID+"?blocking=true").asInstanceOf[ActorEndpoint] + assert(ep.uuid === Some(testUUID)) assert(ep.id === None) assert(ep.blocking) } diff --git a/akka-remote/src/main/scala/remote/RemoteServer.scala b/akka-remote/src/main/scala/remote/RemoteServer.scala index a9841baf8c..bacaf22546 100644 --- a/akka-remote/src/main/scala/remote/RemoteServer.scala +++ b/akka-remote/src/main/scala/remote/RemoteServer.scala @@ -123,8 +123,8 @@ object RemoteServer { } private class RemoteActorSet { - private[RemoteServer] val actors = new ConcurrentHashMap[Object, ActorRef] - private[RemoteServer] val typedActors = new ConcurrentHashMap[Object, AnyRef] + private[RemoteServer] val actors = new ConcurrentHashMap[String, ActorRef] + private[RemoteServer] val typedActors = new ConcurrentHashMap[String, AnyRef] } private val guard = new ReadWriteGuard @@ -132,11 +132,11 @@ object RemoteServer { private val remoteServers = Map[Address, RemoteServer]() private[akka] def registerActor(address: InetSocketAddress, uuid: Uuid, actor: ActorRef) = guard.withWriteGuard { - actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).actors.put(uuid, actor) + actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).actors.put(uuid.toString, actor) } private[akka] def registerTypedActor(address: InetSocketAddress, uuid: Uuid, typedActor: AnyRef) = guard.withWriteGuard { - actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).typedActors.put(uuid, typedActor) + actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).typedActors.put(uuid.toString, typedActor) } private[akka] def getOrCreateServer(address: InetSocketAddress): RemoteServer = guard.withWriteGuard { diff --git a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala index e94565ad97..7997be128b 100644 --- a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala +++ b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala @@ -231,7 +231,7 @@ object RemoteActorSerialization { } RemoteActorRefProtocol.newBuilder - 
.setClassOrServiceName(id) + .setClassOrServiceName(uuid.toString) .setActorClassname(actorClass.getName) .setHomeAddress(AddressProtocol.newBuilder.setHostname(host).setPort(port).build) .setTimeout(timeout) From 5a98ba6ea585829061e23b69572bcea2893db4f5 Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Sun, 19 Sep 2010 17:02:15 +0200 Subject: [PATCH 12/52] Its a wrap! --- akka-actor/src/main/scala/actor/Implicits.scala | 4 +++- akka-actor/src/main/scala/util/Logging.scala | 2 +- .../main/scala/component/ActorComponent.scala | 17 +++++++---------- .../scala/component/ActorComponentTest.scala | 11 +++++++---- .../src/main/scala/remote/RemoteServer.scala | 8 ++++---- .../serialization/SerializationProtocol.scala | 2 +- 6 files changed, 23 insertions(+), 21 deletions(-) diff --git a/akka-actor/src/main/scala/actor/Implicits.scala b/akka-actor/src/main/scala/actor/Implicits.scala index f228d45186..9992cd36a1 100644 --- a/akka-actor/src/main/scala/actor/Implicits.scala +++ b/akka-actor/src/main/scala/actor/Implicits.scala @@ -16,5 +16,7 @@ package object actor { type Uuid = com.eaio.uuid.UUID def newUuid(): Uuid = new Uuid() def uuidFrom(time: Long, clockSeqAndNode: Long): Uuid = new Uuid(time,clockSeqAndNode) - def uuidFrom(uuid: String) = new Uuid(uuid) + def uuidFrom(uuid: String): Uuid = { + new Uuid(uuid) + } } diff --git a/akka-actor/src/main/scala/util/Logging.scala b/akka-actor/src/main/scala/util/Logging.scala index b6ddaaa16a..8d2e64be58 100644 --- a/akka-actor/src/main/scala/util/Logging.scala +++ b/akka-actor/src/main/scala/util/Logging.scala @@ -111,7 +111,7 @@ class Logger(val logger: SLFLogger) { warning(message(fmt,arg,argN:_*)) } - def warn(fmt: => String, arg: Any, argN: Any*) = warning(fmt, arg, argN) + def warn(fmt: => String, arg: Any, argN: Any*) = warning(fmt, arg, argN:_*) def warning(msg: => String) { if (warning_?) 
logger warn msg diff --git a/akka-camel/src/main/scala/component/ActorComponent.scala b/akka-camel/src/main/scala/component/ActorComponent.scala index ccd4c63f0c..e0a70e255c 100644 --- a/akka-camel/src/main/scala/component/ActorComponent.scala +++ b/akka-camel/src/main/scala/component/ActorComponent.scala @@ -33,18 +33,15 @@ import scala.reflect.BeanProperty */ class ActorComponent extends DefaultComponent { def createEndpoint(uri: String, remaining: String, parameters: JavaMap[String, Object]): ActorEndpoint = { - val idAndUuid = idAndUuidPair(remaining) - new ActorEndpoint(uri, this, idAndUuid._1, idAndUuid._2) + val (id,uuid) = idAndUuidPair(remaining) + new ActorEndpoint(uri, this, id, uuid) } - private def idAndUuidPair(remaining: String): Tuple2[Option[String], Option[Uuid]] = { - remaining split ":" toList match { - case id :: Nil => (Some(id), None) - case "id" :: id :: Nil => (Some(id), None) - case "uuid" :: uuid :: Nil => (None, Some(uuidFrom(uuid))) - case _ => throw new IllegalArgumentException( - "invalid path format: %s - should be or id: or uuid:" format remaining) - } + private def idAndUuidPair(remaining: String): Tuple2[Option[String],Option[Uuid]] = remaining match { + case null | "" => throw new IllegalArgumentException("invalid path format: [%s] - should be or id: or uuid:" format remaining) + case id if id startsWith "id:" => (Some(id substring 3),None) + case uuid if uuid startsWith "uuid:" => (None,Some(uuidFrom(uuid substring 5))) + case id => (Some(id),None) } } diff --git a/akka-camel/src/test/scala/component/ActorComponentTest.scala b/akka-camel/src/test/scala/component/ActorComponentTest.scala index e27e8c5875..f35e8b3885 100644 --- a/akka-camel/src/test/scala/component/ActorComponentTest.scala +++ b/akka-camel/src/test/scala/component/ActorComponentTest.scala @@ -4,10 +4,13 @@ import org.apache.camel.{Endpoint, AsyncProcessor} import org.apache.camel.impl.DefaultCamelContext import org.junit._ import org.scalatest.junit.JUnitSuite +import se.scalablesolutions.akka.actor.uuidFrom class ActorComponentTest extends JUnitSuite { val component: ActorComponent = ActorComponentTest.actorComponent + def testUUID = uuidFrom("93da8c80-c3fd-11df-abed-60334b120057") + @Test def shouldCreateEndpointWithIdDefined = { val ep1: ActorEndpoint = component.createEndpoint("actor:abc").asInstanceOf[ActorEndpoint] val ep2: ActorEndpoint = component.createEndpoint("actor:id:abc").asInstanceOf[ActorEndpoint] @@ -20,15 +23,15 @@ class ActorComponentTest extends JUnitSuite { } @Test def shouldCreateEndpointWithUuidDefined = { - val ep: ActorEndpoint = component.createEndpoint("actor:uuid:abc").asInstanceOf[ActorEndpoint] - assert(ep.uuid === Some("abc")) + val ep: ActorEndpoint = component.createEndpoint("actor:uuid:" + testUUID).asInstanceOf[ActorEndpoint] + assert(ep.uuid === Some(testUUID)) assert(ep.id === None) assert(!ep.blocking) } @Test def shouldCreateEndpointWithBlockingSet = { - val ep: ActorEndpoint = component.createEndpoint("actor:uuid:abc?blocking=true").asInstanceOf[ActorEndpoint] - assert(ep.uuid === Some("abc")) + val ep: ActorEndpoint = component.createEndpoint("actor:uuid:"+testUUID+"?blocking=true").asInstanceOf[ActorEndpoint] + assert(ep.uuid === Some(testUUID)) assert(ep.id === None) assert(ep.blocking) } diff --git a/akka-remote/src/main/scala/remote/RemoteServer.scala b/akka-remote/src/main/scala/remote/RemoteServer.scala index a9841baf8c..bacaf22546 100644 --- a/akka-remote/src/main/scala/remote/RemoteServer.scala +++ 
b/akka-remote/src/main/scala/remote/RemoteServer.scala @@ -123,8 +123,8 @@ object RemoteServer { } private class RemoteActorSet { - private[RemoteServer] val actors = new ConcurrentHashMap[Object, ActorRef] - private[RemoteServer] val typedActors = new ConcurrentHashMap[Object, AnyRef] + private[RemoteServer] val actors = new ConcurrentHashMap[String, ActorRef] + private[RemoteServer] val typedActors = new ConcurrentHashMap[String, AnyRef] } private val guard = new ReadWriteGuard @@ -132,11 +132,11 @@ object RemoteServer { private val remoteServers = Map[Address, RemoteServer]() private[akka] def registerActor(address: InetSocketAddress, uuid: Uuid, actor: ActorRef) = guard.withWriteGuard { - actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).actors.put(uuid, actor) + actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).actors.put(uuid.toString, actor) } private[akka] def registerTypedActor(address: InetSocketAddress, uuid: Uuid, typedActor: AnyRef) = guard.withWriteGuard { - actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).typedActors.put(uuid, typedActor) + actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).typedActors.put(uuid.toString, typedActor) } private[akka] def getOrCreateServer(address: InetSocketAddress): RemoteServer = guard.withWriteGuard { diff --git a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala index e94565ad97..7997be128b 100644 --- a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala +++ b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala @@ -231,7 +231,7 @@ object RemoteActorSerialization { } RemoteActorRefProtocol.newBuilder - .setClassOrServiceName(id) + .setClassOrServiceName(uuid.toString) .setActorClassname(actorClass.getName) .setHomeAddress(AddressProtocol.newBuilder.setHostname(host).setPort(port).build) .setTimeout(timeout) From ad213eac52e22ad8b2c33c2892c326b40c569d47 Mon Sep 17 00:00:00 2001 From: ticktock Date: Mon, 20 Sep 2010 21:25:20 -0400 Subject: [PATCH 13/52] fixing the formatting damage I did --- project/build/AkkaProject.scala | 510 +++++++++++++++----------------- 1 file changed, 246 insertions(+), 264 deletions(-) diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index baafa848f8..499440a741 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -17,14 +17,13 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- override def compileOptions = super.compileOptions ++ - Seq("-deprecation", - "-Xmigration", - "-Xcheckinit", - "-Xstrict-warnings", - "-Xwarninit", - "-encoding", "utf8") - .map(x => CompileOption(x)) - + Seq("-deprecation", + "-Xmigration", + "-Xcheckinit", + "-Xstrict-warnings", + "-Xwarninit", + "-encoding", "utf8") + .map(x => CompileOption(x)) override def javaCompileOptions = JavaCompileOption("-Xlint:unchecked") :: super.javaCompileOptions.toList // ------------------------------------------------------------------------------------------------------------------- @@ -33,28 +32,26 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val deployPath = info.projectPath / "deploy" lazy val distPath = info.projectPath / "dist" - def distName = "%s_%s-%s.zip".format(name, buildScalaVersion, version) - - lazy 
val dist = zipTask(allArtifacts, "dist", distName) dependsOn (`package`) describedAs ("Zips up the distribution.")
+  lazy val dist = zipTask(allArtifacts, "dist", distName) dependsOn (`package`) describedAs("Zips up the distribution.")

   // -------------------------------------------------------------------------------------------------------------------
   // All repositories *must* go here! See ModuleConfigurations below.
   // -------------------------------------------------------------------------------------------------------------------

   object Repositories {
-    lazy val AkkaRepo = MavenRepository("Akka Repository", "http://scalablesolutions.se/akka/repository")
-    lazy val CodehausRepo = MavenRepository("Codehaus Repo", "http://repository.codehaus.org")
-    lazy val EmbeddedRepo = MavenRepository("Embedded Repo", (info.projectPath / "embedded-repo").asURL.toString)
+    lazy val AkkaRepo = MavenRepository("Akka Repository", "http://scalablesolutions.se/akka/repository")
+    lazy val CodehausRepo = MavenRepository("Codehaus Repo", "http://repository.codehaus.org")
+    lazy val EmbeddedRepo = MavenRepository("Embedded Repo", (info.projectPath / "embedded-repo").asURL.toString)
     lazy val FusesourceSnapshotRepo = MavenRepository("Fusesource Snapshots", "http://repo.fusesource.com/nexus/content/repositories/snapshots")
-    lazy val GuiceyFruitRepo = MavenRepository("GuiceyFruit Repo", "http://guiceyfruit.googlecode.com/svn/repo/releases/")
-    lazy val JBossRepo = MavenRepository("JBoss Repo", "https://repository.jboss.org/nexus/content/groups/public/")
-    lazy val JavaNetRepo = MavenRepository("java.net Repo", "http://download.java.net/maven/2")
+    lazy val GuiceyFruitRepo = MavenRepository("GuiceyFruit Repo", "http://guiceyfruit.googlecode.com/svn/repo/releases/")
+    lazy val JBossRepo = MavenRepository("JBoss Repo", "https://repository.jboss.org/nexus/content/groups/public/")
+    lazy val JavaNetRepo = MavenRepository("java.net Repo", "http://download.java.net/maven/2")
     lazy val SonatypeSnapshotRepo = MavenRepository("Sonatype OSS Repo", "http://oss.sonatype.org/content/repositories/releases")
-    lazy val SunJDMKRepo = MavenRepository("Sun JDMK Repo", "http://wp5.e-taxonomy.eu/cdmlib/mavenrepo")
-    lazy val CasbahRepoReleases = MavenRepository("Casbah Release Repo", "http://repo.bumnetworks.com/releases")
-    lazy val ClojarsRepo = MavenRepository("Clojars Repo", "http://clojars.org/repo")
-    lazy val OracleRepo = MavenRepository("Oracle Repo", "http://download.oracle.com/maven")
+    lazy val SunJDMKRepo = MavenRepository("Sun JDMK Repo", "http://wp5.e-taxonomy.eu/cdmlib/mavenrepo")
+    lazy val CasbahRepoReleases = MavenRepository("Casbah Release Repo", "http://repo.bumnetworks.com/releases")
+    lazy val ClojarsRepo = MavenRepository("Clojars Repo", "http://clojars.org/repo")
+    lazy val OracleRepo = MavenRepository("Oracle Repo", "http://download.oracle.com/maven")
   }

   // -------------------------------------------------------------------------------------------------------------------
@@ -65,45 +62,45 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
   // -------------------------------------------------------------------------------------------------------------------

   import Repositories._
-  lazy val atmosphereModuleConfig = ModuleConfiguration("org.atmosphere", SonatypeSnapshotRepo)
-  lazy val jettyModuleConfig = ModuleConfiguration("org.eclipse.jetty", sbt.DefaultMavenRepository)
+  lazy val atmosphereModuleConfig = ModuleConfiguration("org.atmosphere", SonatypeSnapshotRepo)
+  lazy val 
jettyModuleConfig = ModuleConfiguration("org.eclipse.jetty", sbt.DefaultMavenRepository) lazy val guiceyFruitModuleConfig = ModuleConfiguration("org.guiceyfruit", GuiceyFruitRepo) // lazy val hawtdispatchModuleConfig = ModuleConfiguration("org.fusesource.hawtdispatch", FusesourceSnapshotRepo) - lazy val jbossModuleConfig = ModuleConfiguration("org.jboss", JBossRepo) - lazy val jdmkModuleConfig = ModuleConfiguration("com.sun.jdmk", SunJDMKRepo) - lazy val jmsModuleConfig = ModuleConfiguration("javax.jms", SunJDMKRepo) - lazy val jmxModuleConfig = ModuleConfiguration("com.sun.jmx", SunJDMKRepo) + lazy val jbossModuleConfig = ModuleConfiguration("org.jboss", JBossRepo) + lazy val jdmkModuleConfig = ModuleConfiguration("com.sun.jdmk", SunJDMKRepo) + lazy val jmsModuleConfig = ModuleConfiguration("javax.jms", SunJDMKRepo) + lazy val jmxModuleConfig = ModuleConfiguration("com.sun.jmx", SunJDMKRepo) lazy val jerseyContrModuleConfig = ModuleConfiguration("com.sun.jersey.contribs", JavaNetRepo) - lazy val jerseyModuleConfig = ModuleConfiguration("com.sun.jersey", JavaNetRepo) - lazy val jgroupsModuleConfig = ModuleConfiguration("jgroups", JBossRepo) - lazy val multiverseModuleConfig = ModuleConfiguration("org.multiverse", CodehausRepo) - lazy val nettyModuleConfig = ModuleConfiguration("org.jboss.netty", JBossRepo) - lazy val scalaTestModuleConfig = ModuleConfiguration("org.scalatest", ScalaToolsSnapshots) - lazy val logbackModuleConfig = ModuleConfiguration("ch.qos.logback", sbt.DefaultMavenRepository) - lazy val atomikosModuleConfig = ModuleConfiguration("com.atomikos", sbt.DefaultMavenRepository) - lazy val casbahRelease = ModuleConfiguration("com.novus", CasbahRepoReleases) - lazy val voldemortModuleConfig = ModuleConfiguration("voldemort", ClojarsRepo) - lazy val sleepycatModuleConfig = ModuleConfiguration("com.sleepycat", OracleRepo) - lazy val embeddedRepo = EmbeddedRepo // This is the only exception, because the embedded repo is fast! + lazy val jerseyModuleConfig = ModuleConfiguration("com.sun.jersey", JavaNetRepo) + lazy val jgroupsModuleConfig = ModuleConfiguration("jgroups", JBossRepo) + lazy val multiverseModuleConfig = ModuleConfiguration("org.multiverse", CodehausRepo) + lazy val nettyModuleConfig = ModuleConfiguration("org.jboss.netty", JBossRepo) + lazy val scalaTestModuleConfig = ModuleConfiguration("org.scalatest", ScalaToolsSnapshots) + lazy val logbackModuleConfig = ModuleConfiguration("ch.qos.logback",sbt.DefaultMavenRepository) + lazy val atomikosModuleConfig = ModuleConfiguration("com.atomikos",sbt.DefaultMavenRepository) + lazy val casbahRelease = ModuleConfiguration("com.novus",CasbahRepoReleases) + lazy val voldemortModuleConfig = ModuleConfiguration("voldemort", ClojarsRepo) + lazy val sleepycatModuleConfig = ModuleConfiguration("com.sleepycat", OracleRepo) + lazy val embeddedRepo = EmbeddedRepo // This is the only exception, because the embedded repo is fast! 
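An aside on the ModuleConfiguration entries above: in sbt 0.7, declaring a ModuleConfiguration as a val inside the project definition pins every dependency of the given organization to one repository, so Ivy queries only that repository instead of scanning all of them -- which is why the build insists that all repositories live in one place. A minimal, self-contained sketch of the pattern, with an invented organization and repository URL (not part of this build):

    import sbt._

    class ExampleProject(info: ProjectInfo) extends DefaultProject(info) {
      // Hypothetical repository and organization, for illustration only.
      lazy val ExampleRepo = MavenRepository("Example Repo", "http://repo.example.org/maven2")
      // Pin everything under "org.example" to ExampleRepo.
      lazy val exampleModuleConfig = ModuleConfiguration("org.example", ExampleRepo)
      // This dependency now resolves against ExampleRepo alone.
      lazy val exampleCore = "org.example" % "example-core" % "1.0" % "compile"
    }
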
// ------------------------------------------------------------------------------------------------------------------- // Versions // ------------------------------------------------------------------------------------------------------------------- - lazy val ATMO_VERSION = "0.6.1" - lazy val CAMEL_VERSION = "2.4.0" - lazy val CASSANDRA_VERSION = "0.6.1" - lazy val DISPATCH_VERSION = "0.7.4" + lazy val ATMO_VERSION = "0.6.1" + lazy val CAMEL_VERSION = "2.4.0" + lazy val CASSANDRA_VERSION = "0.6.1" + lazy val DISPATCH_VERSION = "0.7.4" lazy val HAWT_DISPATCH_VERSION = "1.0" - lazy val JACKSON_VERSION = "1.2.1" - lazy val JERSEY_VERSION = "1.2" - lazy val MULTIVERSE_VERSION = "0.6.1" - lazy val SCALATEST_VERSION = "1.2-for-scala-2.8.0.final-SNAPSHOT" - lazy val LOGBACK_VERSION = "0.9.24" - lazy val SLF4J_VERSION = "1.6.0" - lazy val SPRING_VERSION = "3.0.3.RELEASE" - lazy val ASPECTWERKZ_VERSION = "2.2.1" - lazy val JETTY_VERSION = "7.1.4.v20100610" + lazy val JACKSON_VERSION = "1.2.1" + lazy val JERSEY_VERSION = "1.2" + lazy val MULTIVERSE_VERSION = "0.6.1" + lazy val SCALATEST_VERSION = "1.2-for-scala-2.8.0.final-SNAPSHOT" + lazy val LOGBACK_VERSION = "0.9.24" + lazy val SLF4J_VERSION = "1.6.0" + lazy val SPRING_VERSION = "3.0.3.RELEASE" + lazy val ASPECTWERKZ_VERSION = "2.2.1" + lazy val JETTY_VERSION = "7.1.4.v20100610" // ------------------------------------------------------------------------------------------------------------------- // Dependencies @@ -117,14 +114,14 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val aopalliance = "aopalliance" % "aopalliance" % "1.0" % "compile" - lazy val atmo = "org.atmosphere" % "atmosphere-annotations" % ATMO_VERSION % "compile" + lazy val atmo = "org.atmosphere" % "atmosphere-annotations" % ATMO_VERSION % "compile" lazy val atmo_jbossweb = "org.atmosphere" % "atmosphere-compat-jbossweb" % ATMO_VERSION % "compile" - lazy val atmo_jersey = "org.atmosphere" % "atmosphere-jersey" % ATMO_VERSION % "compile" - lazy val atmo_runtime = "org.atmosphere" % "atmosphere-runtime" % ATMO_VERSION % "compile" - lazy val atmo_tomcat = "org.atmosphere" % "atmosphere-compat-tomcat" % ATMO_VERSION % "compile" + lazy val atmo_jersey = "org.atmosphere" % "atmosphere-jersey" % ATMO_VERSION % "compile" + lazy val atmo_runtime = "org.atmosphere" % "atmosphere-runtime" % ATMO_VERSION % "compile" + lazy val atmo_tomcat = "org.atmosphere" % "atmosphere-compat-tomcat" % ATMO_VERSION % "compile" lazy val atmo_weblogic = "org.atmosphere" % "atmosphere-compat-weblogic" % ATMO_VERSION % "compile" - lazy val atomikos_transactions = "com.atomikos" % "transactions" % "3.2.3" % "compile" + lazy val atomikos_transactions = "com.atomikos" % "transactions" % "3.2.3" % "compile" lazy val atomikos_transactions_api = "com.atomikos" % "transactions-api" % "3.2.3" % "compile" lazy val atomikos_transactions_jta = "com.atomikos" % "transactions-jta" % "3.2.3" % "compile" @@ -143,9 +140,9 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val dispatch_http = "net.databinder" % "dispatch-http_2.8.0" % DISPATCH_VERSION % "compile" lazy val dispatch_json = "net.databinder" % "dispatch-json_2.8.0" % DISPATCH_VERSION % "compile" - lazy val jetty = "org.eclipse.jetty" % "jetty-server" % JETTY_VERSION % "compile" - lazy val jetty_util = "org.eclipse.jetty" % "jetty-util" % JETTY_VERSION % "compile" - lazy val jetty_xml = "org.eclipse.jetty" % "jetty-xml" % JETTY_VERSION % "compile" + lazy val jetty = "org.eclipse.jetty" % 
"jetty-server" % JETTY_VERSION % "compile" + lazy val jetty_util = "org.eclipse.jetty" % "jetty-util" % JETTY_VERSION % "compile" + lazy val jetty_xml = "org.eclipse.jetty" % "jetty-xml" % JETTY_VERSION % "compile" lazy val jetty_servlet = "org.eclipse.jetty" % "jetty-servlet" % JETTY_VERSION % "compile" lazy val guicey = "org.guiceyfruit" % "guice-all" % "2.0" % "compile" @@ -154,14 +151,14 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val hawtdispatch = "org.fusesource.hawtdispatch" % "hawtdispatch-scala" % HAWT_DISPATCH_VERSION % "compile" - lazy val jackson = "org.codehaus.jackson" % "jackson-mapper-asl" % JACKSON_VERSION % "compile" - lazy val jackson_core = "org.codehaus.jackson" % "jackson-core-asl" % JACKSON_VERSION % "compile" - lazy val jackson_core_asl = "org.codehaus.jackson" % "jackson-core-asl" % JACKSON_VERSION % "compile" + lazy val jackson = "org.codehaus.jackson" % "jackson-mapper-asl" % JACKSON_VERSION % "compile" + lazy val jackson_core = "org.codehaus.jackson" % "jackson-core-asl" % JACKSON_VERSION % "compile" + lazy val jackson_core_asl = "org.codehaus.jackson" % "jackson-core-asl" % JACKSON_VERSION % "compile" - lazy val jersey = "com.sun.jersey" % "jersey-core" % JERSEY_VERSION % "compile" - lazy val jersey_json = "com.sun.jersey" % "jersey-json" % JERSEY_VERSION % "compile" - lazy val jersey_server = "com.sun.jersey" % "jersey-server" % JERSEY_VERSION % "compile" - lazy val jersey_contrib = "com.sun.jersey.contribs" % "jersey-scala" % JERSEY_VERSION % "compile" + lazy val jersey = "com.sun.jersey" % "jersey-core" % JERSEY_VERSION % "compile" + lazy val jersey_json = "com.sun.jersey" % "jersey-json" % JERSEY_VERSION % "compile" + lazy val jersey_server = "com.sun.jersey" % "jersey-server" % JERSEY_VERSION % "compile" + lazy val jersey_contrib = "com.sun.jersey.contribs" % "jersey-scala" % JERSEY_VERSION % "compile" lazy val jgroups = "jgroups" % "jgroups" % "2.9.0.GA" % "compile" @@ -195,12 +192,12 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val sjson = "sjson.json" % "sjson" % "0.8-SNAPSHOT-2.8.0" % "compile" - lazy val slf4j = "org.slf4j" % "slf4j-api" % SLF4J_VERSION % "compile" + lazy val slf4j = "org.slf4j" % "slf4j-api" % SLF4J_VERSION % "compile" - lazy val logback = "ch.qos.logback" % "logback-classic" % LOGBACK_VERSION % "compile" + lazy val logback = "ch.qos.logback" % "logback-classic" % LOGBACK_VERSION % "compile" lazy val logback_core = "ch.qos.logback" % "logback-core" % LOGBACK_VERSION % "compile" - lazy val spring_beans = "org.springframework" % "spring-beans" % SPRING_VERSION % "compile" + lazy val spring_beans = "org.springframework" % "spring-beans" % SPRING_VERSION % "compile" lazy val spring_context = "org.springframework" % "spring-context" % SPRING_VERSION % "compile" lazy val stax_api = "javax.xml.stream" % "stax-api" % "1.0-2" % "compile" @@ -211,22 +208,22 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val voldemort_contrib = "voldemort" % "voldemort-contrib" % "0.81" % "compile" lazy val voldemort_needs_log4j = "org.slf4j" % "log4j-over-slf4j" % SLF4J_VERSION % "compile" - lazy val werkz = "org.codehaus.aspectwerkz" % "aspectwerkz-nodeps-jdk5" % ASPECTWERKZ_VERSION % "compile" - lazy val werkz_core = "org.codehaus.aspectwerkz" % "aspectwerkz-jdk5" % ASPECTWERKZ_VERSION % "compile" + lazy val werkz = "org.codehaus.aspectwerkz" % "aspectwerkz-nodeps-jdk5" % ASPECTWERKZ_VERSION % "compile" + lazy val werkz_core = 
"org.codehaus.aspectwerkz" % "aspectwerkz-jdk5" % ASPECTWERKZ_VERSION % "compile" // Test - lazy val camel_spring = "org.apache.camel" % "camel-spring" % CAMEL_VERSION % "test" - lazy val cassandra_clhm = "org.apache.cassandra" % "clhm-production" % CASSANDRA_VERSION % "test" - lazy val commons_coll = "commons-collections" % "commons-collections" % "3.2.1" % "test" - lazy val google_coll = "com.google.collections" % "google-collections" % "1.0" % "test" - lazy val high_scale = "org.apache.cassandra" % "high-scale-lib" % CASSANDRA_VERSION % "test" - lazy val testJetty = "org.eclipse.jetty" % "jetty-server" % JETTY_VERSION % "test" - lazy val testJettyWebApp = "org.eclipse.jetty" % "jetty-webapp" % JETTY_VERSION % "test" + lazy val camel_spring = "org.apache.camel" % "camel-spring" % CAMEL_VERSION % "test" + lazy val cassandra_clhm = "org.apache.cassandra" % "clhm-production" % CASSANDRA_VERSION % "test" + lazy val commons_coll = "commons-collections" % "commons-collections" % "3.2.1" % "test" + lazy val google_coll = "com.google.collections" % "google-collections" % "1.0" % "test" + lazy val high_scale = "org.apache.cassandra" % "high-scale-lib" % CASSANDRA_VERSION % "test" + lazy val testJetty = "org.eclipse.jetty" % "jetty-server" % JETTY_VERSION % "test" + lazy val testJettyWebApp= "org.eclipse.jetty" % "jetty-webapp" % JETTY_VERSION % "test" - lazy val junit = "junit" % "junit" % "4.5" % "test" - lazy val mockito = "org.mockito" % "mockito-all" % "1.8.1" % "test" - lazy val scalatest = "org.scalatest" % "scalatest" % SCALATEST_VERSION % "test" + lazy val junit = "junit" % "junit" % "4.5" % "test" + lazy val mockito = "org.mockito" % "mockito-all" % "1.8.1" % "test" + lazy val scalatest = "org.scalatest" % "scalatest" % SCALATEST_VERSION % "test" //voldemort testing /home/sclasen/projects/akka/akka-persistence-voldemort/src/test/resources/ lazy val jdom = "org.jdom" % "jdom" % "1.1" % "test" @@ -240,19 +237,19 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // Subprojects // ------------------------------------------------------------------------------------------------------------------- - lazy val akka_actor = project("akka-actor", "akka-actor", new AkkaActorProject(_)) + lazy val akka_actor = project("akka-actor", "akka-actor", new AkkaActorProject(_)) lazy val akka_typed_actor = project("akka-typed-actor", "akka-typed-actor", new AkkaTypedActorProject(_), akka_actor) - lazy val akka_remote = project("akka-remote", "akka-remote", new AkkaRemoteProject(_), akka_typed_actor) - lazy val akka_amqp = project("akka-amqp", "akka-amqp", new AkkaAMQPProject(_), akka_remote) - lazy val akka_http = project("akka-http", "akka-http", new AkkaHttpProject(_), akka_remote, akka_camel) - lazy val akka_camel = project("akka-camel", "akka-camel", new AkkaCamelProject(_), akka_remote) + lazy val akka_remote = project("akka-remote", "akka-remote", new AkkaRemoteProject(_), akka_typed_actor) + lazy val akka_amqp = project("akka-amqp", "akka-amqp", new AkkaAMQPProject(_), akka_remote) + lazy val akka_http = project("akka-http", "akka-http", new AkkaHttpProject(_), akka_remote, akka_camel) + lazy val akka_camel = project("akka-camel", "akka-camel", new AkkaCamelProject(_), akka_remote) lazy val akka_persistence = project("akka-persistence", "akka-persistence", new AkkaPersistenceParentProject(_)) - lazy val akka_spring = project("akka-spring", "akka-spring", new AkkaSpringProject(_), akka_remote, akka_camel) - lazy val akka_jta = project("akka-jta", "akka-jta", new 
AkkaJTAProject(_), akka_remote) - lazy val akka_kernel = project("akka-kernel", "akka-kernel", new AkkaKernelProject(_), - akka_remote, akka_jta, akka_http, akka_spring, akka_camel, akka_persistence, akka_amqp) - lazy val akka_osgi = project("akka-osgi", "akka-osgi", new AkkaOSGiParentProject(_)) - lazy val akka_samples = project("akka-samples", "akka-samples", new AkkaSamplesParentProject(_)) + lazy val akka_spring = project("akka-spring", "akka-spring", new AkkaSpringProject(_), akka_remote, akka_camel) + lazy val akka_jta = project("akka-jta", "akka-jta", new AkkaJTAProject(_), akka_remote) + lazy val akka_kernel = project("akka-kernel", "akka-kernel", new AkkaKernelProject(_), + akka_remote, akka_jta, akka_http, akka_spring, akka_camel, akka_persistence, akka_amqp) + lazy val akka_osgi = project("akka-osgi", "akka-osgi", new AkkaOSGiParentProject(_)) + lazy val akka_samples = project("akka-samples", "akka-samples", new AkkaSamplesParentProject(_)) // ------------------------------------------------------------------------------------------------------------------- // Miscellaneous @@ -266,37 +263,37 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { (IMPLEMENTATION_TITLE, "Akka"), (IMPLEMENTATION_URL, "http://akkasource.org"), (IMPLEMENTATION_VENDOR, "The Akka Project") - )).toList ::: - getMainClass(false).map(MainClass(_)).toList + )).toList ::: + getMainClass(false).map(MainClass(_)).toList // create a manifest with all akka jars and dependency jars on classpath override def manifestClassPath = Some(allArtifacts.getFiles - .filter(_.getName.endsWith(".jar")) - .filter(!_.getName.contains("servlet_2.4")) - .filter(!_.getName.contains("scala-library")) - .map("lib_managed/scala_%s/compile/".format(buildScalaVersion) + _.getName) - .mkString(" ") + - " config/" + - " scala-library.jar" + - " dist/akka-actor_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-typed-actor_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-remote_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-http_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-camel_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-amqp_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-persistence-common_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-persistence-redis_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-persistence-mongo_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-persistence-cassandra_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-kernel_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-spring_%s-%s.jar".format(buildScalaVersion, version) + - " dist/akka-jta_%s-%s.jar".format(buildScalaVersion, version) + .filter(_.getName.endsWith(".jar")) + .filter(!_.getName.contains("servlet_2.4")) + .filter(!_.getName.contains("scala-library")) + .map("lib_managed/scala_%s/compile/".format(buildScalaVersion) + _.getName) + .mkString(" ") + + " config/" + + " scala-library.jar" + + " dist/akka-actor_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-typed-actor_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-remote_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-http_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-camel_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-amqp_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-persistence-common_%s-%s.jar".format(buildScalaVersion, version) + + 
" dist/akka-persistence-redis_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-persistence-mongo_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-persistence-cassandra_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-kernel_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-spring_%s-%s.jar".format(buildScalaVersion, version) + + " dist/akka-jta_%s-%s.jar".format(buildScalaVersion, version) ) //Exclude slf4j1.5.11 from the classpath, it's conflicting... override def fullClasspath(config: Configuration): PathFinder = { super.fullClasspath(config) --- - (super.fullClasspath(config) ** "slf4j*1.5.11.jar") + (super.fullClasspath(config) ** "slf4j*1.5.11.jar") } override def mainResources = super.mainResources +++ @@ -317,60 +314,57 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { //override def documentOptions = encodingUtf8.map(SimpleDocOption(_)) override def packageDocsJar = defaultJarPath("-docs.jar") - - override def packageSrcJar = defaultJarPath("-sources.jar") - + override def packageSrcJar= defaultJarPath("-sources.jar") override def packageToPublishActions = super.packageToPublishActions ++ Seq(packageDocs, packageSrc) override def pomExtra = 2009 - http://akkasource.org - - Scalable Solutions AB - http://scalablesolutions.se - - - - Apache 2 - http://www.apache.org/licenses/LICENSE-2.0.txt - repo - - + http://akkasource.org + + Scalable Solutions AB + http://scalablesolutions.se + + + + Apache 2 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + // publish to local mvn import Process._ lazy val publishLocalMvn = runMvnInstall - def runMvnInstall = task { for (absPath <- akkaArtifacts.getPaths) { val artifactRE = """(.*)/dist/(.*)-(.*).jar""".r val artifactRE(path, artifactId, artifactVersion) = absPath val command = "mvn install:install-file" + - " -Dfile=" + absPath + - " -DgroupId=se.scalablesolutions.akka" + - " -DartifactId=" + artifactId + - " -Dversion=" + version + - " -Dpackaging=jar -DgeneratePom=true" + " -Dfile=" + absPath + + " -DgroupId=se.scalablesolutions.akka" + + " -DartifactId=" + artifactId + + " -Dversion=" + version + + " -Dpackaging=jar -DgeneratePom=true" command ! 
log } None - } dependsOn (dist) describedAs ("Run mvn install for artifacts in dist.") + } dependsOn(dist) describedAs("Run mvn install for artifacts in dist.") // ------------------------------------------------------------------------------------------------------------------- // akka-actor subproject // ------------------------------------------------------------------------------------------------------------------- class AkkaActorProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val configgy = Dependencies.configgy - val hawtdispatch = Dependencies.hawtdispatch - val multiverse = Dependencies.multiverse - val jsr166x = Dependencies.jsr166x - val slf4j = Dependencies.slf4j - val logback = Dependencies.logback - val logback_core = Dependencies.logback_core + val configgy = Dependencies.configgy + val hawtdispatch = Dependencies.hawtdispatch + val multiverse = Dependencies.multiverse + val jsr166x = Dependencies.jsr166x + val slf4j = Dependencies.slf4j + val logback = Dependencies.logback + val logback_core = Dependencies.logback_core // testing - val junit = Dependencies.junit + val junit = Dependencies.junit val scalatest = Dependencies.scalatest } @@ -379,13 +373,13 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- class AkkaTypedActorProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val aopalliance = Dependencies.aopalliance - val werkz = Dependencies.werkz - val werkz_core = Dependencies.werkz_core - val guicey = Dependencies.guicey + val aopalliance = Dependencies.aopalliance + val werkz = Dependencies.werkz + val werkz_core = Dependencies.werkz_core + val guicey = Dependencies.guicey // testing - val junit = Dependencies.junit + val junit = Dependencies.junit val scalatest = Dependencies.scalatest } @@ -395,22 +389,22 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaRemoteProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { val commons_codec = Dependencies.commons_codec - val commons_io = Dependencies.commons_io + val commons_io = Dependencies.commons_io val dispatch_http = Dependencies.dispatch_http val dispatch_json = Dependencies.dispatch_json - val guicey = Dependencies.guicey - val h2_lzf = Dependencies.h2_lzf - val jackson = Dependencies.jackson - val jackson_core = Dependencies.jackson_core - val jgroups = Dependencies.jgroups - val jta_1_1 = Dependencies.jta_1_1 - val netty = Dependencies.netty - val protobuf = Dependencies.protobuf - val sbinary = Dependencies.sbinary - val sjson = Dependencies.sjson + val guicey = Dependencies.guicey + val h2_lzf = Dependencies.h2_lzf + val jackson = Dependencies.jackson + val jackson_core = Dependencies.jackson_core + val jgroups = Dependencies.jgroups + val jta_1_1 = Dependencies.jta_1_1 + val netty = Dependencies.netty + val protobuf = Dependencies.protobuf + val sbinary = Dependencies.sbinary + val sjson = Dependencies.sjson // testing - val junit = Dependencies.junit + val junit = Dependencies.junit val scalatest = Dependencies.scalatest override def bndImportPackage = "javax.transaction;version=1.1" :: super.bndImportPackage.toList @@ -422,13 +416,13 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaAMQPProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { val commons_io = Dependencies.commons_io - val rabbit = Dependencies.rabbit 
- val protobuf = Dependencies.protobuf + val rabbit = Dependencies.rabbit + val protobuf = Dependencies.protobuf // testing - val junit = Dependencies.junit + val junit = Dependencies.junit val multiverse = Dependencies.multiverse - val scalatest = Dependencies.scalatest + val scalatest = Dependencies.scalatest } // ------------------------------------------------------------------------------------------------------------------- @@ -436,28 +430,28 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- class AkkaHttpProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val annotation = Dependencies.annotation - val atmo = Dependencies.atmo - val atmo_jbossweb = Dependencies.atmo_jbossweb - val atmo_jersey = Dependencies.atmo_jersey - val atmo_runtime = Dependencies.atmo_runtime - val atmo_tomcat = Dependencies.atmo_tomcat - val atmo_weblogic = Dependencies.atmo_weblogic - val jetty = Dependencies.jetty - val jetty_util = Dependencies.jetty_util - val jetty_xml = Dependencies.jetty_xml - val jetty_servlet = Dependencies.jetty_servlet + val annotation = Dependencies.annotation + val atmo = Dependencies.atmo + val atmo_jbossweb = Dependencies.atmo_jbossweb + val atmo_jersey = Dependencies.atmo_jersey + val atmo_runtime = Dependencies.atmo_runtime + val atmo_tomcat = Dependencies.atmo_tomcat + val atmo_weblogic = Dependencies.atmo_weblogic + val jetty = Dependencies.jetty + val jetty_util = Dependencies.jetty_util + val jetty_xml = Dependencies.jetty_xml + val jetty_servlet = Dependencies.jetty_servlet val jackson_core_asl = Dependencies.jackson_core_asl - val jersey = Dependencies.jersey - val jersey_contrib = Dependencies.jersey_contrib - val jersey_json = Dependencies.jersey_json - val jersey_server = Dependencies.jersey_server - val jsr311 = Dependencies.jsr311 - val stax_api = Dependencies.stax_api + val jersey = Dependencies.jersey + val jersey_contrib = Dependencies.jersey_contrib + val jersey_json = Dependencies.jersey_json + val jersey_server = Dependencies.jersey_server + val jsr311 = Dependencies.jsr311 + val stax_api = Dependencies.stax_api // testing - val junit = Dependencies.junit - val mockito = Dependencies.mockito + val junit = Dependencies.junit + val mockito = Dependencies.mockito val scalatest = Dependencies.scalatest } @@ -492,7 +486,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaPersistenceCommonProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { val commons_pool = Dependencies.commons_pool - val thrift = Dependencies.thrift + val thrift = Dependencies.thrift } // ------------------------------------------------------------------------------------------------------------------- @@ -501,7 +495,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaRedisProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { val commons_codec = Dependencies.commons_codec - val redis = Dependencies.redis + val redis = Dependencies.redis override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil } @@ -522,18 +516,17 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- class AkkaCassandraProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { 
- val cassandra = Dependencies.cassandra + val cassandra = Dependencies.cassandra // testing val cassandra_clhm = Dependencies.cassandra_clhm - val commons_coll = Dependencies.commons_coll - val google_coll = Dependencies.google_coll - val high_scale = Dependencies.high_scale + val commons_coll = Dependencies.commons_coll + val google_coll = Dependencies.google_coll + val high_scale = Dependencies.high_scale override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil } - // ------------------------------------------------------------------------------------------------------------------- // akka-persistence-voldemort subproject // ------------------------------------------------------------------------------------------------------------------- @@ -569,13 +562,13 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- class AkkaSpringProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val spring_beans = Dependencies.spring_beans + val spring_beans = Dependencies.spring_beans val spring_context = Dependencies.spring_context // testing val camel_spring = Dependencies.camel_spring - val junit = Dependencies.junit - val scalatest = Dependencies.scalatest + val junit = Dependencies.junit + val scalatest = Dependencies.scalatest } // ------------------------------------------------------------------------------------------------------------------- @@ -583,7 +576,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- class AkkaJTAProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val atomikos_transactions = Dependencies.atomikos_transactions + val atomikos_transactions = Dependencies.atomikos_transactions val atomikos_transactions_api = Dependencies.atomikos_transactions_api val atomikos_transactions_jta = Dependencies.atomikos_transactions_jta //val jta_1_1 = Dependencies.jta_1_1 @@ -599,18 +592,15 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { new AkkaOSGiDependenciesBundleProject(_), akka_kernel, akka_jta) // akka_kernel does not depend on akka_jta (why?) 
therefore we list akka_jta here lazy val akka_osgi_assembly = project("akka-osgi-assembly", "akka-osgi-assembly", new AkkaOSGiAssemblyProject(_), akka_osgi_dependencies_bundle, akka_remote, akka_amqp, akka_http, - akka_camel, akka_spring, akka_jta, akka_persistence.akka_persistence_common, - akka_persistence.akka_persistence_redis, akka_persistence.akka_persistence_mongo, - akka_persistence.akka_persistence_cassandra) + akka_camel, akka_spring, akka_jta, akka_persistence.akka_persistence_common, + akka_persistence.akka_persistence_redis, akka_persistence.akka_persistence_mongo, + akka_persistence.akka_persistence_cassandra,akka_persistence.akka_persistence_voldemort) } class AkkaOSGiDependenciesBundleProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with BNDPlugin { override def bndClasspath = compileClasspath - override def bndPrivatePackage = Seq("") - override def bndImportPackage = Seq("*;resolution:=optional") - override def bndExportPackage = Seq( "org.aopalliance.*;version=1.0.0", @@ -638,36 +628,36 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { val scala_bundle = "com.weiglewilczek.scala-lang-osgi" % "scala-library" % buildScalaVersion % "compile" intransitive // Camel bundles - val camel_core = Dependencies.camel_core.intransitive + val camel_core = Dependencies.camel_core.intransitive val fusesource_commonman = "org.fusesource.commonman" % "commons-management" % "1.0" intransitive // Spring bundles - val spring_beans = Dependencies.spring_beans.intransitive - val spring_context = Dependencies.spring_context.intransitive - val spring_aop = "org.springframework" % "spring-aop" % SPRING_VERSION % "compile" intransitive - val spring_asm = "org.springframework" % "spring-asm" % SPRING_VERSION % "compile" intransitive - val spring_core = "org.springframework" % "spring-core" % SPRING_VERSION % "compile" intransitive + val spring_beans = Dependencies.spring_beans.intransitive + val spring_context = Dependencies.spring_context.intransitive + val spring_aop = "org.springframework" % "spring-aop" % SPRING_VERSION % "compile" intransitive + val spring_asm = "org.springframework" % "spring-asm" % SPRING_VERSION % "compile" intransitive + val spring_core = "org.springframework" % "spring-core" % SPRING_VERSION % "compile" intransitive val spring_expression = "org.springframework" % "spring-expression" % SPRING_VERSION % "compile" intransitive - val spring_jms = "org.springframework" % "spring-jms" % SPRING_VERSION % "compile" intransitive - val spring_tx = "org.springframework" % "spring-tx" % SPRING_VERSION % "compile" intransitive + val spring_jms = "org.springframework" % "spring-jms" % SPRING_VERSION % "compile" intransitive + val spring_tx = "org.springframework" % "spring-tx" % SPRING_VERSION % "compile" intransitive - val commons_codec = Dependencies.commons_codec.intransitive - val commons_io = Dependencies.commons_io.intransitive - val commons_pool = Dependencies.commons_pool.intransitive - val guicey = Dependencies.guicey.intransitive - val jackson = Dependencies.jackson.intransitive - val jackson_core = Dependencies.jackson_core.intransitive - val jsr311 = Dependencies.jsr311.intransitive - val jta_1_1 = Dependencies.jta_1_1.intransitive - val netty = Dependencies.netty.intransitive - val commons_fileupload = "commons-fileupload" % "commons-fileupload" % "1.2.1" % "compile" intransitive - val jms_1_1 = "org.apache.geronimo.specs" % "geronimo-jms_1.1_spec" % "1.1.1" % "compile" intransitive - val joda = "joda-time" % "joda-time" % 
"1.6" intransitive + val commons_codec = Dependencies.commons_codec.intransitive + val commons_io = Dependencies.commons_io.intransitive + val commons_pool = Dependencies.commons_pool.intransitive + val guicey = Dependencies.guicey.intransitive + val jackson = Dependencies.jackson.intransitive + val jackson_core = Dependencies.jackson_core.intransitive + val jsr311 = Dependencies.jsr311.intransitive + val jta_1_1 = Dependencies.jta_1_1.intransitive + val netty = Dependencies.netty.intransitive + val commons_fileupload = "commons-fileupload" % "commons-fileupload" % "1.2.1" % "compile" intransitive + val jms_1_1 = "org.apache.geronimo.specs" % "geronimo-jms_1.1_spec" % "1.1.1" % "compile" intransitive + val joda = "joda-time" % "joda-time" % "1.6" intransitive override def packageAction = task { val libs: Seq[Path] = managedClasspath(config("compile")).get.toSeq - val prjs: Seq[Path] = info.dependencies.toSeq.asInstanceOf[Seq[DefaultProject]] map {_.jarPath} + val prjs: Seq[Path] = info.dependencies.toSeq.asInstanceOf[Seq[DefaultProject]] map { _.jarPath } val all = libs ++ prjs val destination = outputPath / "bundles" FileUtilities.copyFlat(all, destination, log) @@ -718,7 +708,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { - + @@ -729,8 +719,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaSampleSecurityProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) { val commons_codec = Dependencies.commons_codec - val jsr250 = Dependencies.jsr250 - val jsr311 = Dependencies.jsr311 + val jsr250 = Dependencies.jsr250 + val jsr311 = Dependencies.jsr311 } class AkkaSampleOSGiProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with BNDPlugin { @@ -767,71 +757,63 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- def removeDupEntries(paths: PathFinder) = - Path.lazyPathFinder { - val mapped = paths.get map {p => (p.relativePath, p)} - (Map() ++ mapped).values.toList - } + Path.lazyPathFinder { + val mapped = paths.get map { p => (p.relativePath, p) } + (Map() ++ mapped).values.toList + } def allArtifacts = { Path.fromFile(buildScalaInstance.libraryJar) +++ - (removeDupEntries(runClasspath filter ClasspathUtilities.isArchive) +++ - ((outputPath ##) / defaultJarName) +++ - mainResources +++ - mainDependencies.scalaJars +++ - descendents(info.projectPath / "scripts", "run_akka.sh") +++ - descendents(info.projectPath / "scripts", "akka-init-script.sh") +++ - descendents(info.projectPath / "dist", "*.jar") +++ - descendents(info.projectPath / "deploy", "*.jar") +++ - descendents(path("lib") ##, "*.jar") +++ - descendents(configurationPath(Configurations.Compile) ##, "*.jar")) - .filter(jar => // remove redundant libs - !jar.toString.endsWith("stax-api-1.0.1.jar") || - !jar.toString.endsWith("scala-library-2.7.7.jar") - ) + (removeDupEntries(runClasspath filter ClasspathUtilities.isArchive) +++ + ((outputPath ##) / defaultJarName) +++ + mainResources +++ + mainDependencies.scalaJars +++ + descendents(info.projectPath / "scripts", "run_akka.sh") +++ + descendents(info.projectPath / "scripts", "akka-init-script.sh") +++ + descendents(info.projectPath / "dist", "*.jar") +++ + descendents(info.projectPath / "deploy", "*.jar") +++ + descendents(path("lib") ##, "*.jar") +++ + descendents(configurationPath(Configurations.Compile) ##, "*.jar")) + .filter(jar => // 
remove redundant libs + !jar.toString.endsWith("stax-api-1.0.1.jar") || + !jar.toString.endsWith("scala-library-2.7.7.jar") + ) } - def akkaArtifacts = descendents(info.projectPath / "dist", "*" + buildScalaVersion + "-" + version + ".jar") + def akkaArtifacts = descendents(info.projectPath / "dist", "*" + buildScalaVersion + "-" + version + ".jar") // ------------------------------------------------------------ class AkkaDefaultProject(info: ProjectInfo, val deployPath: Path) extends DefaultProject(info) with DeployProject with OSGiProject { lazy val sourceArtifact = Artifact(this.artifactID, "sources", "jar", Some("sources"), Nil, None) lazy val docsArtifact = Artifact(this.artifactID, "docs", "jar", Some("docs"), Nil, None) - override def runClasspath = super.runClasspath +++ (AkkaParentProject.this.info.projectPath / "config") - override def testClasspath = super.testClasspath +++ (AkkaParentProject.this.info.projectPath / "config") - override def packageDocsJar = this.defaultJarPath("-docs.jar") - - override def packageSrcJar = this.defaultJarPath("-sources.jar") - + override def packageSrcJar = this.defaultJarPath("-sources.jar") override def packageToPublishActions = super.packageToPublishActions ++ Seq(this.packageDocs, this.packageSrc) } } -trait DeployProject { - self: BasicScalaProject => +trait DeployProject { self: BasicScalaProject => // defines where the deployTask copies jars to def deployPath: Path - lazy val dist = deployTask(jarPath, packageDocsJar, packageSrcJar, deployPath, true, true, true) dependsOn ( - `package`, packageDocs, packageSrc) describedAs ("Deploying") - + lazy val dist = deployTask(jarPath, packageDocsJar, packageSrcJar, deployPath, true, true, true) dependsOn( + `package`, packageDocs, packageSrc) describedAs("Deploying") def deployTask(jar: Path, docs: Path, src: Path, toDir: Path, genJar: Boolean, genDocs: Boolean, genSource: Boolean) = task { def gen(jar: Path, toDir: Path, flag: Boolean, msg: String): Option[String] = - if (flag) { - log.info(msg + " " + jar) - FileUtilities.copyFile(jar, toDir / jar.name, log) - } else None + if (flag) { + log.info(msg + " " + jar) + FileUtilities.copyFile(jar, toDir / jar.name, log) + } else None gen(jar, toDir, genJar, "Deploying bits") orElse - gen(docs, toDir, genDocs, "Deploying docs") orElse - gen(src, toDir, genSource, "Deploying sources") + gen(docs, toDir, genDocs, "Deploying docs") orElse + gen(src, toDir, genSource, "Deploying sources") } } -trait OSGiProject extends BNDPlugin { - self: DefaultProject => +trait OSGiProject extends BNDPlugin { self: DefaultProject => override def bndExportPackage = Seq("se.scalablesolutions.akka.*;version=%s".format(projectVersion.value)) } From 063dc6964c17075efd330b90162f581b84ba8802 Mon Sep 17 00:00:00 2001 From: ticktock Date: Mon, 20 Sep 2010 21:28:09 -0400 Subject: [PATCH 14/52] provide better voldemort configuration support, and defaults definition in akka-reference.conf, and made the backend more easily testable --- config/akka-reference.conf | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/config/akka-reference.conf b/config/akka-reference.conf index a66e12be0a..8758138df7 100644 --- a/config/akka-reference.conf +++ b/config/akka-reference.conf @@ -162,5 +162,18 @@ akka { hostname = "127.0.0.1" # IP address or hostname of the Redis instance port = 6379 # Port to Redis } + + voldemort { + store { + refs = "Refs" # Voldemort Store Used to Persist Refs. 
Use string serializer for keys, identity serializer for values + map-keys = "MapKeys" # Voldemort Store Used to Persist Map Keys. Use string serializer for keys, identity serializer for values + map-values = "MapValues" # Voldemort Store Used to Persist Map Values. Use identity serializer for keys, identity serializer for values + vector-sizes = "VectorSizes" # Voldemort Store Used to Persist Vector Sizes. Use string serializer for keys, identity serializer for values + vector-values = "VectorValues" # Voldemort Store Used to Persist Vector Values. Use identity serializer for keys, identity serializer for values + } + client { # The KeyValue pairs under client are converted to java Properties and used to construct the ClientConfig + bootstrap_urls = "tcp://localhost:6666" # All Valid Voldemort Client properties are valid here, in string form + } + } } } From a5e67d05f2266b38033bee7637a75bbdaecc44e8 Mon Sep 17 00:00:00 2001 From: ticktock Date: Mon, 20 Sep 2010 21:29:17 -0400 Subject: [PATCH 15/52] provide better voldemort configuration support, and defaults definition in akka-reference.conf, and made the backend more easily testable --- .../main/scala/VoldemortStorageBackend.scala | 76 +++++++++++++------ .../src/test/scala/EmbeddedVoldemort.scala | 3 +- 2 files changed, 55 insertions(+), 24 deletions(-) diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala index 77fd7acedb..6331378356 100644 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala @@ -16,36 +16,34 @@ import voldemort.utils.ByteUtils import voldemort.versioning.Versioned import collection.JavaConversions import java.nio.ByteBuffer -import collection.immutable.{IndexedSeq, SortedSet, TreeSet} -import collection.mutable.{Map, Set, HashSet, ArrayBuffer} -import java.util.{Map => JMap} - +import collection.Map +import collection.immutable.{IndexedSeq, SortedSet, TreeSet, HashMap} +import collection.mutable.{Set, HashSet, ArrayBuffer} +import java.util.{Properties, Map => JMap} private[akka] object VoldemortStorageBackend extends MapStorageBackend[Array[Byte], Array[Byte]] with VectorStorageBackend[Array[Byte]] with RefStorageBackend[Array[Byte]] with Logging { - val bootstrapUrl: String = config.getString("akka.storage.voldemort.bootstrap.url", "tcp://localhost:6666") - val refStore = config.getString("akka.storage.voldemort.store.ref", "Refs") - val mapKeyStore = config.getString("akka.storage.voldemort.store.map.key", "MapKeys") - val mapValueStore = config.getString("akka.storage.voldemort.store.map.value", "MapValues") - val vectorSizeStore = config.getString("akka.storage.voldemort.store.vector.size", "VectorSizes") - val vectorValueStore = config.getString("akka.storage.voldemort.store.vectore.value", "VectorValues") - val storeClientFactory = { - if (bootstrapUrl.startsWith("tcp")) { - new SocketStoreClientFactory(new ClientConfig().setBootstrapUrls(bootstrapUrl)) - } else if (bootstrapUrl.startsWith("http")) { - new HttpStoreClientFactory(new ClientConfig().setBootstrapUrls(bootstrapUrl)) - } else { - throw new IllegalArgumentException("Unknown boostrapUrl syntax" + bootstrapUrl) - } + val clientConfig = config.getConfigMap("akka.storage.voldemort.client") match { + case Some(configMap) => getClientConfig(configMap.asMap) + case None => 
getClientConfig(new HashMap[String, String] + ("boostrap_urls" -> "tcp://localhost:6666")) } - var refClient: StoreClient[String, Array[Byte]] = storeClientFactory.getStoreClient(refStore) - var mapKeyClient: StoreClient[String, Array[Byte]] = storeClientFactory.getStoreClient(mapKeyStore) - var mapValueClient: StoreClient[Array[Byte], Array[Byte]] = storeClientFactory.getStoreClient(mapValueStore) - var vectorSizeClient: StoreClient[String, Array[Byte]] = storeClientFactory.getStoreClient(vectorSizeStore) - var vectorValueClient: StoreClient[Array[Byte], Array[Byte]] = storeClientFactory.getStoreClient(vectorValueStore) + val refStore = config.getString("akka.storage.voldemort.store.ref", "Refs") + val mapKeyStore = config.getString("akka.storage.voldemort.store.map-key", "MapKeys") + val mapValueStore = config.getString("akka.storage.voldemort.store.map-value", "MapValues") + val vectorSizeStore = config.getString("akka.storage.voldemort.store.vector-size", "VectorSizes") + val vectorValueStore = config.getString("akka.storage.voldemort.store.vector-value", "VectorValues") + + var storeClientFactory: StoreClientFactory = null + var refClient: StoreClient[String, Array[Byte]] = null + var mapKeyClient: StoreClient[String, Array[Byte]] = null + var mapValueClient: StoreClient[Array[Byte], Array[Byte]] = null + var vectorSizeClient: StoreClient[String, Array[Byte]] = null + var vectorValueClient: StoreClient[Array[Byte], Array[Byte]] = null + initStoreClients + val underscoreBytesUTF8 = "_".getBytes("UTF-8") implicit val byteOrder = new Ordering[Array[Byte]] { override def compare(x: Array[Byte], y: Array[Byte]) = ByteUtils.compare(x, y) @@ -245,6 +243,38 @@ MapStorageBackend[Array[Byte], Array[Byte]] with IntSerializer.fromBytes(indexBytes) } + + def getClientConfig(configMap: Map[String, String]): Properties = { + val properites = new Properties + configMap.foreach { + keyval => keyval match { + case (key, value) => properites.setProperty(key.asInstanceOf[java.lang.String], value.asInstanceOf[java.lang.String]) + } + } + properites + } + + def initStoreClients() = { + if (storeClientFactory != null) { + storeClientFactory.close + } + + storeClientFactory = { + if (clientConfig.getProperty("bootstrap_urls", "none").startsWith("tcp")) { + new SocketStoreClientFactory(new ClientConfig(clientConfig)) + } else if (clientConfig.getProperty("bootstrap_urls", "none").startsWith("http")) { + new HttpStoreClientFactory(new ClientConfig(clientConfig)) + } else { + throw new IllegalArgumentException("Unknown boostrapUrl syntax" + clientConfig.getProperty("boostrap_urls", "No Bootstrap URLs defined")) + } + } + refClient = storeClientFactory.getStoreClient(refStore) + mapKeyClient = storeClientFactory.getStoreClient(mapKeyStore) + mapValueClient = storeClientFactory.getStoreClient(mapValueStore) + vectorSizeClient = storeClientFactory.getStoreClient(vectorSizeStore) + vectorValueClient = storeClientFactory.getStoreClient(vectorValueStore) + } + object IntSerializer { val bytesPerInt = java.lang.Integer.SIZE / java.lang.Byte.SIZE diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala index 395825152e..034b493006 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala @@ -15,7 +15,7 @@ trait EmbeddedVoldemort extends BeforeAndAfterAll with Logging { var 
server: VoldemortServer = null override protected def beforeAll(): Unit = { - + try { val dir = "./akka-persistence/akka-persistence-voldemort/target/scala_2.8.0/test-resources" val home = new File(dir) @@ -24,6 +24,7 @@ trait EmbeddedVoldemort extends BeforeAndAfterAll with Logging { log.info("Starting Voldemort") server = new VoldemortServer(config) server.start + VoldemortStorageBackend.initStoreClients log.info("Started") } catch { case e => log.error(e, "Error Starting Voldemort") From 8b719b4105c5ddf5b439b422ea13278c5606ba87 Mon Sep 17 00:00:00 2001 From: ticktock Date: Tue, 21 Sep 2010 11:00:28 -0400 Subject: [PATCH 16/52] adding sjson as a test dependency to voldemort persistence --- project/build/AkkaProject.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index c6ccd783f3..6d800cdc36 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -191,6 +191,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val sbinary = "sbinary" % "sbinary" % "2.8.0-0.3.1" % "compile" lazy val sjson = "sjson.json" % "sjson" % "0.8-2.8.0" % "compile" + lazy val sjson_test = "sjson.json" % "sjson" % "0.8-2.8.0" % "test" lazy val slf4j = "org.slf4j" % "slf4j-api" % SLF4J_VERSION % "compile" @@ -544,6 +545,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { val velocity = Dependencies.velocity val bdb = Dependencies.bdb val dbcp = Dependencies.dbcp + val sjson = Dependencies.sjson_test override def testOptions = TestFilter((name: String) => name.endsWith("Suite")) :: Nil } From 517888ce2bd956c7391f9319b3e113862f8a417b Mon Sep 17 00:00:00 2001 From: ticktock Date: Tue, 21 Sep 2010 11:29:55 -0400 Subject: [PATCH 17/52] Adding a direct test of PersistentRef, since after merging master over, something is blowing up there with the Actor tests --- .../main/scala/VoldemortStorageBackend.scala | 9 ++-- ...oldemortPersistentDatastructureSuite.scala | 46 +++++++++++++++++++ 2 files changed, 51 insertions(+), 4 deletions(-) create mode 100644 akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentDatastructureSuite.scala diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala index 6331378356..83b74a4a05 100644 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala @@ -26,9 +26,10 @@ MapStorageBackend[Array[Byte], Array[Byte]] with VectorStorageBackend[Array[Byte]] with RefStorageBackend[Array[Byte]] with Logging { + val bootstrapUrlsProp = "bootstrap_urls" val clientConfig = config.getConfigMap("akka.storage.voldemort.client") match { case Some(configMap) => getClientConfig(configMap.asMap) - case None => getClientConfig(new HashMap[String, String] + ("boostrap_urls" -> "tcp://localhost:6666")) + case None => getClientConfig(new HashMap[String, String] + (bootstrapUrlsProp -> "tcp://localhost:6666")) } val refStore = config.getString("akka.storage.voldemort.store.ref", "Refs") val mapKeyStore = config.getString("akka.storage.voldemort.store.map-key", "MapKeys") @@ -260,12 +261,12 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } storeClientFactory = { - if (clientConfig.getProperty("bootstrap_urls", "none").startsWith("tcp")) { + if 
(clientConfig.getProperty(bootstrapUrlsProp, "none").startsWith("tcp")) {
         new SocketStoreClientFactory(new ClientConfig(clientConfig))
-      } else if (clientConfig.getProperty("bootstrap_urls", "none").startsWith("http")) {
+      } else if (clientConfig.getProperty(bootstrapUrlsProp, "none").startsWith("http")) {
         new HttpStoreClientFactory(new ClientConfig(clientConfig))
       } else {
-        throw new IllegalArgumentException("Unknown boostrapUrl syntax" + clientConfig.getProperty("boostrap_urls", "No Bootstrap URLs defined"))
+        throw new IllegalArgumentException("Unknown bootstrapUrl syntax: " + clientConfig.getProperty(bootstrapUrlsProp, "No Bootstrap URLs defined"))
       }
     }
     refClient = storeClientFactory.getStoreClient(refStore)
diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentDatastructureSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentDatastructureSuite.scala
new file mode 100644
index 0000000000..8c439f8d85
--- /dev/null
+++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentDatastructureSuite.scala
@@ -0,0 +1,46 @@
+package se.scalablesolutions.akka.persistence.voldemort
+
+import org.scalatest.FunSuite
+import org.scalatest.matchers.ShouldMatchers
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+import se.scalablesolutions.akka.persistence.voldemort.VoldemortStorageBackend._
+import se.scalablesolutions.akka.util.{Logging, UUID}
+import collection.immutable.TreeSet
+import VoldemortStorageBackendSuite._
+
+import se.scalablesolutions.akka.stm._
+import se.scalablesolutions.akka.stm.global._
+import se.scalablesolutions.akka.config.ScalaConfig._
+import se.scalablesolutions.akka.persistence.common._
+import se.scalablesolutions.akka.util.Logging
+import se.scalablesolutions.akka.config.Config.config
+
+@RunWith(classOf[JUnitRunner])
+class VoldemortPersistentDatastructureSuite extends FunSuite with ShouldMatchers with EmbeddedVoldemort with Logging {
+  test("persistentRefs work as expected") {
+    val name = UUID.newUuid.toString
+    val one = "one".getBytes
+    atomic {
+      val ref = VoldemortStorage.getRef(name)
+      ref.isDefined should be(false)
+      ref.swap(one)
+      ref.get match {
+        case Some(bytes) => bytes should be(one)
+        case None => true should be(false)
+      }
+    }
+    val two = "two".getBytes
+    atomic {
+      val ref = VoldemortStorage.getRef(name)
+      ref.isDefined should be(true)
+      ref.swap(two)
+      ref.get match {
+        case Some(bytes) => bytes should be(two)
+        case None => true should be(false)
+      }
+    }
+  }
+
+
+}
\ No newline at end of file

From bc2ee5793df2b1fefaed921ccc212db7c0fdb2d2 Mon Sep 17 00:00:00 2001
From: ticktock
Date: Tue, 21 Sep 2010 15:35:18 -0400
Subject: [PATCH 18/52] making the persistent data structures non-lazy in the ActorTest made things work...hmm seems strange though

---
 .../test/resources/config/server.properties  |  1 +
 .../scala/VoldemortPersistentActorSuite.scala | 16 +++++---
 ...oldemortPersistentDatastructureSuite.scala | 41 +++++++++++++++++++
 3 files changed, 52 insertions(+), 6 deletions(-)

diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties
index 1e6af91e2d..8f5a8ff884 100644
--- a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties
+++ b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties
@@ -1 +1,2 @@
 node.id=0
+enable.rebalancing=false
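The commit subject above is candid that the root cause is unclear. What is certain is the Scala semantics in play in the BankAccountActor hunk that follows: a lazy val is initialized on first access (under synchronization), not when the enclosing instance is constructed, so the Voldemort-backed map and vector would previously have been created inside the first message's transaction rather than when the actor was created. A tiny, self-contained illustration of just that timing difference (hypothetical class, unrelated to the Akka API):

    // Prints "strict" during construction, but "deferred" only when the
    // field is first read -- e.g. from inside a message handler or transaction.
    class InitTiming {
      val strict = { println("strict: initialized at construction"); 1 }
      lazy val deferred = { println("deferred: initialized on first access"); 2 }
    }

    object InitTimingDemo {
      def main(args: Array[String]): Unit = {
        val t = new InitTiming // prints "strict: ..."
        t.deferred             // only now prints "deferred: ..."
      }
    }
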
--git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala index ae575e1e96..f76c370667 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala @@ -24,8 +24,8 @@ object BankAccountActor { } class BankAccountActor extends Transactor { - private lazy val accountState = VoldemortStorage.newMap(state) - private lazy val txnLog = VoldemortStorage.newVector(tx) + private val accountState = VoldemortStorage.newMap(state) + private val txnLog = VoldemortStorage.newVector(tx) import sjson.json.DefaultProtocol._ import sjson.json.JsonSerialization._ @@ -122,21 +122,25 @@ Spec with describe("successful debit") { it("should debit successfully") { + log.info("Successful Debit starting") val bactor = actorOf[BankAccountActor] bactor.start val failer = actorOf[PersistentFailerActor] failer.start bactor !! Credit("a-123", 5000) + log.info("credited") bactor !! Debit("a-123", 3000, failer) - + log.info("debited") (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(2000) - + log.info("balance matched") bactor !! Credit("a-123", 7000) + log.info("Credited") (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(9000) - + log.info("Balance matched") bactor !! Debit("a-123", 8000, failer) + log.info("Debited") (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(1000) - + log.info("Balance matched") (bactor !! LogSize).get.asInstanceOf[Int] should equal(7) (bactor !! Log(0, 7)).get.asInstanceOf[Iterable[String]].size should equal(7) } diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentDatastructureSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentDatastructureSuite.scala index 8c439f8d85..89e7426e03 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentDatastructureSuite.scala +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentDatastructureSuite.scala @@ -43,4 +43,45 @@ class VoldemortPersistentDatastructureSuite extends FunSuite with ShouldMatchers } + test("Persistent Vectors function as expected") { + val name = UUID.newUuid.toString + val one = "one".getBytes + val two = "two".getBytes + atomic { + val vec = VoldemortStorage.getVector(name) + vec.add(one) + } + atomic { + val vec = VoldemortStorage.getVector(name) + vec.size should be(1) + vec.add(two) + } + atomic { + val vec = VoldemortStorage.getVector(name) + + vec.get(0) should be(one) + vec.get(1) should be(two) + vec.size should be(2) + vec.update(0, two) + } + + atomic { + val vec = VoldemortStorage.getVector(name) + vec.get(0) should be(two) + vec.get(1) should be(two) + vec.size should be(2) + vec.update(0, Array.empty[Byte]) + vec.update(1, Array.empty[Byte]) + } + + atomic { + val vec = VoldemortStorage.getVector(name) + vec.get(0) should be(Array.empty[Byte]) + vec.get(1) should be(Array.empty[Byte]) + vec.size should be(2) + } + + + } + } \ No newline at end of file From 6afad7a09782fa1da8a4a3aa55024eea2cbd383b Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Wed, 22 Sep 2010 11:37:23 +0200 Subject: [PATCH 19/52] Preparing to add UUIDs to RemoteServer as well --- akka-remote/src/main/scala/remote/RemoteServer.scala | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-)
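The diff that follows generifies RemoteServer's registry helpers over their key type, so that the existing String-keyed id registry and the Uuid-keyed registry being prepared here can share one code path. A minimal standalone sketch of the idea, assuming the com.eaio UUID type adopted earlier in this series; the names below are illustrative, not the actual RemoteServer internals:

  import java.util.concurrent.ConcurrentHashMap

  object RegistrySketch {
    type Uuid = com.eaio.uuid.UUID // assumption: the UUID type this series migrates to

    // One generic helper serves registries keyed by String today and by Uuid next;
    // putIfAbsent preserves the register-once semantics without a separate lookup.
    def register[Key, V <: AnyRef](key: Key, value: V, registry: ConcurrentHashMap[Key, V]): Unit =
      registry.putIfAbsent(key, value)

    val actorsById = new ConcurrentHashMap[String, AnyRef]
    val actorsByUuid = new ConcurrentHashMap[Uuid, AnyRef]
  }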
diff --git a/akka-remote/src/main/scala/remote/RemoteServer.scala b/akka-remote/src/main/scala/remote/RemoteServer.scala index 8784a8c81f..fadc5ce60a 100644 --- a/akka-remote/src/main/scala/remote/RemoteServer.scala +++ b/akka-remote/src/main/scala/remote/RemoteServer.scala @@ -314,7 +314,7 @@ class RemoteServer extends Logging with ListenerManagement { } } - private def register(id: String, actorRef: ActorRef, registry: ConcurrentHashMap[String, ActorRef]) { + private def register[Key](id: Key, actorRef: ActorRef, registry: ConcurrentHashMap[Key, ActorRef]) { if (_isRunning) { if (!registry.contains(id)) { if (!actorRef.isRunning) actorRef.start @@ -323,7 +323,7 @@ class RemoteServer extends Logging with ListenerManagement { } } - private def registerTypedActor(id: String, typedActor: AnyRef, registry: ConcurrentHashMap[String, AnyRef]) { + private def registerTypedActor[Key](id: Key, typedActor: AnyRef, registry: ConcurrentHashMap[Key, AnyRef]) { if (_isRunning) { if (!registry.contains(id)) { registry.put(id, typedActor) @@ -337,8 +337,7 @@ class RemoteServer extends Logging with ListenerManagement { def unregister(actorRef: ActorRef):Unit = synchronized { if (_isRunning) { log.debug("Unregistering server side remote actor [%s] with id [%s:%s]", actorRef.actorClass.getName, actorRef.id, actorRef.uuid) - val actorMap = actors() - actorMap remove actorRef.id + actors() remove actorRef.id if (actorRef.registeredInRemoteNodeDuringSerialization) actorsByUuid() remove actorRef.uuid } } From 70ac95048b3fabd781e14f812c2a1d59748665fd Mon Sep 17 00:00:00 2001 From: Debasish Ghosh Date: Wed, 22 Sep 2010 16:19:42 +0530 Subject: [PATCH 20/52] Integrated sjson type class based serialization into Akka - some backward incompatible changes there --- .../main/scala/remote/MessageSerializer.scala | 12 ++-- .../scala/serialization/Serializable.scala | 26 ++++++- .../main/scala/serialization/Serializer.scala | 21 +++++- .../ScalaJSONSerializableSpec.scala | 67 +++++++++++++++++++ .../ScalaJSONSerializerSpec.scala | 52 ++++++++++++++ .../SerializableTypeClassActorSpec.scala | 52 ++++++++++++++ .../scala/serialization/Ticket436Spec.scala | 49 -------------- 7 files changed, 221 insertions(+), 58 deletions(-) create mode 100644 akka-remote/src/test/scala/serialization/ScalaJSONSerializableSpec.scala create mode 100644 akka-remote/src/test/scala/serialization/ScalaJSONSerializerSpec.scala delete mode 100644 akka-remote/src/test/scala/serialization/Ticket436Spec.scala diff --git a/akka-remote/src/main/scala/remote/MessageSerializer.scala b/akka-remote/src/main/scala/remote/MessageSerializer.scala index 49f38524f9..7cda9e4b4c 100644 --- a/akka-remote/src/main/scala/remote/MessageSerializer.scala +++ b/akka-remote/src/main/scala/remote/MessageSerializer.scala @@ -28,17 +28,21 @@ object MessageSerializer extends Logging { messageProtocol.getSerializationScheme match { case SerializationSchemeType.JAVA => unbox(SERIALIZER_JAVA.fromBinary(messageProtocol.getMessage.toByteArray, None)) + case SerializationSchemeType.PROTOBUF => val clazz = loadManifest(SERIALIZER_PROTOBUF.classLoader, messageProtocol) SERIALIZER_PROTOBUF.fromBinary(messageProtocol.getMessage.toByteArray, Some(clazz)) + case SerializationSchemeType.SBINARY => val clazz = loadManifest(SERIALIZER_SBINARY.classLoader, messageProtocol) val renderer = clazz.newInstance.asInstanceOf[Serializable.SBinary[_ <: AnyRef]] renderer.fromBytes(messageProtocol.getMessage.toByteArray) + case SerializationSchemeType.SCALA_JSON => val clazz = 
loadManifest(SERIALIZER_SCALA_JSON.classLoader, messageProtocol) - import scala.reflect._ - SERIALIZER_SCALA_JSON.fromBinary(messageProtocol.getMessage.toByteArray)(Manifest.classType(clazz)) + val renderer = clazz.newInstance.asInstanceOf[Serializable.ScalaJSON[_]] + renderer.fromBytes(messageProtocol.getMessage.toByteArray) + case SerializationSchemeType.JAVA_JSON => val clazz = loadManifest(SERIALIZER_JAVA_JSON.classLoader, messageProtocol) SERIALIZER_JAVA_JSON.fromBinary(messageProtocol.getMessage.toByteArray, Some(clazz)) @@ -52,9 +56,9 @@ object MessageSerializer extends Logging { builder.setSerializationScheme(SerializationSchemeType.PROTOBUF) builder.setMessage(ByteString.copyFrom(serializable.toByteArray)) builder.setMessageManifest(ByteString.copyFromUtf8(serializable.getClass.getName)) - } else if (message.isInstanceOf[Serializable.ScalaJSON]) { + } else if (message.isInstanceOf[Serializable.ScalaJSON[_]]) { builder.setSerializationScheme(SerializationSchemeType.SCALA_JSON) - setMessageAndManifest(builder, message.asInstanceOf[Serializable.ScalaJSON]) + setMessageAndManifest(builder, message.asInstanceOf[Serializable.ScalaJSON[_ <: Any]]) } else if (message.isInstanceOf[Serializable.SBinary[_]]) { builder.setSerializationScheme(SerializationSchemeType.SBINARY) setMessageAndManifest(builder, message.asInstanceOf[Serializable.SBinary[_ <: Any]]) diff --git a/akka-remote/src/main/scala/serialization/Serializable.scala b/akka-remote/src/main/scala/serialization/Serializable.scala index 216869d7a6..c446dbbe59 100644 --- a/akka-remote/src/main/scala/serialization/Serializable.scala +++ b/akka-remote/src/main/scala/serialization/Serializable.scala @@ -91,10 +91,32 @@ object Serializable { } /** + * case class Address(street: String, city: String, zip: String) + * extends ScalaJSON[Address] { + * + * implicit val AddressFormat: Format[Address] = + * asProduct3("street", "city", "zip")(Address)(Address.unapply(_).get) + * + * import dispatch.json._ + * import sjson.json._ + * import sjson.json.JsonSerialization._ + * + * def toJSON: String = JsValue.toJson(tojson(this)) + * def toBytes: Array[Byte] = tobinary(this) + * def fromBytes(bytes: Array[Byte]): Address = frombinary[Address](bytes) + * def fromJSON(js: String): Address = fromjson[Address](Js(js)) + * } + * + * val a = Address(...) 
+ * val js = tojson(a) + * val add = fromjson[Address](js) + * * @author Jonas Bonér */ - trait ScalaJSON extends JSON { + trait ScalaJSON[T] extends JSON { def toJSON: String = new String(toBytes, "UTF-8") - def toBytes: Array[Byte] = SJSONSerializer.SJSON.out(this) + def fromJSON(js: String): T + def toBytes: Array[Byte] + def fromBytes(bytes: Array[Byte]): T } } diff --git a/akka-remote/src/main/scala/serialization/Serializer.scala b/akka-remote/src/main/scala/serialization/Serializer.scala index 1365a7d4c1..d4950b323e 100644 --- a/akka-remote/src/main/scala/serialization/Serializer.scala +++ b/akka-remote/src/main/scala/serialization/Serializer.scala @@ -128,11 +128,25 @@ object Serializer { /** * @author Jonas Bonér */ - object ScalaJSON extends ScalaJSON - trait ScalaJSON extends Serializer { + trait ScalaJSON { + import dispatch.json._ + import sjson.json._ + import sjson.json.JsonSerialization + + var classLoader: Option[ClassLoader] = None + + def tojson[T](o: T)(implicit tjs: Writes[T]): JsValue = JsonSerialization.tojson(o)(tjs) + + def fromjson[T](json: JsValue)(implicit fjs: Reads[T]): T = JsonSerialization.fromjson(json)(fjs) + + def tobinary[T](o: T)(implicit tjs: Writes[T]): Array[Byte] = JsonSerialization.tobinary(o)(tjs) + + def frombinary[T](bytes: Array[Byte])(implicit fjs: Reads[T]): T = JsonSerialization.frombinary(bytes)(fjs) + + // backward compatibility + // implemented using reflection-based json serialization def toBinary(obj: AnyRef): Array[Byte] = SJSONSerializer.SJSON.out(obj) - // FIXME set ClassLoader on SJSONSerializer.SJSON def fromBinary(bytes: Array[Byte], clazz: Option[Class[_]]): AnyRef = SJSONSerializer.SJSON.in(bytes) import scala.reflect.Manifest @@ -144,6 +158,7 @@ object Serializer { SJSONSerializer.SJSON.in(bytes)(m) } } + object ScalaJSON extends ScalaJSON /** * @author Jonas Bonér diff --git a/akka-remote/src/test/scala/serialization/ScalaJSONSerializableSpec.scala b/akka-remote/src/test/scala/serialization/ScalaJSONSerializableSpec.scala new file mode 100644 index 0000000000..0ca548d4e1 --- /dev/null +++ b/akka-remote/src/test/scala/serialization/ScalaJSONSerializableSpec.scala @@ -0,0 +1,67 @@ +package se.scalablesolutions.akka.serialization + +import org.scalatest.Spec +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.serialization.Serializable.ScalaJSON + +object Serializables { + import sjson.json.DefaultProtocol._ + case class Shop(store: String, item: String, price: Int) extends + ScalaJSON[Shop] { + implicit val ShopFormat: sjson.json.Format[Shop] = + asProduct3("store", "item", "price")(Shop)(Shop.unapply(_).get) + + import dispatch.json._ + import sjson.json._ + import sjson.json.JsonSerialization._ + + def toBytes: Array[Byte] = tobinary(this) + def fromBytes(bytes: Array[Byte]) = frombinary[Shop](bytes) + def fromJSON(js: String) = fromjson[Shop](Js(js)) + } + + case class MyMessage(val id: String, val value: Tuple2[String, Int]) + implicit val MyMessageFormat: sjson.json.Format[MyMessage] = + asProduct2("id", "value")(MyMessage)(MyMessage.unapply(_).get) + + case class MyJsonObject(val key: String, val map: Map[String, Int], + val standAloneInt: Int) extends ScalaJSON[MyJsonObject] { + implicit val MyJsonObjectFormat: sjson.json.Format[MyJsonObject] = + asProduct3("key", "map", "standAloneInt")(MyJsonObject)(MyJsonObject.unapply(_).get) + + import dispatch.json._ + import sjson.json._ + 
import sjson.json.JsonSerialization._ + + def toBytes: Array[Byte] = tobinary(this) + def fromBytes(bytes: Array[Byte]) = frombinary[MyJsonObject](bytes) + def fromJSON(js: String) = fromjson[MyJsonObject](Js(js)) + } +} + +@RunWith(classOf[JUnitRunner]) +class ScalaJSONSerializableSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + + import Serializables._ + describe("Serialization of case classes") { + it("should be able to serialize and de-serialize") { + val s = Shop("Target", "cooker", 120) + s.fromBytes(s.toBytes) should equal(s) + s.fromJSON(s.toJSON) should equal(s) + + val key: String = "myKey" + val value: Int = 123 + val standAloneInt: Int = 35 + val message = MyJsonObject(key, Map(key -> value), standAloneInt) + message.fromBytes(message.toBytes) should equal(message) + message.fromJSON(message.toJSON) should equal(message) + } + } +} diff --git a/akka-remote/src/test/scala/serialization/ScalaJSONSerializerSpec.scala b/akka-remote/src/test/scala/serialization/ScalaJSONSerializerSpec.scala new file mode 100644 index 0000000000..7d1ef4c7a0 --- /dev/null +++ b/akka-remote/src/test/scala/serialization/ScalaJSONSerializerSpec.scala @@ -0,0 +1,52 @@ +package se.scalablesolutions.akka.serialization + +import org.scalatest.Spec +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.serialization.Serializer.ScalaJSON + +object Protocols { + import sjson.json.DefaultProtocol._ + case class Shop(store: String, item: String, price: Int) + implicit val ShopFormat: sjson.json.Format[Shop] = + asProduct3("store", "item", "price")(Shop)(Shop.unapply(_).get) + + case class MyMessage(val id: String, val value: Tuple2[String, Int]) + implicit val MyMessageFormat: sjson.json.Format[MyMessage] = + asProduct2("id", "value")(MyMessage)(MyMessage.unapply(_).get) + + case class MyJsonObject(val key: String, val map: Map[String, Int], + val standAloneInt: Int) + implicit val MyJsonObjectFormat: sjson.json.Format[MyJsonObject] = + asProduct3("key", "map", "standAloneInt")(MyJsonObject)(MyJsonObject.unapply(_).get) +} + +@RunWith(classOf[JUnitRunner]) +class ScalaJSONSerializerSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + + import Protocols._ + import ScalaJSON._ + describe("Serialization of case classes") { + it("should be able to serialize and de-serialize") { + val s = Shop("Target", "cooker", 120) + fromjson[Shop](tojson(s)) should equal(s) + frombinary[Shop](tobinary(s)) should equal(s) + + val o = MyMessage("dg", ("akka", 100)) + fromjson[MyMessage](tojson(o)) should equal(o) + frombinary[MyMessage](tobinary(o)) should equal(o) + + val key: String = "myKey" + val value: Int = 123 + val standAloneInt: Int = 35 + val message = MyJsonObject(key, Map(key -> value), standAloneInt) + fromjson[MyJsonObject](tojson(message)) should equal(message) + } + } +} diff --git a/akka-remote/src/test/scala/serialization/SerializableTypeClassActorSpec.scala b/akka-remote/src/test/scala/serialization/SerializableTypeClassActorSpec.scala index 832a655c22..de64b803fa 100644 --- a/akka-remote/src/test/scala/serialization/SerializableTypeClassActorSpec.scala +++ b/akka-remote/src/test/scala/serialization/SerializableTypeClassActorSpec.scala @@ -8,6 +8,7 @@ import org.scalatest.junit.JUnitRunner import org.junit.runner.RunWith import se.scalablesolutions.akka.serialization._ +import dispatch.json._ import se.scalablesolutions.akka.actor._ 
import ActorSerialization._ import Actor._ @@ -52,6 +53,10 @@ class SerializableTypeClassActorSpec extends implicit object MyStatelessActorFormat extends StatelessActorFormat[MyStatelessActorWithMessagesInMailbox] } + object BinaryFormatMyActorWithSerializableMessages { + implicit object MyActorWithSerializableMessagesFormat extends StatelessActorFormat[MyActorWithSerializableMessages] + } + object BinaryFormatMyJavaSerializableActor { implicit object MyJavaSerializableActorFormat extends SerializerBasedActorFormat[MyJavaSerializableActor] { val serializer = Serializer.Java @@ -139,6 +144,29 @@ class SerializableTypeClassActorSpec extends (actor3 !! "hello-reply").getOrElse("_") should equal("world") } } + + describe("Custom serializable actors") { + it("should serialize and de-serialize") { + import BinaryFormatMyActorWithSerializableMessages._ + + val actor1 = actorOf[MyActorWithSerializableMessages].start + (actor1 ! MyMessage("hello1", ("akka", 100))) + (actor1 ! MyMessage("hello2", ("akka", 200))) + (actor1 ! MyMessage("hello3", ("akka", 300))) + (actor1 ! MyMessage("hello4", ("akka", 400))) + (actor1 ! MyMessage("hello5", ("akka", 500))) + actor1.mailboxSize should be > (0) + val actor2 = fromBinary(toBinary(actor1)) + Thread.sleep(1000) + actor2.mailboxSize should be > (0) + (actor2 !! "hello-reply").getOrElse("_") should equal("world") + + val actor3 = fromBinary(toBinary(actor1, false)) + Thread.sleep(1000) + actor3.mailboxSize should equal(0) + (actor3 !! "hello-reply").getOrElse("_") should equal("world") + } + } } class MyActorWithDualCounter extends Actor { @@ -188,3 +216,27 @@ class MyStatelessActorWithMessagesInMailbox extends Actor { self.reply("world " + count) } } + +class MyActorWithSerializableMessages extends Actor { + def receive = { + case MyMessage(s, t) => + println("# messages in mailbox " + self.mailboxSize) + Thread.sleep(500) + case "hello-reply" => self.reply("world") + } +} + +case class MyMessage(val id: String, val value: Tuple2[String, Int]) + extends Serializable.ScalaJSON[MyMessage] { + + def this() = this(null, null) + import sjson.json.DefaultProtocol._ + import sjson.json._ + import sjson.json.JsonSerialization._ + implicit val MyMessageFormat: sjson.json.Format[MyMessage] = + asProduct2("id", "value")(MyMessage)(MyMessage.unapply(_).get) + + def toBytes: Array[Byte] = tobinary(this) + def fromBytes(bytes: Array[Byte]) = frombinary[MyMessage](bytes) + def fromJSON(js: String) = fromjson[MyMessage](Js(js)) +} diff --git a/akka-remote/src/test/scala/serialization/Ticket436Spec.scala b/akka-remote/src/test/scala/serialization/Ticket436Spec.scala deleted file mode 100644 index 042f3f07be..0000000000 --- a/akka-remote/src/test/scala/serialization/Ticket436Spec.scala +++ /dev/null @@ -1,49 +0,0 @@ -package se.scalablesolutions.akka.actor.serialization - - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterAll -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import se.scalablesolutions.akka.serialization.Serializer -import se.scalablesolutions.akka.serialization.Serializable.ScalaJSON -import scala.reflect._ -import scala.annotation.target._ -import sjson.json.JSONTypeHint - -@BeanInfo class MyJsonObject(val key: String, - @(JSONTypeHint @field)(value = classOf[Int]) - val map: Map[String, Int], - val standAloneInt: Int) extends ScalaJSON { - private def this() = this(null, null, -1) - override def toString(): String = try { - val mapValue: Int = map.getOrElse(key, -1) - 
println("Map value: %s".format(mapValue.asInstanceOf[AnyRef].getClass)) - "Key: %s, Map value: %d, Stand Alone Int: %d".format(key, mapValue, standAloneInt) - } catch { - case e: ClassCastException => e.getMessage - case _ => "Unknown error" - } -} - -@RunWith(classOf[JUnitRunner]) -class Ticket436Spec extends - Spec with - ShouldMatchers with - BeforeAndAfterAll { - - describe("Serialization of Maps containing Int") { - it("should be able to serialize and de-serialize preserving the data types of the Map") { - val key: String = "myKey" - val value: Int = 123 - val standAloneInt: Int = 35 - val message = new MyJsonObject(key, Map(key -> value), standAloneInt) - - val json = message.toJSON - val copy = Serializer.ScalaJSON.fromJSON[MyJsonObject](json) - copy.asInstanceOf[MyJsonObject].map.get("myKey").get.isInstanceOf[Int] should equal(true) - } - } -} From 1fdaf222a21abf98e198fa70675b506ce07788fb Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Wed, 22 Sep 2010 15:47:19 +0200 Subject: [PATCH 21/52] Adding poms --- .../apache/hbase/hbase-core/0.20.6/hbase-core-0.20.6.pom | 8 ++++++++ .../apache/hbase/hbase-test/0.20.6/hbase-test-0.20.6.pom | 8 ++++++++ 2 files changed, 16 insertions(+) create mode 100644 embedded-repo/org/apache/hbase/hbase-core/0.20.6/hbase-core-0.20.6.pom create mode 100644 embedded-repo/org/apache/hbase/hbase-test/0.20.6/hbase-test-0.20.6.pom diff --git a/embedded-repo/org/apache/hbase/hbase-core/0.20.6/hbase-core-0.20.6.pom b/embedded-repo/org/apache/hbase/hbase-core/0.20.6/hbase-core-0.20.6.pom new file mode 100644 index 0000000000..19a8b54700 --- /dev/null +++ b/embedded-repo/org/apache/hbase/hbase-core/0.20.6/hbase-core-0.20.6.pom @@ -0,0 +1,8 @@ + + + 4.0.0 + org.apache.hbase + hbase-core + 0.20.6 + jar + \ No newline at end of file diff --git a/embedded-repo/org/apache/hbase/hbase-test/0.20.6/hbase-test-0.20.6.pom b/embedded-repo/org/apache/hbase/hbase-test/0.20.6/hbase-test-0.20.6.pom new file mode 100644 index 0000000000..bdc80cc8b7 --- /dev/null +++ b/embedded-repo/org/apache/hbase/hbase-test/0.20.6/hbase-test-0.20.6.pom @@ -0,0 +1,8 @@ + + + 4.0.0 + org.apache.hbase + hbase-test + 0.20.6 + jar + \ No newline at end of file From 38978ab71d394b2e065508736379f8e1b6748fb1 Mon Sep 17 00:00:00 2001 From: David Greco Date: Wed, 22 Sep 2010 16:05:43 +0200 Subject: [PATCH 22/52] Now the hbase persistent storage tests dont'run by default --- .../src/main/scala/HbaseStorageBackend.scala | 1 - ...istentActorSpec.scala => HbasePersistentActorSpecTest.scala} | 0 .../{HbaseStorageSpec.scala => HbaseStorageSpecTest.scala} | 0 .../{HbaseTicket343Spec.scala => HbaseTicket343SpecTest.scala} | 0 .../scala/{SimpleHbaseTest.scala => SimpleHbaseSpecTest.scala} | 0 project/build/AkkaProject.scala | 2 ++ 6 files changed, 2 insertions(+), 1 deletion(-) rename akka-persistence/akka-persistence-hbase/src/test/scala/{HbasePersistentActorSpec.scala => HbasePersistentActorSpecTest.scala} (100%) rename akka-persistence/akka-persistence-hbase/src/test/scala/{HbaseStorageSpec.scala => HbaseStorageSpecTest.scala} (100%) rename akka-persistence/akka-persistence-hbase/src/test/scala/{HbaseTicket343Spec.scala => HbaseTicket343SpecTest.scala} (100%) rename akka-persistence/akka-persistence-hbase/src/test/scala/{SimpleHbaseTest.scala => SimpleHbaseSpecTest.scala} (100%) diff --git a/akka-persistence/akka-persistence-hbase/src/main/scala/HbaseStorageBackend.scala b/akka-persistence/akka-persistence-hbase/src/main/scala/HbaseStorageBackend.scala index 69c393f455..30873bf036 100644 --- 
a/akka-persistence/akka-persistence-hbase/src/main/scala/HbaseStorageBackend.scala +++ b/akka-persistence/akka-persistence-hbase/src/main/scala/HbaseStorageBackend.scala @@ -25,7 +25,6 @@ import org.apache.hadoop.hbase.util.Bytes */ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte], Array[Byte]] with VectorStorageBackend[Array[Byte]] with RefStorageBackend[Array[Byte]] with Logging { - val EMPTY_BYTE_ARRAY = new Array[Byte](0) val HBASE_ZOOKEEPER_QUORUM = config.getString("akka.storage.hbase.zookeeper-quorum", "localhost") val CONFIGURATION = new HBaseConfiguration val REF_TABLE_NAME = "__REF_TABLE" diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpec.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala similarity index 100% rename from akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpec.scala rename to akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpec.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala similarity index 100% rename from akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpec.scala rename to akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343Spec.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala similarity index 100% rename from akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343Spec.scala rename to akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseTest.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTest.scala similarity index 100% rename from akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseTest.scala rename to akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTest.scala diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index bb468c7b9f..a2806d2362 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -540,6 +540,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { val hbase_test = Dependencies.hbase_test val jetty = Dependencies.jett_mortbay val log4j = Dependencies.log4j + + override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil } // ------------------------------------------------------------------------------------------------------------------- From 4efef68acd4b13796f6210628ae4183b184c1065 Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Wed, 22 Sep 2010 16:12:55 +0200 Subject: [PATCH 23/52] Ported HBase to use new Uuids --- .../src/main/scala/HbaseStorage.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/akka-persistence/akka-persistence-hbase/src/main/scala/HbaseStorage.scala b/akka-persistence/akka-persistence-hbase/src/main/scala/HbaseStorage.scala index 1c3abdff4e..fc8114b7dd 100644 --- a/akka-persistence/akka-persistence-hbase/src/main/scala/HbaseStorage.scala +++ b/akka-persistence/akka-persistence-hbase/src/main/scala/HbaseStorage.scala @@ -4,16 +4,16 @@ package se.scalablesolutions.akka.persistence.hbase -import se.scalablesolutions.akka.util.UUID 
+import se.scalablesolutions.akka.actor.{Uuid,newUuid} import se.scalablesolutions.akka.stm._ import se.scalablesolutions.akka.persistence.common._ object HbaseStorage extends Storage { type ElementType = Array[Byte] - def newMap: PersistentMap[ElementType, ElementType] = newMap(UUID.newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(UUID.newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(UUID.newUuid.toString) + def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) + def newVector: PersistentVector[ElementType] = newVector(newUuid.toString) + def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) def getVector(id: String): PersistentVector[ElementType] = newVector(id) From 96ded85d21ec70e8556ce42fb2042bf04d874d8e Mon Sep 17 00:00:00 2001 From: David Greco Date: Wed, 22 Sep 2010 16:28:53 +0200 Subject: [PATCH 24/52] renamed the files and the names of the hbase tests, the names now end with Test --- .../src/test/scala/HbasePersistentActorSpecTest.scala | 2 +- .../src/test/scala/HbaseStorageSpecTest.scala | 2 +- .../src/test/scala/HbaseTicket343SpecTest.scala | 2 +- .../src/test/scala/SimpleHbaseSpecTest.scala | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala index 468cd800ce..bad557dcb9 100644 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala +++ b/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala @@ -76,7 +76,7 @@ class PersistentFailerActor extends Transactor { } } -class HbasePersistentActorSpec extends JUnitSuite with BeforeAndAfterAll { +class HbasePersistentActorSpecTest extends JUnitSuite with BeforeAndAfterAll { val testUtil = new HBaseTestingUtility diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala index 1bad777675..7b38389424 100644 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala +++ b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala @@ -5,7 +5,7 @@ import org.scalatest.matchers.ShouldMatchers import org.scalatest.BeforeAndAfterAll import org.scalatest.BeforeAndAfterEach -class HbaseStorageSpec extends +class HbaseStorageSpecTest extends Spec with ShouldMatchers with BeforeAndAfterAll with diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala index d61b82fa87..2e0d446403 100644 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala +++ b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala @@ -171,7 +171,7 @@ object Storage { import Storage._ @RunWith(classOf[JUnitRunner]) -class HbaseTicket343Spec extends Spec with ShouldMatchers with BeforeAndAfterAll with BeforeAndAfterEach { +class HbaseTicket343SpecTest extends Spec with ShouldMatchers with BeforeAndAfterAll with BeforeAndAfterEach { import org.apache.hadoop.hbase.HBaseTestingUtility diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTest.scala
b/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTest.scala index f59e3ae55e..883e94b5eb 100644 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTest.scala +++ b/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTest.scala @@ -10,7 +10,7 @@ import org.junit.Test import org.apache.hadoop.hbase.HBaseTestingUtility @RunWith(classOf[JUnitRunner]) -class PersistenceSpec extends Spec with BeforeAndAfterAll with ShouldMatchers { +class PersistenceSpecTest extends Spec with BeforeAndAfterAll with ShouldMatchers { import org.apache.hadoop.hbase.HBaseTestingUtility From 476e8108332fca8cac821c3b30dd8a0bc178b8dc Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Wed, 22 Sep 2010 22:14:07 +0200 Subject: [PATCH 25/52] Bumping Jersey to 1.3 --- project/build/AkkaProject.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index a2806d2362..28360f155b 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -95,7 +95,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val DISPATCH_VERSION = "0.7.4" lazy val HAWT_DISPATCH_VERSION = "1.0" lazy val JACKSON_VERSION = "1.2.1" - lazy val JERSEY_VERSION = "1.2" + lazy val JERSEY_VERSION = "1.3" lazy val MULTIVERSE_VERSION = "0.6.1" lazy val SCALATEST_VERSION = "1.2-for-scala-2.8.0.final-SNAPSHOT" lazy val LOGBACK_VERSION = "0.9.24" From a6dd0986e72b4491c2176f784557f3de5df5a45e Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Thu, 23 Sep 2010 10:27:05 +0200 Subject: [PATCH 26/52] Removing log4j and making Jetty intransitive --- project/build/AkkaProject.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index 28360f155b..8926799556 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -228,8 +228,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val scalatest = "org.scalatest" % "scalatest" % SCALATEST_VERSION % "test" lazy val hadoop_test = "org.apache.hadoop" % "hadoop-test" % "0.20.2" % "test" lazy val hbase_test = "org.apache.hbase" % "hbase-test" % "0.20.6" % "test" - lazy val log4j = "log4j" % "log4j" % "1.2.15" % "test" - lazy val jett_mortbay = "org.mortbay.jetty" % "jetty" % "6.1.14" % "test" + lazy val jett_mortbay = "org.mortbay.jetty" % "jetty" % "6.1.14" % "test" intransitive() } // ------------------------------------------------------------------------------------------------------------------- @@ -539,7 +538,6 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { val hadoop_test = Dependencies.hadoop_test val hbase_test = Dependencies.hbase_test val jetty = Dependencies.jett_mortbay - val log4j = Dependencies.log4j override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil } From 1d9d8495dda9f0cc4388e3eae44555d76b831bfa Mon Sep 17 00:00:00 2001 From: David Greco Date: Thu, 23 Sep 2010 10:37:22 +0200 Subject: [PATCH 27/52] Modified the hbase storage backend dependencies to exclude slf4j --- project/build/AkkaProject.scala | 30 ++++++++++++++++++++---------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index 28360f155b..e98d68d77a 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -229,7 +229,7 @@ class
AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val hadoop_test = "org.apache.hadoop" % "hadoop-test" % "0.20.2" % "test" lazy val hbase_test = "org.apache.hbase" % "hbase-test" % "0.20.6" % "test" lazy val log4j = "log4j" % "log4j" % "1.2.15" % "test" - lazy val jett_mortbay = "org.mortbay.jetty" % "jetty" % "6.1.14" % "test" + lazy val jetty_mortbay = "org.mortbay.jetty" % "jetty" % "6.1.14" % "test" } // ------------------------------------------------------------------------------------------------------------------- @@ -531,15 +531,25 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------------------------------------------------------------- class AkkaHbaseProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val zookeeper = Dependencies.zookeeper - val hadoop_core = Dependencies.hadoop_core - val hbase_core = Dependencies.hbase_core - - // testing - val hadoop_test = Dependencies.hadoop_test - val hbase_test = Dependencies.hbase_test - val jetty = Dependencies.jett_mortbay - val log4j = Dependencies.log4j + override def ivyXML = + + + + + + + + + + + + + + + + + + override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil } From f9c8af68ef73de74665afc86837c08d9eef02d49 Mon Sep 17 00:00:00 2001 From: David Greco Date: Thu, 23 Sep 2010 11:16:00 +0200 Subject: [PATCH 28/52] renamed the files and the names of the hbase tests, the names now end with Test --- project/build/AkkaProject.scala | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index e98d68d77a..75ea8522ec 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -533,21 +533,21 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaHbaseProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { override def ivyXML = - + - + + + + + + - + - - + - - - - - + From b3b2ddacb9b3b9e5d9e53292018d8d757e32f097 Mon Sep 17 00:00:00 2001 From: David Greco Date: Thu, 23 Sep 2010 13:12:10 +0200 Subject: [PATCH 29/52] Modified the hbase storage backend dependencies to exclude slf4j --- project/build/AkkaProject.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index 75ea8522ec..0829162a9e 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -533,11 +533,11 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaHbaseProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { override def ivyXML = - + - + - + From b5670111d041a450d1dad2b662798017b19a0507 Mon Sep 17 00:00:00 2001 From: Michael Kober Date: Thu, 23 Sep 2010 13:38:33 +0200 Subject: [PATCH 30/52] fixed some tests --- .../scala/remote/ClientInitiatedRemoteActorSpec.scala | 2 ++ .../scala/remote/ServerInitiatedRemoteActorSpec.scala | 2 ++ akka-remote/src/test/scala/ticket/Ticket434Spec.scala | 11 +++++++---- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/akka-remote/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala b/akka-remote/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala index 6670722b02..d39b58d41d 100644 --- a/akka-remote/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala +++
b/akka-remote/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala @@ -79,6 +79,8 @@ class ClientInitiatedRemoteActorSpec extends JUnitSuite { @After def finished() { s1.shutdown + val s2 = RemoteServer.serverFor(HOSTNAME, PORT2) + if (s2.isDefined) s2.get.shutdown RemoteClient.shutdownAll Thread.sleep(1000) } diff --git a/akka-remote/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala b/akka-remote/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala index fbf723ece5..beb3bd2f46 100644 --- a/akka-remote/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala +++ b/akka-remote/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala @@ -72,6 +72,8 @@ class ServerInitiatedRemoteActorSpec extends JUnitSuite { def finished { try { server.shutdown + val s2 = RemoteServer.serverFor(HOSTNAME, PORT2) + if (s2.isDefined) s2.get.shutdown RemoteClient.shutdownAll Thread.sleep(1000) } catch { diff --git a/akka-remote/src/test/scala/ticket/Ticket434Spec.scala b/akka-remote/src/test/scala/ticket/Ticket434Spec.scala index b27c17cfe5..fd21d5a191 100644 --- a/akka-remote/src/test/scala/ticket/Ticket434Spec.scala +++ b/akka-remote/src/test/scala/ticket/Ticket434Spec.scala @@ -14,12 +14,15 @@ import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ class Ticket434Spec extends Spec with ShouldMatchers { + val HOSTNAME = "localhost" + val PORT = 9991 + describe("A server managed remote actor") { - it("should possible be use a custom service name containing ':'") { - val server = new RemoteServer().start("localhost", 9999) + it("can use a custom service name containing ':'") { + val server = new RemoteServer().start(HOSTNAME, PORT) server.register("my:service", actorOf[RemoteActorSpecActorUnidirectional]) - val actor = RemoteClient.actorFor("my:service", 5000L, "localhost", 9999) + val actor = RemoteClient.actorFor("my:service", 5000L, HOSTNAME, PORT) actor ! 
"OneWay" assert(RemoteActorSpecActorUnidirectional.latch.await(1, TimeUnit.SECONDS)) @@ -31,7 +34,7 @@ class Ticket434Spec extends Spec with ShouldMatchers { } describe("The ActorInfoProtocol") { - it("should be possible to set the acor id and uuuid") { + it("sets the acor id and uuuid") { val actorInfoBuilder = ActorInfoProtocol.newBuilder .setUuid("unique-id") .setId("some-id") From 491722f569680e6115c280ab4197948831478608 Mon Sep 17 00:00:00 2001 From: Michael Kober Date: Thu, 23 Sep 2010 13:38:33 +0200 Subject: [PATCH 31/52] fixed some tests --- .../scala/remote/ClientInitiatedRemoteActorSpec.scala | 2 ++ .../scala/remote/ServerInitiatedRemoteActorSpec.scala | 2 ++ akka-remote/src/test/scala/ticket/Ticket434Spec.scala | 11 +++++++---- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/akka-remote/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala b/akka-remote/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala index 6670722b02..d39b58d41d 100644 --- a/akka-remote/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala +++ b/akka-remote/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala @@ -79,6 +79,8 @@ class ClientInitiatedRemoteActorSpec extends JUnitSuite { @After def finished() { s1.shutdown + val s2 = RemoteServer.serverFor(HOSTNAME, PORT2) + if (s2.isDefined) s2.get.shutdown RemoteClient.shutdownAll Thread.sleep(1000) } diff --git a/akka-remote/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala b/akka-remote/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala index fbf723ece5..f78fcb89c9 100644 --- a/akka-remote/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala +++ b/akka-remote/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala @@ -72,6 +72,8 @@ class ServerInitiatedRemoteActorSpec extends JUnitSuite { def finished { try { server.shutdown + val s2 = RemoteServer.serverFor(HOSTNAME, PORT + 1) + if (s2.isDefined) s2.get.shutdown RemoteClient.shutdownAll Thread.sleep(1000) } catch { diff --git a/akka-remote/src/test/scala/ticket/Ticket434Spec.scala b/akka-remote/src/test/scala/ticket/Ticket434Spec.scala index b27c17cfe5..fd21d5a191 100644 --- a/akka-remote/src/test/scala/ticket/Ticket434Spec.scala +++ b/akka-remote/src/test/scala/ticket/Ticket434Spec.scala @@ -14,12 +14,15 @@ import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ class Ticket434Spec extends Spec with ShouldMatchers { + val HOSTNAME = "localhost" + val PORT = 9991 + describe("A server managed remote actor") { - it("should possible be use a custom service name containing ':'") { - val server = new RemoteServer().start("localhost", 9999) + it("can use a custom service name containing ':'") { + val server = new RemoteServer().start(HOSTNAME, PORT) server.register("my:service", actorOf[RemoteActorSpecActorUnidirectional]) - val actor = RemoteClient.actorFor("my:service", 5000L, "localhost", 9999) + val actor = RemoteClient.actorFor("my:service", 5000L, HOSTNAME, PORT) actor ! 
"OneWay" assert(RemoteActorSpecActorUnidirectional.latch.await(1, TimeUnit.SECONDS)) @@ -31,7 +34,7 @@ class Ticket434Spec extends Spec with ShouldMatchers { } describe("The ActorInfoProtocol") { - it("should be possible to set the acor id and uuuid") { + it("sets the acor id and uuuid") { val actorInfoBuilder = ActorInfoProtocol.newBuilder .setUuid("unique-id") .setId("some-id") From 79dc3483d980d637c0c516f30ce28072b2f191ad Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Thu, 23 Sep 2010 14:11:15 +0200 Subject: [PATCH 32/52] Removing the running of HBase tests --- .../src/test/scala/HbasePersistentActorSpecTest.scala | 2 +- .../src/test/scala/HbaseStorageSpecTest.scala | 2 +- .../src/test/scala/HbaseTicket343SpecTest.scala | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala index bad557dcb9..468cd800ce 100644 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala +++ b/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala @@ -76,7 +76,7 @@ class PersistentFailerActor extends Transactor { } } -class HbasePersistentActorSpecTest extends JUnitSuite with BeforeAndAfterAll { +class HbasePersistentActorSpec extends JUnitSuite with BeforeAndAfterAll { val testUtil = new HBaseTestingUtility diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala index 7b38389424..1bad777675 100644 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala +++ b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala @@ -5,7 +5,7 @@ import org.scalatest.matchers.ShouldMatchers import org.scalatest.BeforeAndAfterAll import org.scalatest.BeforeAndAfterEach -class HbaseStorageSpecTest extends +class HbaseStorageSpec extends Spec with ShouldMatchers with BeforeAndAfterAll with diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala index 2e0d446403..d61b82fa87 100644 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala +++ b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala @@ -171,7 +171,7 @@ object Storage { import Storage._ @RunWith(classOf[JUnitRunner]) -class HbaseTicket343SpecTest extends Spec with ShouldMatchers with BeforeAndAfterAll with BeforeAndAfterEach { +class HbaseTicket343Spec extends Spec with ShouldMatchers with BeforeAndAfterAll with BeforeAndAfterEach { import org.apache.hadoop.hbase.HBaseTestingUtility From cb3fb2523140eb3eb69ca107aaecad51944abd9b Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Thu, 23 Sep 2010 14:50:11 +0200 Subject: [PATCH 33/52] Removing registeredInRemoteNodeDuringSerialization --- akka-actor/src/main/scala/actor/ActorRef.scala | 1 - akka-remote/src/main/scala/remote/RemoteServer.scala | 12 +++++------- .../scala/serialization/SerializationProtocol.scala | 9 +++------ 3 files changed, 8 insertions(+), 14 deletions(-) diff --git a/akka-actor/src/main/scala/actor/ActorRef.scala b/akka-actor/src/main/scala/actor/ActorRef.scala index e680f13c58..0ec35ab9b4 100644 --- a/akka-actor/src/main/scala/actor/ActorRef.scala +++ 
b/akka-actor/src/main/scala/actor/ActorRef.scala @@ -82,7 +82,6 @@ trait ActorRef extends @volatile protected[this] var _status: ActorRefStatus.StatusType = ActorRefStatus.UNSTARTED @volatile protected[akka] var _homeAddress = new InetSocketAddress(RemoteServerModule.HOSTNAME, RemoteServerModule.PORT) @volatile protected[akka] var _futureTimeout: Option[ScheduledFuture[AnyRef]] = None - @volatile protected[akka] var registeredInRemoteNodeDuringSerialization = false protected[akka] val guard = new ReentrantGuard /** diff --git a/akka-remote/src/main/scala/remote/RemoteServer.scala b/akka-remote/src/main/scala/remote/RemoteServer.scala index fadc5ce60a..bed9e9f933 100644 --- a/akka-remote/src/main/scala/remote/RemoteServer.scala +++ b/akka-remote/src/main/scala/remote/RemoteServer.scala @@ -337,8 +337,8 @@ class RemoteServer extends Logging with ListenerManagement { def unregister(actorRef: ActorRef):Unit = synchronized { if (_isRunning) { log.debug("Unregistering server side remote actor [%s] with id [%s:%s]", actorRef.actorClass.getName, actorRef.id, actorRef.uuid) - actors() remove actorRef.id - if (actorRef.registeredInRemoteNodeDuringSerialization) actorsByUuid() remove actorRef.uuid + actors().remove(actorRef.id,actorRef) + actorsByUuid().remove(actorRef.uuid,actorRef) } } @@ -353,11 +353,9 @@ class RemoteServer extends Logging with ListenerManagement { if (id.startsWith(UUID_PREFIX)) { actorsByUuid().remove(id.substring(UUID_PREFIX.length)) } else { - val actorRef = actors().get(id) - if (actorRef.registeredInRemoteNodeDuringSerialization) { - actorsByUuid() remove actorRef.uuid - } - actors() remove id + val actorRef = actors() get id + actorsByUuid().remove(actorRef.uuid,actorRef) + actors().remove(id,actorRef) } } } diff --git a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala index 0e7f5ce732..c07417c0e2 100644 --- a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala +++ b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala @@ -249,12 +249,9 @@ object RemoteActorSerialization { val host = homeAddress.getHostName val port = homeAddress.getPort - if (!registeredInRemoteNodeDuringSerialization) { - Actor.log.debug("Register serialized Actor [%s] as remote @ [%s:%s]", actorClass.getName, host, port) - RemoteServer.getOrCreateServer(homeAddress) - RemoteServer.registerActorByUuid(homeAddress, uuid.toString, ar) - registeredInRemoteNodeDuringSerialization = true - } + Actor.log.debug("Register serialized Actor [%s] as remote @ [%s:%s]", actorClass.getName, host, port) + RemoteServer.getOrCreateServer(homeAddress) + RemoteServer.registerActorByUuid(homeAddress, uuid.toString, ar) RemoteActorRefProtocol.newBuilder .setClassOrServiceName(uuid.toString) From ccee06a4c5f1a5bbd290628fdbf87a39f8c0e8a8 Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Thu, 23 Sep 2010 16:40:55 +0200 Subject: [PATCH 34/52] Temporarily removing voldemort module pending license resolution --- project/build/AkkaProject.scala | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index 362dabfb1c..63881d99fb 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -54,7 +54,6 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val CasbahRepoReleases = MavenRepository("Casbah Release Repo", "http://repo.bumnetworks.com/releases") lazy val 
ZookeeperRepo = MavenRepository("Zookeeper Repo", "http://lilycms.org/maven/maven2/deploy/") lazy val ClojarsRepo = MavenRepository("Clojars Repo", "http://clojars.org/repo") - lazy val OracleRepo = MavenRepository("Oracle Repo", "http://download.oracle.com/maven") } // ------------------------------------------------------------------------------------------------------------------- @@ -86,7 +85,6 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val casbahModuleConfig = ModuleConfiguration("com.novus", CasbahRepo) lazy val timeModuleConfig = ModuleConfiguration("org.scala-tools", "time", CasbahSnapshotRepo) lazy val voldemortModuleConfig = ModuleConfiguration("voldemort", ClojarsRepo) - lazy val sleepycatModuleConfig = ModuleConfiguration("com.sleepycat", OracleRepo) lazy val embeddedRepo = EmbeddedRepo // This is the only exception, because the embedded repo is fast! // ------------------------------------------------------------------------------------------------------------------- @@ -248,7 +246,6 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val jdom = "org.jdom" % "jdom" % "1.1" % "test" lazy val vold_jetty = "org.mortbay.jetty" % "jetty" % "6.1.18" % "test" lazy val velocity = "org.apache.velocity" % "velocity" % "1.6.2" % "test" - lazy val bdb = "com.sleepycat" % "je" % "4.0.103" % "test" lazy val dbcp = "commons-dbcp" % "commons-dbcp" % "1.2.2" % "test" } @@ -498,8 +495,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { new AkkaCassandraProject(_), akka_persistence_common) lazy val akka_persistence_hbase = project("akka-persistence-hbase", "akka-persistence-hbase", new AkkaHbaseProject(_), akka_persistence_common) - lazy val akka_persistence_voldemort = project("akka-persistence-voldemort", "akka-persistence-voldemort", - new AkkaVoldemortProject(_), akka_persistence_common) + //lazy val akka_persistence_voldemort = project("akka-persistence-voldemort", "akka-persistence-voldemort", + // new AkkaVoldemortProject(_), akka_persistence_common) } // ------------------------------------------------------------------------------------------------------------------- @@ -591,7 +588,6 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { val jdom = Dependencies.jdom val jetty = Dependencies.vold_jetty val velocity = Dependencies.velocity - val bdb = Dependencies.bdb val dbcp = Dependencies.dbcp val sjson = Dependencies.sjson_test @@ -642,7 +638,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { new AkkaOSGiAssemblyProject(_), akka_osgi_dependencies_bundle, akka_remote, akka_amqp, akka_http, akka_camel, akka_spring, akka_jta, akka_persistence.akka_persistence_common, akka_persistence.akka_persistence_redis, akka_persistence.akka_persistence_mongo, - akka_persistence.akka_persistence_cassandra,akka_persistence.akka_persistence_voldemort) + akka_persistence.akka_persistence_cassandra,akka_persistence.akka_persistence_hbase/*akka_persistence.akka_persistence_voldemort*/) } class AkkaOSGiDependenciesBundleProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with BNDPlugin { From f0ff68a39e7a074b044aee590eb39e06574b3a84 Mon Sep 17 00:00:00 2001 From: ticktock Date: Thu, 23 Sep 2010 10:57:14 -0400 Subject: [PATCH 35/52] Removing BDB as a test-runtime dependency --- .../src/test/resources/config/server.properties | 2 ++ .../src/test/resources/config/stores.xml | 10 +++++----- .../src/test/scala/EmbeddedVoldemort.scala | 3 +++ 
project/build/AkkaProject.scala | 4 ---- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties index 8f5a8ff884..6dcd5bb340 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties +++ b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties @@ -1,2 +1,4 @@ node.id=0 enable.rebalancing=false +enable.bdb.engine=false +slop.enable=false diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml index f2dd6ac099..26832d93fe 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml +++ b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml @@ -6,7 +6,7 @@ 1 1 1 - bdb + memory client string @@ -23,7 +23,7 @@ 1 1 1 - bdb + memory client identity @@ -39,7 +39,7 @@ 1 1 1 - bdb + memory client string @@ -56,7 +56,7 @@ 1 1 1 - bdb + memory client identity @@ -72,7 +72,7 @@ 1 1 1 - bdb + memory client string diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala index 034b493006..422d54c4ec 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala @@ -8,6 +8,8 @@ import org.scalatest.junit.JUnitRunner import voldemort.utils.Utils import java.io.File import se.scalablesolutions.akka.util.{Logging, UUID} +import collection.JavaConversions +import voldemort.store.memory.InMemoryStorageConfiguration @RunWith(classOf[JUnitRunner]) trait EmbeddedVoldemort extends BeforeAndAfterAll with Logging { @@ -21,6 +23,7 @@ trait EmbeddedVoldemort extends BeforeAndAfterAll with Logging { val home = new File(dir) log.info("Creating Voldemort Config") val config = VoldemortConfig.loadFromVoldemortHome(home.getCanonicalPath) + config.setStorageConfigurations(JavaConversions.asList(List(classOf[InMemoryStorageConfiguration].getName))) log.info("Starting Voldemort") server = new VoldemortServer(config) server.start diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index 6d800cdc36..e120f38d86 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -52,7 +52,6 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val SonatypeSnapshotRepo = MavenRepository("Sonatype OSS Repo", "http://oss.sonatype.org/content/repositories/releases") lazy val SunJDMKRepo = MavenRepository("Sun JDMK Repo", "http://wp5.e-taxonomy.eu/cdmlib/mavenrepo") lazy val ClojarsRepo = MavenRepository("Clojars Repo", "http://clojars.org/repo") - lazy val OracleRepo = MavenRepository("Oracle Repo", "http://download.oracle.com/maven") } // ------------------------------------------------------------------------------------------------------------------- @@ -82,7 +81,6 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val casbahModuleConfig = ModuleConfiguration("com.novus", CasbahRepo) lazy val timeModuleConfig = ModuleConfiguration("org.scala-tools", "time", CasbahSnapshotRepo) lazy val voldemortModuleConfig = ModuleConfiguration("voldemort", 
ClojarsRepo) - lazy val sleepycatModuleConfig = ModuleConfiguration("com.sleepycat", OracleRepo) lazy val embeddedRepo = EmbeddedRepo // This is the only exception, because the embedded repo is fast! // ------------------------------------------------------------------------------------------------------------------- @@ -230,7 +228,6 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val jdom = "org.jdom" % "jdom" % "1.1" % "test" lazy val vold_jetty = "org.mortbay.jetty" % "jetty" % "6.1.18" % "test" lazy val velocity = "org.apache.velocity" % "velocity" % "1.6.2" % "test" - lazy val bdb = "com.sleepycat" % "je" % "4.0.103" % "test" lazy val dbcp = "commons-dbcp" % "commons-dbcp" % "1.2.2" % "test" } @@ -543,7 +540,6 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { val jdom = Dependencies.jdom val jetty = Dependencies.vold_jetty val velocity = Dependencies.velocity - val bdb = Dependencies.bdb val dbcp = Dependencies.dbcp val sjson = Dependencies.sjson_test From 68ac180671e0b0ca001fb2a31a65105f4421a803 Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Thu, 23 Sep 2010 17:16:44 +0200 Subject: [PATCH 36/52] Re-adding voldemort --- project/build/AkkaProject.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index 63881d99fb..82ed509151 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -495,8 +495,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { new AkkaCassandraProject(_), akka_persistence_common) lazy val akka_persistence_hbase = project("akka-persistence-hbase", "akka-persistence-hbase", new AkkaHbaseProject(_), akka_persistence_common) - //lazy val akka_persistence_voldemort = project("akka-persistence-voldemort", "akka-persistence-voldemort", - // new AkkaVoldemortProject(_), akka_persistence_common) + lazy val akka_persistence_voldemort = project("akka-persistence-voldemort", "akka-persistence-voldemort", + new AkkaVoldemortProject(_), akka_persistence_common) } // ------------------------------------------------------------------------------------------------------------------- @@ -638,7 +638,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { new AkkaOSGiAssemblyProject(_), akka_osgi_dependencies_bundle, akka_remote, akka_amqp, akka_http, akka_camel, akka_spring, akka_jta, akka_persistence.akka_persistence_common, akka_persistence.akka_persistence_redis, akka_persistence.akka_persistence_mongo, - akka_persistence.akka_persistence_cassandra,akka_persistence.akka_persistence_hbase/*akka_persistence.akka_persistence_voldemort*/) + akka_persistence.akka_persistence_cassandra,akka_persistence.akka_persistence_hbase, + akka_persistence.akka_persistence_voldemort) } class AkkaOSGiDependenciesBundleProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with BNDPlugin { From 131ea4f23bb530ae999b180fabf9d6c31920b413 Mon Sep 17 00:00:00 2001 From: Debasish Ghosh Date: Thu, 23 Sep 2010 21:55:05 +0530 Subject: [PATCH 37/52] removed unnecessary imports --- akka-remote/src/main/scala/serialization/Serializer.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/akka-remote/src/main/scala/serialization/Serializer.scala b/akka-remote/src/main/scala/serialization/Serializer.scala index d4950b323e..9df1f4200a 100644 --- a/akka-remote/src/main/scala/serialization/Serializer.scala +++ b/akka-remote/src/main/scala/serialization/Serializer.scala @@ 
-131,7 +131,6 @@ object Serializer { trait ScalaJSON { import dispatch.json._ import sjson.json._ - import sjson.json.JsonSerialization var classLoader: Option[ClassLoader] = None From d7a2e166dafee7b941b110d63f4881195b978ab9 Mon Sep 17 00:00:00 2001 From: David Greco Date: Thu, 23 Sep 2010 19:37:18 +0200 Subject: [PATCH 38/52] Now the hbase tests don't spit out too many logs; made the running of the hbase tests optional --- .../src/main/resources/log4j.properties | 25 +++++++++++++++++++ config/logback-test.xml | 4 +++ project/build/AkkaProject.scala | 10 +++++++- 3 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 akka-persistence/akka-persistence-hbase/src/main/resources/log4j.properties diff --git a/akka-persistence/akka-persistence-hbase/src/main/resources/log4j.properties b/akka-persistence/akka-persistence-hbase/src/main/resources/log4j.properties new file mode 100644 index 0000000000..5763ff8232 --- /dev/null +++ b/akka-persistence/akka-persistence-hbase/src/main/resources/log4j.properties @@ -0,0 +1,25 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
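+# +# The settings that follow log only ERROR-level events, routing them to a +# rolling file under target/logs instead of the console, which keeps the +# HBase test runs quiet.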
+ + +log4j.rootLogger=ERROR,R + +# rolling log file ("system.log") +log4j.appender.R=org.apache.log4j.DailyRollingFileAppender +log4j.appender.R.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.R.layout=org.apache.log4j.PatternLayout +log4j.appender.R.layout.ConversionPattern=%5p [%t] %d{ISO8601} %F (line %L) %m%n +log4j.appender.R.File=target/logs/system.log diff --git a/config/logback-test.xml b/config/logback-test.xml index fdb912d463..0671694e3f 100644 --- a/config/logback-test.xml +++ b/config/logback-test.xml @@ -16,6 +16,10 @@ + + + + diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index 82ed509151..eecb4c4bfb 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -563,6 +563,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { + + @@ -571,7 +573,13 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { - override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil + val hbase_test = System.getenv("HBASE_TEST") + override def testOptions = { val o = TestFilter((name: String) => name.endsWith("Test")) :: Nil + if(hbase_test != "true") + o + else + Nil + } } // akka-persistence-voldemort subproject From 4e62147e33673a2ed9f8b4b5e43564f16d967531 Mon Sep 17 00:00:00 2001 From: David Greco Date: Thu, 23 Sep 2010 20:25:01 +0200 Subject: [PATCH 39/52] Corrected the optional run of the hbase tests --- project/build/AkkaProject.scala | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index eecb4c4bfb..d8e36df9d5 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -574,12 +574,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { val hbase_test = System.getenv("HBASE_TEST") - override def testOptions = { val o = TestFilter((name: String) => name.endsWith("Test")) :: Nil - if(hbase_test != "true") - o - else - Nil - } + override def testOptions = if(hbase_test == "true") TestFilter((name: String) => name.endsWith("Test")) :: Nil else TestFilter((name: String) => name.endsWith("Tes")) :: Nil } // akka-persistence-voldemort subproject From 1d1ce9010cc62ead4114d8eb3781e82ff1f6a1a3 Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Thu, 23 Sep 2010 21:30:20 +0200 Subject: [PATCH 40/52] Added support for having integration tests and stress tests optionally enabled --- project/build/AkkaProject.scala | 35 ++++++++++++++++++++++----------- 1 file changed, 23 insertions(+), 12 deletions(-) diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index eecb4c4bfb..a38551bca2 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -516,7 +516,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { val commons_codec = Dependencies.commons_codec val redis = Dependencies.redis - override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil + override def testOptions = createTestFilter( _.endsWith("Test")) } // ------------------------------------------------------------------------------------------------------------------- @@ -527,7 +527,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { val mongo = Dependencies.mongo val casbah = Dependencies.casbah - override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil + override def testOptions = createTestFilter( _.endsWith("Test")) } // 
------------------------------------------------------------------------------------------------------------------- @@ -543,7 +543,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { val google_coll = Dependencies.google_coll val high_scale = Dependencies.high_scale - override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil + override def testOptions = createTestFilter( _.endsWith("Test")) } // ------------------------------------------------------------------------------------------------------------------- @@ -573,13 +573,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { - val hbase_test = System.getenv("HBASE_TEST") - override def testOptions = { val o = TestFilter((name: String) => name.endsWith("Test")) :: Nil - if(hbase_test != "true") - o - else - Nil - } + override def testOptions = createTestFilter( _.endsWith("Test") ) } // akka-persistence-voldemort subproject @@ -599,7 +593,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { val dbcp = Dependencies.dbcp val sjson = Dependencies.sjson_test - override def testOptions = TestFilter((name: String) => name.endsWith("Suite")) :: Nil + override def testOptions = createTestFilter( _.endsWith("Suite")) } @@ -834,6 +828,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { } def akkaArtifacts = descendents(info.projectPath / "dist", "*" + buildScalaVersion + "-" + version + ".jar") + lazy val integrationTestsEnabled = systemOptional[Boolean]("integration.tests",false) + lazy val stressTestsEnabled = systemOptional[Boolean]("stress.tests",false) // ------------------------------------------------------------ class AkkaDefaultProject(info: ProjectInfo, val deployPath: Path) extends DefaultProject(info) with DeployProject with OSGiProject { @@ -844,6 +840,21 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { override def packageDocsJar = this.defaultJarPath("-docs.jar") override def packageSrcJar = this.defaultJarPath("-sources.jar") override def packageToPublishActions = super.packageToPublishActions ++ Seq(this.packageDocs, this.packageSrc) + + /** + * Used for testOptions, possibility to enable the running of integration and or stresstests + * + * To enable set true and disable set false + * set integration.tests true + * set stress.tests true + */ + def createTestFilter(defaultTests: (String) => Boolean) = { TestFilter({ + case s: String if defaultTests(s) => true + case s: String if integrationTestsEnabled.value => s.endsWith("TestIntegration") + case s: String if stressTestsEnabled.value => s.endsWith("TestStress") + case _ => false + }) :: Nil + } } } @@ -869,4 +880,4 @@ trait DeployProject { self: BasicScalaProject => trait OSGiProject extends BNDPlugin { self: DefaultProject => override def bndExportPackage = Seq("se.scalablesolutions.akka.*;version=%s".format(projectVersion.value)) -} +} \ No newline at end of file From 60bd020150ecbece0845ca2693cf62bddf414733 Mon Sep 17 00:00:00 2001 From: ticktock Date: Thu, 23 Sep 2010 21:14:36 -0400 Subject: [PATCH 41/52] Refactoring Vector to only use 1 voldemort store, and setting up for implementing Queue --- .../main/scala/VoldemortStorageBackend.scala | 37 +++++++++++-------- .../src/test/resources/config/stores.xml | 12 +++--- .../scala/VoldemortPersistentActorSuite.scala | 5 +-- .../scala/VoldemortStorageBackendSuite.scala | 24 ++++-------- config/akka-reference.conf | 6 +-- 5 files changed, 41 insertions(+), 43 deletions(-) 
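The refactoring below folds the old VectorSizes and VectorValues stores into a single Vectors store: every slot of a vector is addressed through getIndexedKey(owner, index), the reserved index -1 holds the vector's size, and other negative slots are set aside for queue bookkeeping (head/tail counters). Here is a minimal sketch of that layout, using a plain in-memory Map in place of the Voldemort StoreClient and a simplified key encoding; both are stand-ins for illustration, not the backend's actual wiring.

object SingleStoreVectorSketch {
  import java.nio.ByteBuffer
  import scala.collection.mutable

  // Stand-in for a Voldemort StoreClient[Array[Byte], Array[Byte]].
  private val store = mutable.Map.empty[Seq[Byte], Array[Byte]]
  private val vectorSizeIndex = -1 // reserved slot: the vector's size lives here

  // Hypothetical key encoding: owner bytes followed by a 4-byte big-endian index.
  // The real getIndexedKey uses its own framing; only the idea is mirrored here.
  private def indexedKey(owner: String, index: Int): Seq[Byte] =
    (owner.getBytes("UTF-8") ++ ByteBuffer.allocate(4).putInt(index).array).toSeq

  def size(owner: String): Int =
    store.get(indexedKey(owner, vectorSizeIndex))
         .map(bytes => ByteBuffer.wrap(bytes).getInt)
         .getOrElse(0)

  // Append = write the element under the next free index, then bump the size slot.
  def append(owner: String, elem: Array[Byte]) {
    val n = size(owner)
    store(indexedKey(owner, n)) = elem
    store(indexedKey(owner, vectorSizeIndex)) = ByteBuffer.allocate(4).putInt(n + 1).array
  }

  def get(owner: String, index: Int): Array[Byte] =
    store.getOrElse(indexedKey(owner, index), Array.empty[Byte])
}

Seq[Byte] keys give the in-memory Map value equality; the real backend keeps raw Array[Byte] keys and lets Voldemort compare the bytes, as the ByteUtils-based Ordering it declares suggests.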
diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala index 83b74a4a05..5a5228f754 100644 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala @@ -34,18 +34,23 @@ MapStorageBackend[Array[Byte], Array[Byte]] with val refStore = config.getString("akka.storage.voldemort.store.ref", "Refs") val mapKeyStore = config.getString("akka.storage.voldemort.store.map-key", "MapKeys") val mapValueStore = config.getString("akka.storage.voldemort.store.map-value", "MapValues") - val vectorSizeStore = config.getString("akka.storage.voldemort.store.vector-size", "VectorSizes") - val vectorValueStore = config.getString("akka.storage.voldemort.store.vector-value", "VectorValues") + val vectorStore = config.getString("akka.storage.voldemort.store.vector", "Vectors") + val queueStore = config.getString("akka.storage.voldemort.store.queue", "Queues") var storeClientFactory: StoreClientFactory = null var refClient: StoreClient[String, Array[Byte]] = null var mapKeyClient: StoreClient[String, Array[Byte]] = null var mapValueClient: StoreClient[Array[Byte], Array[Byte]] = null - var vectorSizeClient: StoreClient[String, Array[Byte]] = null - var vectorValueClient: StoreClient[Array[Byte], Array[Byte]] = null + var vectorClient: StoreClient[Array[Byte], Array[Byte]] = null + var queueClient: StoreClient[Array[Byte], Array[Byte]] = null initStoreClients val underscoreBytesUTF8 = "_".getBytes("UTF-8") + val vectorSizeIndex = -1 + val queueSizeKeyBytes = IntSerializer.toBytes(-1) + val queueHeadKeyBytes = IntSerializer.toBytes(-2) + val queueTailKeyBytes = IntSerializer.toBytes(-3) + implicit val byteOrder = new Ordering[Array[Byte]] { override def compare(x: Array[Byte], y: Array[Byte]) = ByteUtils.compare(x, y) } @@ -152,7 +157,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with def getVectorStorageSizeFor(name: String): Int = { - IntSerializer.fromBytes(vectorSizeClient.getValue(name, IntSerializer.toBytes(0))) + IntSerializer.fromBytes(vectorClient.getValue(getIndexedKey(name, vectorSizeIndex), IntSerializer.toBytes(0))) } @@ -167,10 +172,10 @@ MapStorageBackend[Array[Byte], Array[Byte]] with count } val seq: IndexedSeq[Array[Byte]] = (st until st + cnt).map { - index => getVectorValueKey(name, index) + index => getIndexedKey(name, index) } - val all: JMap[Array[Byte], Versioned[Array[Byte]]] = vectorValueClient.getAll(JavaConversions.asIterable(seq)) + val all: JMap[Array[Byte], Versioned[Array[Byte]]] = vectorClient.getAll(JavaConversions.asIterable(seq)) var storage = new ArrayBuffer[Array[Byte]](seq.size) storage = storage.padTo(seq.size, Array.empty[Byte]) @@ -189,14 +194,14 @@ MapStorageBackend[Array[Byte], Array[Byte]] with def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] = { - vectorValueClient.getValue(getVectorValueKey(name, index), Array.empty[Byte]) + vectorClient.getValue(getIndexedKey(name, index), Array.empty[Byte]) } def updateVectorStorageEntryFor(name: String, index: Int, elem: Array[Byte]) = { val size = getVectorStorageSizeFor(name) - vectorValueClient.put(getVectorValueKey(name, index), elem) + vectorClient.put(getIndexedKey(name, index), elem) if (size < index + 1) { - vectorSizeClient.put(name, IntSerializer.toBytes(index + 1)) + vectorClient.put(getIndexedKey(name, vectorSizeIndex), 
IntSerializer.toBytes(index + 1)) } } @@ -204,10 +209,10 @@ MapStorageBackend[Array[Byte], Array[Byte]] with var size = getVectorStorageSizeFor(name) elements.foreach { element => - vectorValueClient.put(getVectorValueKey(name, size), element) + vectorClient.put(getIndexedKey(name, size), element) size += 1 } - vectorSizeClient.put(name, IntSerializer.toBytes(size)) + vectorClient.put(getIndexedKey(name, vectorSizeIndex), IntSerializer.toBytes(size)) } def insertVectorStorageEntryFor(name: String, element: Array[Byte]) = { @@ -220,6 +225,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with * Store the length of owner as first byte to work around the rare case * where ownerbytes1 + keybytes1 == ownerbytes2 + keybytes2 but ownerbytes1 != ownerbytes2 */ + def getKey(owner: String, key: Array[Byte]): Array[Byte] = { val ownerBytes: Array[Byte] = owner.getBytes("UTF-8") val ownerLenghtBytes: Array[Byte] = IntSerializer.toBytes(owner.length) @@ -230,7 +236,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with theKey } - def getVectorValueKey(owner: String, index: Int): Array[Byte] = { + def getIndexedKey(owner: String, index: Int): Array[Byte] = { val indexbytes = IntSerializer.toBytes(index) val theIndexKey = new Array[Byte](underscoreBytesUTF8.length + indexbytes.length) System.arraycopy(underscoreBytesUTF8, 0, theIndexKey, 0, underscoreBytesUTF8.length) @@ -245,6 +251,7 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } + def getClientConfig(configMap: Map[String, String]): Properties = { val properites = new Properties configMap.foreach { @@ -272,8 +279,8 @@ MapStorageBackend[Array[Byte], Array[Byte]] with refClient = storeClientFactory.getStoreClient(refStore) mapKeyClient = storeClientFactory.getStoreClient(mapKeyStore) mapValueClient = storeClientFactory.getStoreClient(mapValueStore) - vectorSizeClient = storeClientFactory.getStoreClient(vectorSizeStore) - vectorValueClient = storeClientFactory.getStoreClient(vectorValueStore) + vectorClient = storeClientFactory.getStoreClient(vectorStore) + queueClient = storeClientFactory.getStoreClient(queueStore) } object IntSerializer { diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml index 26832d93fe..de666a219f 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml +++ b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml @@ -15,7 +15,7 @@ identity - + MapValues 1 @@ -33,7 +33,7 @@ - MapKeys + MapKeys 1 1 1 @@ -50,7 +50,7 @@ - VectorValues + Vectors 1 1 1 @@ -66,7 +66,7 @@ - VectorSizes + Queues 1 1 1 @@ -75,11 +75,11 @@ memory client - string - utf8 + identity identity + \ No newline at end of file diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala index f76c370667..e39732dabf 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala @@ -108,12 +108,11 @@ Spec with override def beforeEach { removeMapStorageFor(state) var size = getVectorStorageSizeFor(tx) - (0 to size).foreach { + (-1 to size).foreach { index => { - vectorValueClient.delete(getVectorValueKey(tx, index)) + vectorClient.delete(getIndexedKey(tx, index)) } } - vectorSizeClient.delete(tx) } 
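// With the single Vectors store, the size lives at index -1, so the cleanup loop above starts at -1 and covers what the separate vectorSizeClient.delete used to do.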
override def afterEach { diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala index aa5f88f020..5f27771bae 100644 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala +++ b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala @@ -82,34 +82,26 @@ class VoldemortStorageBackendSuite extends FunSuite with ShouldMatchers with Emb } - test("that vector size storage and retrieval works") { - val key = "vectorKey" - val size = IntSerializer.toBytes(17) - vectorSizeClient.delete(key) - vectorSizeClient.getValue(key, empty) should equal(empty) - vectorSizeClient.put(key, size) - vectorSizeClient.getValue(key) should equal(size) - } test("that vector value storage and retrieval works") { val key = "vectorValueKey" val index = 3 val value = bytes("some bytes") - val vecKey = getVectorValueKey(key, index) + val vecKey = getIndexedKey(key, index) getIndexFromVectorValueKey(key, vecKey) should be(index) - vectorValueClient.delete(vecKey) - vectorValueClient.getValue(vecKey, empty) should equal(empty) - vectorValueClient.put(vecKey, value) - vectorValueClient.getValue(vecKey) should equal(value) + vectorClient.delete(vecKey) + vectorClient.getValue(vecKey, empty) should equal(empty) + vectorClient.put(vecKey, value) + vectorClient.getValue(vecKey) should equal(value) } test("PersistentVector apis function as expected") { val key = "vectorApiKey" val value = bytes("Some bytes we want to store in a vector") val updatedValue = bytes("Some updated bytes we want to store in a vector") - vectorSizeClient.delete(key) - vectorValueClient.delete(getVectorValueKey(key, 0)) - vectorValueClient.delete(getVectorValueKey(key, 1)) + vectorClient.delete(getIndexedKey(key, vectorSizeIndex)) + vectorClient.delete(getIndexedKey(key, 0)) + vectorClient.delete(getIndexedKey(key, 1)) getVectorStorageEntryFor(key, 0) should be(empty) getVectorStorageEntryFor(key, 1) should be(empty) getVectorStorageRangeFor(key, None, None, 1).head should be(empty) diff --git a/config/akka-reference.conf b/config/akka-reference.conf index eec56c7f06..fd0d658ab4 100644 --- a/config/akka-reference.conf +++ b/config/akka-reference.conf @@ -171,11 +171,11 @@ akka { voldemort { store { - refs = "Refs" # Voldemort Store Used to Persist Refs. Use string serializer for keys, identity serializer for values + ref = "Refs" # Voldemort Store Used to Persist Refs. Use string serializer for keys, identity serializer for values map-keys = "MapKeys" # Voldemort Store Used to Persist Map Keys. Use string serializer for keys, identity serializer for values map-values = "MapValues" # Voldemort Store Used to Persist Map Values. Use identity serializer for keys, identity serializer for values - vector-sizes = "VectorSizes" # Voldemort Store Used to Persist Vector Sizes. Use string serializer for keys, identity serializer for values - vector-values = "VectorValues" # Voldemort Store Used to Persist Vector Values. Use identity serializer for keys, identity serializer for values + vector = "Vectors" # Voldemort Store Used to Persist Vectors (element values, with the size at index -1). Use identity serializer for keys, identity serializer for values + queue = "Queues" # Voldemort Store Used to Persist Queues. 
Use identity serializer for keys, identity serializer for values } client { # The KeyValue pairs under client are converted to java Properties and used to construct the ClientConfig From 97ff092e009ff7ccb5e7beb008e4e068bbbbeec0 Mon Sep 17 00:00:00 2001 From: ticktock Date: Thu, 23 Sep 2010 22:58:24 -0400 Subject: [PATCH 42/52] More Queue impl --- .../main/scala/VoldemortStorageBackend.scala | 49 +++++++++++++++++-- 1 file changed, 45 insertions(+), 4 deletions(-) diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala index 5a5228f754..b5c1023970 100644 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala +++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/VoldemortStorageBackend.scala @@ -25,6 +25,7 @@ private[akka] object VoldemortStorageBackend extends MapStorageBackend[Array[Byte], Array[Byte]] with VectorStorageBackend[Array[Byte]] with RefStorageBackend[Array[Byte]] with + QueueStorageBackend[Array[Byte]] with Logging { val bootstrapUrlsProp = "bootstrap_urls" val clientConfig = config.getConfigMap("akka.storage.voldemort.client") match { @@ -47,9 +48,12 @@ MapStorageBackend[Array[Byte], Array[Byte]] with val underscoreBytesUTF8 = "_".getBytes("UTF-8") val vectorSizeIndex = -1 - val queueSizeKeyBytes = IntSerializer.toBytes(-1) - val queueHeadKeyBytes = IntSerializer.toBytes(-2) - val queueTailKeyBytes = IntSerializer.toBytes(-3) + val queueHeadIndex = -1 + val queueTailIndex = -2 + case class QueueMetadata(head: Int, tail: Int) { + def size = tail - head + //worry about wrapping etc + } implicit val byteOrder = new Ordering[Array[Byte]] { override def compare(x: Array[Byte], y: Array[Byte]) = ByteUtils.compare(x, y) @@ -220,12 +224,50 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } + def remove(name: String): Boolean = { + false + } + + def peek(name: String, start: Int, count: Int): List[Array[Byte]] = { + List(Array.empty[Byte]) + } + + def size(name: String): Int = { + getQueueMetadata(name).size + } + + def dequeue(name: String): Option[Array[Byte]] = { + None + } + + def enqueue(name: String, item: Array[Byte]): Option[Int] = { + val mdata = getQueueMetadata(name) + val key = getIndexedKey(name, mdata.tail) + queueClient.put(key, item) + queueClient.put(getIndexedKey(name, queueTailIndex), IntSerializer.toBytes(mdata.tail + 1)) + Some (mdata.size + 1) + } + + + def getQueueMetadata(name: String): QueueMetadata = { + val keys = List(getIndexedKey(name, queueHeadIndex), getIndexedKey(name, queueTailIndex)) + val qdata = JavaConversions.asMap(queueClient.getAll(JavaConversions.asIterable(keys))) + val values = keys.map { + qdata.get(_) match { + case Some(versioned) => IntSerializer.fromBytes(versioned.getValue) + case None => 0 + } + } + QueueMetadata(values.head, values.tail.head) + } + /** * Concat the ownerlenght+owner+key+ of owner so owned data will be colocated * Store the length of owner as first byte to work around the rare case * where ownerbytes1 + keybytes1 == ownerbytes2 + keybytes2 but ownerbytes1 != ownerbytes2 */ + def getKey(owner: String, key: Array[Byte]): Array[Byte] = { val ownerBytes: Array[Byte] = owner.getBytes("UTF-8") val ownerLenghtBytes: Array[Byte] = IntSerializer.toBytes(owner.length) @@ -251,7 +293,6 @@ MapStorageBackend[Array[Byte], Array[Byte]] with } - def getClientConfig(configMap: Map[String, String]): Properties = { val properites = new Properties 
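// Copies every akka.storage.voldemort.client entry into the Properties handed to Voldemort's ClientConfig.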
configMap.foreach { From e92b51d7dfea220e5d8b9d833b2fe5789f30a89d Mon Sep 17 00:00:00 2001 From: David Greco Date: Fri, 24 Sep 2010 09:15:52 +0200 Subject: [PATCH 43/52] Aligned the hbase test to the new mechanism for optionally running integration tests --- ...ePersistentActorSpecTestIntegration.scala} | 2 +- .../src/test/scala/HbaseStorageSpecTest.scala | 177 --------- .../test/scala/HbaseTicket343SpecTest.scala | 347 ------------------ .../src/test/scala/SimpleHbaseSpecTest.scala | 62 ---- 4 files changed, 1 insertion(+), 587 deletions(-) rename akka-persistence/akka-persistence-hbase/src/test/scala/{HbasePersistentActorSpecTest.scala => HbasePersistentActorSpecTestIntegration.scala} (98%) delete mode 100644 akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala delete mode 100644 akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala delete mode 100644 akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTest.scala diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTestIntegration.scala similarity index 98% rename from akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala rename to akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTestIntegration.scala index 468cd800ce..fc496ed480 100644 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTest.scala +++ b/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTestIntegration.scala @@ -76,7 +76,7 @@ class PersistentFailerActor extends Transactor { } } -class HbasePersistentActorSpec extends JUnitSuite with BeforeAndAfterAll { +class HbasePersistentActorSpecTestIntegration extends JUnitSuite with BeforeAndAfterAll { val testUtil = new HBaseTestingUtility diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala deleted file mode 100644 index 1bad777675..0000000000 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTest.scala +++ /dev/null @@ -1,177 +0,0 @@ -package se.scalablesolutions.akka.persistence.hbase - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterAll -import org.scalatest.BeforeAndAfterEach - -class HbaseStorageSpec extends -Spec with -ShouldMatchers with -BeforeAndAfterAll with -BeforeAndAfterEach { - - import org.apache.hadoop.hbase.HBaseTestingUtility - - val testUtil = new HBaseTestingUtility - - override def beforeAll { - testUtil.startMiniCluster - } - - override def afterAll { - testUtil.shutdownMiniCluster - } - - override def beforeEach { - HbaseStorageBackend.drop - } - - override def afterEach { - HbaseStorageBackend.drop - } - - describe("persistent maps") { - it("should insert with single key and value") { - import HbaseStorageBackend._ - - insertMapStorageEntryFor("t1", "odersky".getBytes, "scala".getBytes) - insertMapStorageEntryFor("t1", "gosling".getBytes, "java".getBytes) - insertMapStorageEntryFor("t1", "stroustrup".getBytes, "c++".getBytes) - getMapStorageSizeFor("t1") should equal(3) - new String(getMapStorageEntryFor("t1", "odersky".getBytes).get) should equal("scala") - new String(getMapStorageEntryFor("t1", "gosling".getBytes).get) should equal("java") - new 
String(getMapStorageEntryFor("t1", "stroustrup".getBytes).get) should equal("c++") - getMapStorageEntryFor("t1", "torvalds".getBytes) should equal(None) - } - - it("should insert with multiple keys and values") { - import HbaseStorageBackend._ - - val l = List(("stroustrup", "c++"), ("odersky", "scala"), ("gosling", "java")) - insertMapStorageEntriesFor("t1", l.map { case (k, v) => (k.getBytes, v.getBytes) }) - getMapStorageSizeFor("t1") should equal(3) - new String(getMapStorageEntryFor("t1", "stroustrup".getBytes).get) should equal("c++") - new String(getMapStorageEntryFor("t1", "gosling".getBytes).get) should equal("java") - new String(getMapStorageEntryFor("t1", "odersky".getBytes).get) should equal("scala") - getMapStorageEntryFor("t1", "torvalds".getBytes) should equal(None) - - getMapStorageEntryFor("t2", "torvalds".getBytes) should equal(None) - - getMapStorageFor("t1").map { case (k, v) => (new String(k), new String(v)) } should equal (l) - - removeMapStorageFor("t1", "gosling".getBytes) - getMapStorageSizeFor("t1") should equal(2) - - removeMapStorageFor("t1") - getMapStorageSizeFor("t1") should equal(0) - } - - it("should do proper range queries") { - import HbaseStorageBackend._ - val l = List( - ("bjarne stroustrup", "c++"), - ("martin odersky", "scala"), - ("james gosling", "java"), - ("yukihiro matsumoto", "ruby"), - ("slava pestov", "factor"), - ("rich hickey", "clojure"), - ("ola bini", "ioke"), - ("dennis ritchie", "c"), - ("larry wall", "perl"), - ("guido van rossum", "python"), - ("james strachan", "groovy")) - val rl = List( - ("james gosling", "java"), - ("james strachan", "groovy"), - ("larry wall", "perl"), - ("martin odersky", "scala"), - ("ola bini", "ioke"), ("rich hickey", "clojure"), - ("slava pestov", "factor")) - insertMapStorageEntriesFor("t1", l.map { case (k, v) => (k.getBytes, v.getBytes) }) - getMapStorageSizeFor("t1") should equal(l.size) - getMapStorageRangeFor("t1", None, None, 100).map { case (k, v) => (new String(k), new String(v)) } should equal(l.sortWith(_._1 < _._1)) - getMapStorageRangeFor("t1", Option("james gosling".getBytes), Option("slava pestov".getBytes), 100).map { case (k, v) => (new String(k), new String(v)) } should equal(rl.sortWith(_._1 < _._1)) - getMapStorageRangeFor("t1", None, None, 5).map { case (k, v) => (new String(k), new String(v)) }.size should equal(5) - } - - } - - describe("persistent vectors") { - it("should insert a single value") { - import HbaseStorageBackend._ - - insertVectorStorageEntryFor("t1", "martin odersky".getBytes) - insertVectorStorageEntryFor("t1", "james gosling".getBytes) - new String(getVectorStorageEntryFor("t1", 0)) should equal("james gosling") - new String(getVectorStorageEntryFor("t1", 1)) should equal("martin odersky") - } - - it("should insert multiple values") { - import HbaseStorageBackend._ - - insertVectorStorageEntryFor("t1", "martin odersky".getBytes) - insertVectorStorageEntryFor("t1", "james gosling".getBytes) - insertVectorStorageEntriesFor("t1", List("ola bini".getBytes, "james strachan".getBytes, "dennis ritchie".getBytes)) - new String(getVectorStorageEntryFor("t1", 0)) should equal("ola bini") - new String(getVectorStorageEntryFor("t1", 1)) should equal("james strachan") - new String(getVectorStorageEntryFor("t1", 2)) should equal("dennis ritchie") - new String(getVectorStorageEntryFor("t1", 3)) should equal("james gosling") - new String(getVectorStorageEntryFor("t1", 4)) should equal("martin odersky") - } - - it("should fetch a range of values") { - import HbaseStorageBackend._ - 
- insertVectorStorageEntryFor("t1", "martin odersky".getBytes) - insertVectorStorageEntryFor("t1", "james gosling".getBytes) - getVectorStorageSizeFor("t1") should equal(2) - insertVectorStorageEntriesFor("t1", List("ola bini".getBytes, "james strachan".getBytes, "dennis ritchie".getBytes)) - getVectorStorageRangeFor("t1", None, None, 100).map(new String(_)) should equal(List("ola bini", "james strachan", "dennis ritchie", "james gosling", "martin odersky")) - getVectorStorageRangeFor("t1", Some(0), Some(5), 100).map(new String(_)) should equal(List("ola bini", "james strachan", "dennis ritchie", "james gosling", "martin odersky")) - getVectorStorageRangeFor("t1", Some(2), Some(5), 100).map(new String(_)) should equal(List("dennis ritchie", "james gosling", "martin odersky")) - getVectorStorageRangeFor("t1", Some(0), Some(0), 100).size should equal(0) - getVectorStorageSizeFor("t1") should equal(5) - } - - it("should insert and query complex structures") { - import HbaseStorageBackend._ - import sjson.json.DefaultProtocol._ - import sjson.json.JsonSerialization._ - - // a list[AnyRef] should be added successfully - val l = List("ola bini".getBytes, tobinary(List(100, 200, 300)), tobinary(List(1, 2, 3))) - - // for id = t1 - insertVectorStorageEntriesFor("t1", l) - new String(getVectorStorageEntryFor("t1", 0)) should equal("ola bini") - frombinary[List[Int]](getVectorStorageEntryFor("t1", 1)) should equal(List(100, 200, 300)) - frombinary[List[Int]](getVectorStorageEntryFor("t1", 2)) should equal(List(1, 2, 3)) - - getVectorStorageSizeFor("t1") should equal(3) - - // some more for id = t1 - val m = List(tobinary(Map(1 -> "dg", 2 -> "mc", 3 -> "nd")), tobinary(List("martin odersky", "james gosling"))) - insertVectorStorageEntriesFor("t1", m) - - // size should add up - getVectorStorageSizeFor("t1") should equal(5) - - // now for a diff id - insertVectorStorageEntriesFor("t2", l) - getVectorStorageSizeFor("t2") should equal(3) - } - } - - describe("persistent refs") { - it("should insert a ref") { - import HbaseStorageBackend._ - - insertRefStorageFor("t1", "martin odersky".getBytes) - new String(getRefStorageFor("t1").get) should equal("martin odersky") - insertRefStorageFor("t1", "james gosling".getBytes) - new String(getRefStorageFor("t1").get) should equal("james gosling") - getRefStorageFor("t2") should equal(None) - } - } -} diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala deleted file mode 100644 index d61b82fa87..0000000000 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTest.scala +++ /dev/null @@ -1,347 +0,0 @@ -package se.scalablesolutions.akka.persistence.hbase - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach} -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import se.scalablesolutions.akka.actor.{Actor, ActorRef} -import se.scalablesolutions.akka.config.OneForOneStrategy -import Actor._ -import se.scalablesolutions.akka.stm.global._ -import se.scalablesolutions.akka.config.ScalaConfig._ -import se.scalablesolutions.akka.util.Logging - -import HbaseStorageBackend._ - -case class GET(k: String) -case class SET(k: String, v: String) -case class REM(k: String) -case class CONTAINS(k: String) -case object MAP_SIZE -case class MSET(kvs: List[(String, String)]) -case class REMOVE_AFTER_PUT(kvsToAdd: List[(String, 
String)], ksToRem: List[String]) -case class CLEAR_AFTER_PUT(kvsToAdd: List[(String, String)]) -case class PUT_WITH_SLICE(kvsToAdd: List[(String, String)], start: String, cnt: Int) -case class PUT_REM_WITH_SLICE(kvsToAdd: List[(String, String)], ksToRem: List[String], start: String, cnt: Int) - -case class VADD(v: String) -case class VUPD(i: Int, v: String) -case class VUPD_AND_ABORT(i: Int, v: String) -case class VGET(i: Int) -case object VSIZE -case class VGET_AFTER_VADD(vsToAdd: List[String], isToFetch: List[Int]) -case class VADD_WITH_SLICE(vsToAdd: List[String], start: Int, cnt: Int) - -object Storage { - class HbaseSampleMapStorage extends Actor { - self.lifeCycle = Some(LifeCycle(Permanent)) - val FOO_MAP = "akka.sample.map" - - private var fooMap = atomic { HbaseStorage.getMap(FOO_MAP) } - - def receive = { - case SET(k, v) => - atomic { - fooMap += (k.getBytes, v.getBytes) - } - self.reply((k, v)) - - case GET(k) => - val v = atomic { - fooMap.get(k.getBytes).map(new String(_)).getOrElse(k + " Not found") - } - self.reply(v) - - case REM(k) => - val v = atomic { - fooMap -= k.getBytes - } - self.reply(k) - - case CONTAINS(k) => - val v = atomic { - fooMap contains k.getBytes - } - self.reply(v) - - case MAP_SIZE => - val v = atomic { - fooMap.size - } - self.reply(v) - - case MSET(kvs) => atomic { - kvs.foreach {kv => fooMap += (kv._1.getBytes, kv._2.getBytes) } - } - self.reply(kvs.size) - - case REMOVE_AFTER_PUT(kvs2add, ks2rem) => atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - - ks2rem.foreach {k => - fooMap -= k.getBytes - }} - self.reply(fooMap.size) - - case CLEAR_AFTER_PUT(kvs2add) => atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - fooMap.clear - } - self.reply(true) - - case PUT_WITH_SLICE(kvs2add, from, cnt) => - val v = atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - fooMap.slice(Some(from.getBytes), cnt) - } - self.reply(v: List[(Array[Byte], Array[Byte])]) - - case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) => - val v = atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - ks2rem.foreach {k => - fooMap -= k.getBytes - } - fooMap.slice(Some(from.getBytes), cnt) - } - self.reply(v: List[(Array[Byte], Array[Byte])]) - } - } - - class HbaseSampleVectorStorage extends Actor { - self.lifeCycle = Some(LifeCycle(Permanent)) - val FOO_VECTOR = "akka.sample.vector" - - private var fooVector = atomic { HbaseStorage.getVector(FOO_VECTOR) } - - def receive = { - case VADD(v) => - val size = - atomic { - fooVector + v.getBytes - fooVector length - } - self.reply(size) - - case VGET(index) => - val ind = - atomic { - fooVector get index - } - self.reply(ind) - - case VGET_AFTER_VADD(vs, is) => - val els = - atomic { - vs.foreach(fooVector + _.getBytes) - (is.foldRight(List[Array[Byte]]())(fooVector.get(_) :: _)).map(new String(_)) - } - self.reply(els) - - case VUPD_AND_ABORT(index, value) => - val l = - atomic { - fooVector.update(index, value.getBytes) - // force fail - fooVector get 100 - } - self.reply(index) - - case VADD_WITH_SLICE(vs, s, c) => - val l = - atomic { - vs.foreach(fooVector + _.getBytes) - fooVector.slice(Some(s), None, c) - } - self.reply(l.map(new String(_))) - } - } -} - -import Storage._ - -@RunWith(classOf[JUnitRunner]) -class HbaseTicket343Spec extends Spec with ShouldMatchers with BeforeAndAfterAll with BeforeAndAfterEach { - - import org.apache.hadoop.hbase.HBaseTestingUtility - - val testUtil = new 
HBaseTestingUtility - - override def beforeAll { - testUtil.startMiniCluster - } - - override def afterAll { - testUtil.shutdownMiniCluster - } - - override def beforeEach { - HbaseStorageBackend.drop - } - - override def afterEach { - HbaseStorageBackend.drop - } - - describe("Ticket 343 Issue #1") { - it("remove after put should work within the same transaction") { - val proc = actorOf[HbaseSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - - (proc !! GET("dg")).getOrElse("Get failed") should equal("1") - (proc !! GET("mc")).getOrElse("Get failed") should equal("2") - (proc !! GET("nd")).getOrElse("Get failed") should equal("3") - - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - val add = List(("a", "1"), ("b", "2"), ("c", "3")) - val rem = List("a", "debasish") - (proc !! REMOVE_AFTER_PUT(add, rem)).getOrElse("REMOVE_AFTER_PUT failed") should equal(5) - - (proc !! GET("debasish")).getOrElse("debasish not found") should equal("debasish Not found") - (proc !! GET("a")).getOrElse("a not found") should equal("a Not found") - - (proc !! GET("b")).getOrElse("b not found") should equal("2") - - (proc !! CONTAINS("b")).getOrElse("b not found") should equal(true) - (proc !! CONTAINS("debasish")).getOrElse("debasish not found") should equal(false) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(5) - proc.stop - } - } - - describe("Ticket 343 Issue #2") { - it("clear after put should work within the same transaction") { - val proc = actorOf[HbaseSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - val add = List(("a", "1"), ("b", "2"), ("c", "3")) - (proc !! CLEAR_AFTER_PUT(add)).getOrElse("CLEAR_AFTER_PUT failed") should equal(true) - - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(0) - proc.stop - } - } - - describe("Ticket 343 Issue #3") { - it("map size should change after the transaction") { - val proc = actorOf[HbaseSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - (proc !! GET("dg")).getOrElse("Get failed") should equal("1") - (proc !! GET("mc")).getOrElse("Get failed") should equal("2") - (proc !! GET("nd")).getOrElse("Get failed") should equal("3") - proc.stop - } - } - - describe("slice test") { - it("should pass") { - val proc = actorOf[HbaseSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - // (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! 
MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - (proc !! PUT_WITH_SLICE(List(("ec", "1"), ("tb", "2"), ("mc", "10")), "dg", 3)).get.asInstanceOf[List[(Array[Byte], Array[Byte])]].map { case (k, v) => (new String(k), new String(v)) } should equal(List(("dg", "1"), ("ec", "1"), ("mc", "10"))) - - (proc !! PUT_REM_WITH_SLICE(List(("fc", "1"), ("gb", "2"), ("xy", "10")), List("tb", "fc"), "dg", 5)).get.asInstanceOf[List[(Array[Byte], Array[Byte])]].map { case (k, v) => (new String(k), new String(v)) } should equal(List(("dg", "1"), ("ec", "1"), ("gb", "2"), ("mc", "10"), ("nd", "3"))) - proc.stop - } - } - - describe("Ticket 343 Issue #4") { - it("vector get should not ignore elements that were in vector before transaction") { - - val proc = actorOf[HbaseSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - new String((proc !! VGET(0)).get.asInstanceOf[Array[Byte]] ) should equal("nilanjan") - new String((proc !! VGET(1)).get.asInstanceOf[Array[Byte]] ) should equal("ramanendu") - new String((proc !! VGET(2)).get.asInstanceOf[Array[Byte]] ) should equal("maulindu") - new String((proc !! VGET(3)).get.asInstanceOf[Array[Byte]] ) should equal("debasish") - - // now add 3 more and do gets in the same transaction - (proc !! VGET_AFTER_VADD(List("a", "b", "c"), List(0, 2, 4))).get.asInstanceOf[List[String]] should equal(List("c", "a", "ramanendu")) - proc.stop - } - } - - describe("Ticket 343 Issue #6") { - it("vector update should not ignore transaction") { - val proc = actorOf[HbaseSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - evaluating { - (proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed") - } should produce [Exception] - - // update aborts and hence values will remain unchanged - new String((proc !! VGET(0)).get.asInstanceOf[Array[Byte]] ) should equal("nilanjan") - proc.stop - } - } - - describe("Ticket 343 Issue #5") { - it("vector slice() should not ignore elements added in current transaction") { - val proc = actorOf[HbaseSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - // slice with no new elements added in current transaction - (proc !! VADD_WITH_SLICE(List(), 2, 2)).getOrElse("VADD_WITH_SLICE failed") should equal(Vector("maulindu", "debasish")) - - // slice with new elements added in current transaction - (proc !! 
VADD_WITH_SLICE(List("a", "b", "c", "d"), 2, 2)).getOrElse("VADD_WITH_SLICE failed") should equal(Vector("b", "a")) - proc.stop - } - } -} diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTest.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTest.scala deleted file mode 100644 index 883e94b5eb..0000000000 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTest.scala +++ /dev/null @@ -1,62 +0,0 @@ -package se.scalablesolutions.akka.persistence.hbase - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterAll -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import org.junit.Test - -import org.apache.hadoop.hbase.HBaseTestingUtility - -@RunWith(classOf[JUnitRunner]) -class PersistenceSpecTest extends Spec with BeforeAndAfterAll with ShouldMatchers { - - import org.apache.hadoop.hbase.HBaseTestingUtility - - val testUtil = new HBaseTestingUtility - - override def beforeAll { - testUtil.startMiniCluster - } - - override def afterAll { - testUtil.shutdownMiniCluster - } - - describe("simple hbase persistence test") { - it("should create a table") { - import org.apache.hadoop.hbase.util.Bytes - import org.apache.hadoop.hbase.HTableDescriptor - import org.apache.hadoop.hbase.HColumnDescriptor - import org.apache.hadoop.hbase.client.HBaseAdmin - import org.apache.hadoop.hbase.client.HTable - - val descriptor = new HTableDescriptor(Bytes.toBytes("ATable")) - descriptor.addFamily(new HColumnDescriptor(Bytes.toBytes("Family1"))) - descriptor.addFamily(new HColumnDescriptor(Bytes.toBytes("Family2"))) - val admin = new HBaseAdmin(testUtil.getConfiguration) - admin.createTable(descriptor) - val table = new HTable(testUtil.getConfiguration, Bytes.toBytes("ATable")) - - table should not equal (null) - } - - it("should use the quorum read from the akka configuration and access the table") { - import se.scalablesolutions.akka.config.Config.config - import org.apache.hadoop.hbase.HBaseConfiguration - import org.apache.hadoop.hbase.client.HBaseAdmin - import org.apache.hadoop.hbase.client.HTable - - val HBASE_ZOOKEEPER_QUORUM = config.getString("akka.storage.hbase.zookeeper-quorum", "0") - HBASE_ZOOKEEPER_QUORUM should not equal ("0") - HBASE_ZOOKEEPER_QUORUM should equal("localhost") - - val configuration = new HBaseConfiguration - configuration.set("hbase.zookeeper.quorum", HBASE_ZOOKEEPER_QUORUM) - val admin = new HBaseAdmin(configuration) - admin.tableExists("ATable") should equal(true) - } - } - -} From 76283b43b01d97ea22a8b4ca080d54e1e9d7af37 Mon Sep 17 00:00:00 2001 From: David Greco Date: Fri, 24 Sep 2010 09:17:29 +0200 Subject: [PATCH 44/52] Aligned the hbase test to the new mechanism for optionally running integration tests --- .../HbaseStorageSpecTestIntegration.scala | 177 +++++++++ .../HbaseTicket343SpecTestIntegration.scala | 347 ++++++++++++++++++ .../SimpleHbaseSpecTestIntegration.scala | 62 ++++ 3 files changed, 586 insertions(+) create mode 100644 akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTestIntegration.scala create mode 100644 akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTestIntegration.scala create mode 100644 akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTestIntegration.scala diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTestIntegration.scala 
b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTestIntegration.scala new file mode 100644 index 0000000000..4d118850f0 --- /dev/null +++ b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTestIntegration.scala @@ -0,0 +1,177 @@ +package se.scalablesolutions.akka.persistence.hbase + +import org.scalatest.Spec +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.BeforeAndAfterEach + +class HbaseStorageSpecTestIntegration extends +Spec with +ShouldMatchers with +BeforeAndAfterAll with +BeforeAndAfterEach { + + import org.apache.hadoop.hbase.HBaseTestingUtility + + val testUtil = new HBaseTestingUtility + + override def beforeAll { + testUtil.startMiniCluster + } + + override def afterAll { + testUtil.shutdownMiniCluster + } + + override def beforeEach { + HbaseStorageBackend.drop + } + + override def afterEach { + HbaseStorageBackend.drop + } + + describe("persistent maps") { + it("should insert with single key and value") { + import HbaseStorageBackend._ + + insertMapStorageEntryFor("t1", "odersky".getBytes, "scala".getBytes) + insertMapStorageEntryFor("t1", "gosling".getBytes, "java".getBytes) + insertMapStorageEntryFor("t1", "stroustrup".getBytes, "c++".getBytes) + getMapStorageSizeFor("t1") should equal(3) + new String(getMapStorageEntryFor("t1", "odersky".getBytes).get) should equal("scala") + new String(getMapStorageEntryFor("t1", "gosling".getBytes).get) should equal("java") + new String(getMapStorageEntryFor("t1", "stroustrup".getBytes).get) should equal("c++") + getMapStorageEntryFor("t1", "torvalds".getBytes) should equal(None) + } + + it("should insert with multiple keys and values") { + import HbaseStorageBackend._ + + val l = List(("stroustrup", "c++"), ("odersky", "scala"), ("gosling", "java")) + insertMapStorageEntriesFor("t1", l.map { case (k, v) => (k.getBytes, v.getBytes) }) + getMapStorageSizeFor("t1") should equal(3) + new String(getMapStorageEntryFor("t1", "stroustrup".getBytes).get) should equal("c++") + new String(getMapStorageEntryFor("t1", "gosling".getBytes).get) should equal("java") + new String(getMapStorageEntryFor("t1", "odersky".getBytes).get) should equal("scala") + getMapStorageEntryFor("t1", "torvalds".getBytes) should equal(None) + + getMapStorageEntryFor("t2", "torvalds".getBytes) should equal(None) + + getMapStorageFor("t1").map { case (k, v) => (new String(k), new String(v)) } should equal (l) + + removeMapStorageFor("t1", "gosling".getBytes) + getMapStorageSizeFor("t1") should equal(2) + + removeMapStorageFor("t1") + getMapStorageSizeFor("t1") should equal(0) + } + + it("should do proper range queries") { + import HbaseStorageBackend._ + val l = List( + ("bjarne stroustrup", "c++"), + ("martin odersky", "scala"), + ("james gosling", "java"), + ("yukihiro matsumoto", "ruby"), + ("slava pestov", "factor"), + ("rich hickey", "clojure"), + ("ola bini", "ioke"), + ("dennis ritchie", "c"), + ("larry wall", "perl"), + ("guido van rossum", "python"), + ("james strachan", "groovy")) + val rl = List( + ("james gosling", "java"), + ("james strachan", "groovy"), + ("larry wall", "perl"), + ("martin odersky", "scala"), + ("ola bini", "ioke"), ("rich hickey", "clojure"), + ("slava pestov", "factor")) + insertMapStorageEntriesFor("t1", l.map { case (k, v) => (k.getBytes, v.getBytes) }) + getMapStorageSizeFor("t1") should equal(l.size) + getMapStorageRangeFor("t1", None, None, 100).map { case (k, v) => (new String(k), new String(v)) } should equal(l.sortWith(_._1 < 
_._1)) + getMapStorageRangeFor("t1", Option("james gosling".getBytes), Option("slava pestov".getBytes), 100).map { case (k, v) => (new String(k), new String(v)) } should equal(rl.sortWith(_._1 < _._1)) + getMapStorageRangeFor("t1", None, None, 5).map { case (k, v) => (new String(k), new String(v)) }.size should equal(5) + } + + } + + describe("persistent vectors") { + it("should insert a single value") { + import HbaseStorageBackend._ + + insertVectorStorageEntryFor("t1", "martin odersky".getBytes) + insertVectorStorageEntryFor("t1", "james gosling".getBytes) + new String(getVectorStorageEntryFor("t1", 0)) should equal("james gosling") + new String(getVectorStorageEntryFor("t1", 1)) should equal("martin odersky") + } + + it("should insert multiple values") { + import HbaseStorageBackend._ + + insertVectorStorageEntryFor("t1", "martin odersky".getBytes) + insertVectorStorageEntryFor("t1", "james gosling".getBytes) + insertVectorStorageEntriesFor("t1", List("ola bini".getBytes, "james strachan".getBytes, "dennis ritchie".getBytes)) + new String(getVectorStorageEntryFor("t1", 0)) should equal("ola bini") + new String(getVectorStorageEntryFor("t1", 1)) should equal("james strachan") + new String(getVectorStorageEntryFor("t1", 2)) should equal("dennis ritchie") + new String(getVectorStorageEntryFor("t1", 3)) should equal("james gosling") + new String(getVectorStorageEntryFor("t1", 4)) should equal("martin odersky") + } + + it("should fetch a range of values") { + import HbaseStorageBackend._ + + insertVectorStorageEntryFor("t1", "martin odersky".getBytes) + insertVectorStorageEntryFor("t1", "james gosling".getBytes) + getVectorStorageSizeFor("t1") should equal(2) + insertVectorStorageEntriesFor("t1", List("ola bini".getBytes, "james strachan".getBytes, "dennis ritchie".getBytes)) + getVectorStorageRangeFor("t1", None, None, 100).map(new String(_)) should equal(List("ola bini", "james strachan", "dennis ritchie", "james gosling", "martin odersky")) + getVectorStorageRangeFor("t1", Some(0), Some(5), 100).map(new String(_)) should equal(List("ola bini", "james strachan", "dennis ritchie", "james gosling", "martin odersky")) + getVectorStorageRangeFor("t1", Some(2), Some(5), 100).map(new String(_)) should equal(List("dennis ritchie", "james gosling", "martin odersky")) + getVectorStorageRangeFor("t1", Some(0), Some(0), 100).size should equal(0) + getVectorStorageSizeFor("t1") should equal(5) + } + + it("should insert and query complex structures") { + import HbaseStorageBackend._ + import sjson.json.DefaultProtocol._ + import sjson.json.JsonSerialization._ + + // a list[AnyRef] should be added successfully + val l = List("ola bini".getBytes, tobinary(List(100, 200, 300)), tobinary(List(1, 2, 3))) + + // for id = t1 + insertVectorStorageEntriesFor("t1", l) + new String(getVectorStorageEntryFor("t1", 0)) should equal("ola bini") + frombinary[List[Int]](getVectorStorageEntryFor("t1", 1)) should equal(List(100, 200, 300)) + frombinary[List[Int]](getVectorStorageEntryFor("t1", 2)) should equal(List(1, 2, 3)) + + getVectorStorageSizeFor("t1") should equal(3) + + // some more for id = t1 + val m = List(tobinary(Map(1 -> "dg", 2 -> "mc", 3 -> "nd")), tobinary(List("martin odersky", "james gosling"))) + insertVectorStorageEntriesFor("t1", m) + + // size should add up + getVectorStorageSizeFor("t1") should equal(5) + + // now for a diff id + insertVectorStorageEntriesFor("t2", l) + getVectorStorageSizeFor("t2") should equal(3) + } + } + + describe("persistent refs") { + it("should insert a ref") { + import 
HbaseStorageBackend._ + + insertRefStorageFor("t1", "martin odersky".getBytes) + new String(getRefStorageFor("t1").get) should equal("martin odersky") + insertRefStorageFor("t1", "james gosling".getBytes) + new String(getRefStorageFor("t1").get) should equal("james gosling") + getRefStorageFor("t2") should equal(None) + } + } +} diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTestIntegration.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTestIntegration.scala new file mode 100644 index 0000000000..26210ba52f --- /dev/null +++ b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTestIntegration.scala @@ -0,0 +1,347 @@ +package se.scalablesolutions.akka.persistence.hbase + +import org.scalatest.Spec +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach} +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.actor.{Actor, ActorRef} +import se.scalablesolutions.akka.config.OneForOneStrategy +import Actor._ +import se.scalablesolutions.akka.stm.global._ +import se.scalablesolutions.akka.config.ScalaConfig._ +import se.scalablesolutions.akka.util.Logging + +import HbaseStorageBackend._ + +case class GET(k: String) +case class SET(k: String, v: String) +case class REM(k: String) +case class CONTAINS(k: String) +case object MAP_SIZE +case class MSET(kvs: List[(String, String)]) +case class REMOVE_AFTER_PUT(kvsToAdd: List[(String, String)], ksToRem: List[String]) +case class CLEAR_AFTER_PUT(kvsToAdd: List[(String, String)]) +case class PUT_WITH_SLICE(kvsToAdd: List[(String, String)], start: String, cnt: Int) +case class PUT_REM_WITH_SLICE(kvsToAdd: List[(String, String)], ksToRem: List[String], start: String, cnt: Int) + +case class VADD(v: String) +case class VUPD(i: Int, v: String) +case class VUPD_AND_ABORT(i: Int, v: String) +case class VGET(i: Int) +case object VSIZE +case class VGET_AFTER_VADD(vsToAdd: List[String], isToFetch: List[Int]) +case class VADD_WITH_SLICE(vsToAdd: List[String], start: Int, cnt: Int) + +object Storage { + class HbaseSampleMapStorage extends Actor { + self.lifeCycle = Some(LifeCycle(Permanent)) + val FOO_MAP = "akka.sample.map" + + private var fooMap = atomic { HbaseStorage.getMap(FOO_MAP) } + + def receive = { + case SET(k, v) => + atomic { + fooMap += (k.getBytes, v.getBytes) + } + self.reply((k, v)) + + case GET(k) => + val v = atomic { + fooMap.get(k.getBytes).map(new String(_)).getOrElse(k + " Not found") + } + self.reply(v) + + case REM(k) => + val v = atomic { + fooMap -= k.getBytes + } + self.reply(k) + + case CONTAINS(k) => + val v = atomic { + fooMap contains k.getBytes + } + self.reply(v) + + case MAP_SIZE => + val v = atomic { + fooMap.size + } + self.reply(v) + + case MSET(kvs) => atomic { + kvs.foreach {kv => fooMap += (kv._1.getBytes, kv._2.getBytes) } + } + self.reply(kvs.size) + + case REMOVE_AFTER_PUT(kvs2add, ks2rem) => atomic { + kvs2add.foreach {kv => + fooMap += (kv._1.getBytes, kv._2.getBytes) + } + + ks2rem.foreach {k => + fooMap -= k.getBytes + }} + self.reply(fooMap.size) + + case CLEAR_AFTER_PUT(kvs2add) => atomic { + kvs2add.foreach {kv => + fooMap += (kv._1.getBytes, kv._2.getBytes) + } + fooMap.clear + } + self.reply(true) + + case PUT_WITH_SLICE(kvs2add, from, cnt) => + val v = atomic { + kvs2add.foreach {kv => + fooMap += (kv._1.getBytes, kv._2.getBytes) + } + fooMap.slice(Some(from.getBytes), cnt) + } + self.reply(v: List[(Array[Byte], 
Array[Byte])]) + + case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) => + val v = atomic { + kvs2add.foreach {kv => + fooMap += (kv._1.getBytes, kv._2.getBytes) + } + ks2rem.foreach {k => + fooMap -= k.getBytes + } + fooMap.slice(Some(from.getBytes), cnt) + } + self.reply(v: List[(Array[Byte], Array[Byte])]) + } + } + + class HbaseSampleVectorStorage extends Actor { + self.lifeCycle = Some(LifeCycle(Permanent)) + val FOO_VECTOR = "akka.sample.vector" + + private var fooVector = atomic { HbaseStorage.getVector(FOO_VECTOR) } + + def receive = { + case VADD(v) => + val size = + atomic { + fooVector + v.getBytes + fooVector length + } + self.reply(size) + + case VGET(index) => + val ind = + atomic { + fooVector get index + } + self.reply(ind) + + case VGET_AFTER_VADD(vs, is) => + val els = + atomic { + vs.foreach(fooVector + _.getBytes) + (is.foldRight(List[Array[Byte]]())(fooVector.get(_) :: _)).map(new String(_)) + } + self.reply(els) + + case VUPD_AND_ABORT(index, value) => + val l = + atomic { + fooVector.update(index, value.getBytes) + // force fail + fooVector get 100 + } + self.reply(index) + + case VADD_WITH_SLICE(vs, s, c) => + val l = + atomic { + vs.foreach(fooVector + _.getBytes) + fooVector.slice(Some(s), None, c) + } + self.reply(l.map(new String(_))) + } + } +} + +import Storage._ + +@RunWith(classOf[JUnitRunner]) +class HbaseTicket343SpecTestIntegration extends Spec with ShouldMatchers with BeforeAndAfterAll with BeforeAndAfterEach { + + import org.apache.hadoop.hbase.HBaseTestingUtility + + val testUtil = new HBaseTestingUtility + + override def beforeAll { + testUtil.startMiniCluster + } + + override def afterAll { + testUtil.shutdownMiniCluster + } + + override def beforeEach { + HbaseStorageBackend.drop + } + + override def afterEach { + HbaseStorageBackend.drop + } + + describe("Ticket 343 Issue #1") { + it("remove after put should work within the same transaction") { + val proc = actorOf[HbaseSampleMapStorage] + proc.start + + (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) + (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") + (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) + + (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) + + (proc !! GET("dg")).getOrElse("Get failed") should equal("1") + (proc !! GET("mc")).getOrElse("Get failed") should equal("2") + (proc !! GET("nd")).getOrElse("Get failed") should equal("3") + + (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) + + val add = List(("a", "1"), ("b", "2"), ("c", "3")) + val rem = List("a", "debasish") + (proc !! REMOVE_AFTER_PUT(add, rem)).getOrElse("REMOVE_AFTER_PUT failed") should equal(5) + + (proc !! GET("debasish")).getOrElse("debasish not found") should equal("debasish Not found") + (proc !! GET("a")).getOrElse("a not found") should equal("a Not found") + + (proc !! GET("b")).getOrElse("b not found") should equal("2") + + (proc !! CONTAINS("b")).getOrElse("b not found") should equal(true) + (proc !! CONTAINS("debasish")).getOrElse("debasish not found") should equal(false) + (proc !! MAP_SIZE).getOrElse("Size failed") should equal(5) + proc.stop + } + } + + describe("Ticket 343 Issue #2") { + it("clear after put should work within the same transaction") { + val proc = actorOf[HbaseSampleMapStorage] + proc.start + + (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) + (proc !! 
GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") + (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) + + val add = List(("a", "1"), ("b", "2"), ("c", "3")) + (proc !! CLEAR_AFTER_PUT(add)).getOrElse("CLEAR_AFTER_PUT failed") should equal(true) + + (proc !! MAP_SIZE).getOrElse("Size failed") should equal(0) + proc.stop + } + } + + describe("Ticket 343 Issue #3") { + it("map size should change after the transaction") { + val proc = actorOf[HbaseSampleMapStorage] + proc.start + + (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) + (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") + (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) + + (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) + (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) + + (proc !! GET("dg")).getOrElse("Get failed") should equal("1") + (proc !! GET("mc")).getOrElse("Get failed") should equal("2") + (proc !! GET("nd")).getOrElse("Get failed") should equal("3") + proc.stop + } + } + + describe("slice test") { + it("should pass") { + val proc = actorOf[HbaseSampleMapStorage] + proc.start + + (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) + (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") + // (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) + + (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) + (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) + + (proc !! PUT_WITH_SLICE(List(("ec", "1"), ("tb", "2"), ("mc", "10")), "dg", 3)).get.asInstanceOf[List[(Array[Byte], Array[Byte])]].map { case (k, v) => (new String(k), new String(v)) } should equal(List(("dg", "1"), ("ec", "1"), ("mc", "10"))) + + (proc !! PUT_REM_WITH_SLICE(List(("fc", "1"), ("gb", "2"), ("xy", "10")), List("tb", "fc"), "dg", 5)).get.asInstanceOf[List[(Array[Byte], Array[Byte])]].map { case (k, v) => (new String(k), new String(v)) } should equal(List(("dg", "1"), ("ec", "1"), ("gb", "2"), ("mc", "10"), ("nd", "3"))) + proc.stop + } + } + + describe("Ticket 343 Issue #4") { + it("vector get should not ignore elements that were in vector before transaction") { + + val proc = actorOf[HbaseSampleVectorStorage] + proc.start + + // add 4 elements in separate transactions + (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) + (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) + (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) + (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) + + new String((proc !! VGET(0)).get.asInstanceOf[Array[Byte]] ) should equal("nilanjan") + new String((proc !! VGET(1)).get.asInstanceOf[Array[Byte]] ) should equal("ramanendu") + new String((proc !! VGET(2)).get.asInstanceOf[Array[Byte]] ) should equal("maulindu") + new String((proc !! VGET(3)).get.asInstanceOf[Array[Byte]] ) should equal("debasish") + + // now add 3 more and do gets in the same transaction + (proc !! VGET_AFTER_VADD(List("a", "b", "c"), List(0, 2, 4))).get.asInstanceOf[List[String]] should equal(List("c", "a", "ramanendu")) + proc.stop + } + } + + describe("Ticket 343 Issue #6") { + it("vector update should not ignore transaction") { + val proc = actorOf[HbaseSampleVectorStorage] + proc.start + + // add 4 elements in separate transactions + (proc !! 
VADD("debasish")).getOrElse("VADD failed") should equal(1) + (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) + (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) + (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) + + evaluating { + (proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed") + } should produce [Exception] + + // update aborts and hence values will remain unchanged + new String((proc !! VGET(0)).get.asInstanceOf[Array[Byte]] ) should equal("nilanjan") + proc.stop + } + } + + describe("Ticket 343 Issue #5") { + it("vector slice() should not ignore elements added in current transaction") { + val proc = actorOf[HbaseSampleVectorStorage] + proc.start + + // add 4 elements in separate transactions + (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) + (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) + (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) + (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) + + // slice with no new elements added in current transaction + (proc !! VADD_WITH_SLICE(List(), 2, 2)).getOrElse("VADD_WITH_SLICE failed") should equal(Vector("maulindu", "debasish")) + + // slice with new elements added in current transaction + (proc !! VADD_WITH_SLICE(List("a", "b", "c", "d"), 2, 2)).getOrElse("VADD_WITH_SLICE failed") should equal(Vector("b", "a")) + proc.stop + } + } +} diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTestIntegration.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTestIntegration.scala new file mode 100644 index 0000000000..8df7bbc7c9 --- /dev/null +++ b/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTestIntegration.scala @@ -0,0 +1,62 @@ +package se.scalablesolutions.akka.persistence.hbase + +import org.scalatest.Spec +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith +import org.junit.Test + +import org.apache.hadoop.hbase.HBaseTestingUtility + +@RunWith(classOf[JUnitRunner]) +class SimpleHbaseSpecTestIntegration extends Spec with BeforeAndAfterAll with ShouldMatchers { + + import org.apache.hadoop.hbase.HBaseTestingUtility + + val testUtil = new HBaseTestingUtility + + override def beforeAll { + testUtil.startMiniCluster + } + + override def afterAll { + testUtil.shutdownMiniCluster + } + + describe("simple hbase persistence test") { + it("should create a table") { + import org.apache.hadoop.hbase.util.Bytes + import org.apache.hadoop.hbase.HTableDescriptor + import org.apache.hadoop.hbase.HColumnDescriptor + import org.apache.hadoop.hbase.client.HBaseAdmin + import org.apache.hadoop.hbase.client.HTable + + val descriptor = new HTableDescriptor(Bytes.toBytes("ATable")) + descriptor.addFamily(new HColumnDescriptor(Bytes.toBytes("Family1"))) + descriptor.addFamily(new HColumnDescriptor(Bytes.toBytes("Family2"))) + val admin = new HBaseAdmin(testUtil.getConfiguration) + admin.createTable(descriptor) + val table = new HTable(testUtil.getConfiguration, Bytes.toBytes("ATable")) + + table should not equal (null) + } + + it("should use the quorum read from the akka configuration and access the table") { + import se.scalablesolutions.akka.config.Config.config + import org.apache.hadoop.hbase.HBaseConfiguration + import org.apache.hadoop.hbase.client.HBaseAdmin + import org.apache.hadoop.hbase.client.HTable + + val 
HBASE_ZOOKEEPER_QUORUM = config.getString("akka.storage.hbase.zookeeper-quorum", "0") + HBASE_ZOOKEEPER_QUORUM should not equal ("0") + HBASE_ZOOKEEPER_QUORUM should equal("localhost") + + val configuration = new HBaseConfiguration + configuration.set("hbase.zookeeper.quorum", HBASE_ZOOKEEPER_QUORUM) + val admin = new HBaseAdmin(configuration) + admin.tableExists("ATable") should equal(true) + } + } + +} From c0dd6da2281724b07a05f90513dbcf4315eac046 Mon Sep 17 00:00:00 2001 From: David Greco Date: Fri, 24 Sep 2010 10:08:27 +0200 Subject: [PATCH 45/52] Aligned the hbase test to the new mechanism for optionally running integration tests --- config/akka-reference.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/akka-reference.conf b/config/akka-reference.conf index eec56c7f06..ef6f8817ab 100644 --- a/config/akka-reference.conf +++ b/config/akka-reference.conf @@ -166,7 +166,7 @@ akka { } hbase { - zookeeper-quorum = "localhost" + zookeeper-quorum = "localhost" # A comma separated list of the hosts belonging to the zookeeper quorum } voldemort { From f5a37670dd4e68707833b20b12b181e38033a802 Mon Sep 17 00:00:00 2001 From: David Greco Date: Fri, 24 Sep 2010 10:09:02 +0200 Subject: [PATCH 46/52] Aligned the hbase test to the new mechanism for optionally running integration tests --- config/akka-reference.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/akka-reference.conf b/config/akka-reference.conf index ef6f8817ab..b44fdf867f 100644 --- a/config/akka-reference.conf +++ b/config/akka-reference.conf @@ -166,7 +166,7 @@ akka { } hbase { - zookeeper-quorum = "localhost" # A comma separated list of the hosts belonging to the zookeeper quorum + zookeeper-quorum = "localhost" # A comma separated list of the hostnames or IPs belonging to the zookeeper quorum } voldemort { From cc80abf79ba165dabc56c968411ed4899dabd69a Mon Sep 17 00:00:00 2001 From: Martin Krasser Date: Fri, 24 Sep 2010 10:40:46 +0200 Subject: [PATCH 47/52] Renamed two akka-camel tests from *Spec to *Test --- ...rviceManagerSpec.scala => CamelServiceManagerTest.scala} | 2 +- .../test/scala/{ConsumerSpec.scala => ConsumerTest.scala} | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) rename akka-camel/src/test/scala/{CamelServiceManagerSpec.scala => CamelServiceManagerTest.scala} (97%) rename akka-camel/src/test/scala/{ConsumerSpec.scala => ConsumerTest.scala} (98%) diff --git a/akka-camel/src/test/scala/CamelServiceManagerSpec.scala b/akka-camel/src/test/scala/CamelServiceManagerTest.scala similarity index 97% rename from akka-camel/src/test/scala/CamelServiceManagerSpec.scala rename to akka-camel/src/test/scala/CamelServiceManagerTest.scala index 222c1a17c6..fd15ce7154 100644 --- a/akka-camel/src/test/scala/CamelServiceManagerSpec.scala +++ b/akka-camel/src/test/scala/CamelServiceManagerTest.scala @@ -8,7 +8,7 @@ import se.scalablesolutions.akka.actor.ActorRegistry /** * @author Martin Krasser */ -class CamelServiceManagerSpec extends WordSpec with BeforeAndAfterAll with MustMatchers { +class CamelServiceManagerTest extends WordSpec with BeforeAndAfterAll with MustMatchers { override def afterAll = ActorRegistry.shutdownAll diff --git a/akka-camel/src/test/scala/ConsumerSpec.scala b/akka-camel/src/test/scala/ConsumerTest.scala similarity index 98% rename from akka-camel/src/test/scala/ConsumerSpec.scala rename to akka-camel/src/test/scala/ConsumerTest.scala index 678ed70057..2a2cc0b11f 100644 --- a/akka-camel/src/test/scala/ConsumerSpec.scala +++
b/akka-camel/src/test/scala/ConsumerTest.scala @@ -13,9 +13,9 @@ import se.scalablesolutions.akka.actor._ /** * @author Martin Krasser */ -class ConsumerSpec extends WordSpec with BeforeAndAfterAll with MustMatchers { +class ConsumerTest extends WordSpec with BeforeAndAfterAll with MustMatchers { import CamelContextManager.template - import ConsumerSpec._ + import ConsumerTest._ var service: CamelService = _ @@ -174,7 +174,7 @@ class ConsumerSpec extends WordSpec with BeforeAndAfterAll with MustMatchers { } } -object ConsumerSpec { +object ConsumerTest { class TestConsumer(uri: String) extends Actor with Consumer { def endpointUri = uri protected def receive = { From 54ec9e3e64a2fc611f242f7faca8fe8ece199f4d Mon Sep 17 00:00:00 2001 From: Martin Krasser Date: Fri, 24 Sep 2010 11:53:19 +0200 Subject: [PATCH 48/52] Organized imports --- akka-camel/src/main/scala/component/ActorComponent.scala | 9 +++------ .../src/test/scala/component/ActorComponentTest.scala | 1 + .../scala/component/TypedActorComponentFeatureTest.scala | 7 +++---- 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/akka-camel/src/main/scala/component/ActorComponent.scala b/akka-camel/src/main/scala/component/ActorComponent.scala index a9c96eebb9..57afd8d500 100644 --- a/akka-camel/src/main/scala/component/ActorComponent.scala +++ b/akka-camel/src/main/scala/component/ActorComponent.scala @@ -14,16 +14,13 @@ import jsr166x.Deque import org.apache.camel._ import org.apache.camel.impl.{DefaultProducer, DefaultEndpoint, DefaultComponent} -import se.scalablesolutions.akka.camel.{Failure, CamelMessageConversion, Message} -import CamelMessageConversion.toExchangeAdapter +import se.scalablesolutions.akka.actor._ +import se.scalablesolutions.akka.camel.{Failure, Message} +import se.scalablesolutions.akka.camel.CamelMessageConversion.toExchangeAdapter import se.scalablesolutions.akka.dispatch.{CompletableFuture, MessageInvocation, MessageDispatcher} import se.scalablesolutions.akka.stm.TransactionConfig -import se.scalablesolutions.akka.actor.{ScalaActorRef, ActorRegistry, Actor, ActorRef, Uuid, uuidFrom} - -import se.scalablesolutions.akka.AkkaException import scala.reflect.BeanProperty -import se.scalablesolutions.akka.actor._ /** * Camel component for sending messages to and receiving replies from (untyped) actors. 
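An illustrative sketch (not part of the patch series): the akka-camel consumer API exercised by the renamed tests above boils down to an actor mixing in Consumer. The class name EchoConsumer and the endpoint URI below are hypothetical; Actor, Consumer, Message, endpointUri, bodyAs and self.reply are the names used throughout these patches.

    import se.scalablesolutions.akka.actor.Actor
    import se.scalablesolutions.akka.camel.{Consumer, Message}

    // Once started (with a CamelService running), this actor is published
    // as a Camel endpoint at the URI returned by endpointUri.
    class EchoConsumer extends Actor with Consumer {
      def endpointUri = "jetty:http://0.0.0.0:8877/echo" // hypothetical URI

      protected def receive = {
        // Reply to the Camel exchange with the received message body
        case msg: Message => self.reply("received: " + msg.bodyAs[String])
      }
    }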
diff --git a/akka-camel/src/test/scala/component/ActorComponentTest.scala b/akka-camel/src/test/scala/component/ActorComponentTest.scala index f35e8b3885..50c6e664e7 100644 --- a/akka-camel/src/test/scala/component/ActorComponentTest.scala +++ b/akka-camel/src/test/scala/component/ActorComponentTest.scala @@ -4,6 +4,7 @@ import org.apache.camel.{Endpoint, AsyncProcessor} import org.apache.camel.impl.DefaultCamelContext import org.junit._ import org.scalatest.junit.JUnitSuite + import se.scalablesolutions.akka.actor.uuidFrom class ActorComponentTest extends JUnitSuite { diff --git a/akka-camel/src/test/scala/component/TypedActorComponentFeatureTest.scala b/akka-camel/src/test/scala/component/TypedActorComponentFeatureTest.scala index 06f7e29173..37352eb154 100644 --- a/akka-camel/src/test/scala/component/TypedActorComponentFeatureTest.scala +++ b/akka-camel/src/test/scala/component/TypedActorComponentFeatureTest.scala @@ -1,13 +1,12 @@ package se.scalablesolutions.akka.camel.component +import org.apache.camel._ +import org.apache.camel.builder.RouteBuilder +import org.apache.camel.impl.{DefaultCamelContext, SimpleRegistry} import org.scalatest.{BeforeAndAfterEach, BeforeAndAfterAll, FeatureSpec} -import org.apache.camel.builder.RouteBuilder -import se.scalablesolutions.akka.actor.Actor._ import se.scalablesolutions.akka.actor.{ActorRegistry, TypedActor} import se.scalablesolutions.akka.camel._ -import org.apache.camel.impl.{DefaultCamelContext, SimpleRegistry} -import org.apache.camel.{ResolveEndpointFailedException, ExchangePattern, Exchange, Processor} /** * @author Martin Krasser From 65ad0e2b5a46478129c8405c0eb47207c0deb44a Mon Sep 17 00:00:00 2001 From: Martin Krasser Date: Fri, 24 Sep 2010 12:08:27 +0200 Subject: [PATCH 49/52] Only execute tests matching *Test by default in akka-camel and akka-sample-camel. Rename stress tests in akka-sample-camel to *TestStress. --- .../scala/HttpConcurrencyTestStress.scala | 99 +++++++++++++++++++ project/build/AkkaProject.scala | 4 + 2 files changed, 103 insertions(+) create mode 100644 akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTestStress.scala diff --git a/akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTestStress.scala b/akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTestStress.scala new file mode 100644 index 0000000000..3813463601 --- /dev/null +++ b/akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTestStress.scala @@ -0,0 +1,99 @@ +package sample.camel + +import collection.mutable.Set + +import java.util.concurrent.CountDownLatch + +import org.junit._ +import org.scalatest.junit.JUnitSuite + +import se.scalablesolutions.akka.actor.Actor._ +import se.scalablesolutions.akka.actor.{ActorRegistry, ActorRef, Actor} +import se.scalablesolutions.akka.camel._ +import se.scalablesolutions.akka.camel.CamelServiceManager._ +import se.scalablesolutions.akka.routing.CyclicIterator +import se.scalablesolutions.akka.routing.Routing._ + +/** + * @author Martin Krasser + */ +class HttpConcurrencyTestStress extends JUnitSuite { + import HttpConcurrencyTestStress._ + + @Test def shouldProcessMessagesConcurrently = { + val num = 50 + val latch1 = new CountDownLatch(num) + val latch2 = new CountDownLatch(num) + val latch3 = new CountDownLatch(num) + val client1 = actorOf(new HttpClientActor("client1", latch1)).start + val client2 = actorOf(new HttpClientActor("client2", latch2)).start + val client3 = actorOf(new HttpClientActor("client3", latch3)).start + for (i <- 1 to num) { + client1 ! 
Message("client1", Map(Message.MessageExchangeId -> i)) + client2 ! Message("client2", Map(Message.MessageExchangeId -> i)) + client3 ! Message("client3", Map(Message.MessageExchangeId -> i)) + } + latch1.await + latch2.await + latch3.await + assert(num == (client1 !! "getCorrelationIdCount").as[Int].get) + assert(num == (client2 !! "getCorrelationIdCount").as[Int].get) + assert(num == (client3 !! "getCorrelationIdCount").as[Int].get) + } +} + +object HttpConcurrencyTestStress { + @BeforeClass + def beforeClass = { + startCamelService + + val workers = for (i <- 1 to 8) yield actorOf[HttpServerWorker].start + val balancer = loadBalancerActor(new CyclicIterator(workers.toList)) + + val completion = service.expectEndpointActivationCount(1) + val server = actorOf(new HttpServerActor(balancer)).start + completion.await + } + + @AfterClass + def afterClass = { + stopCamelService + ActorRegistry.shutdownAll + } + + class HttpClientActor(label: String, latch: CountDownLatch) extends Actor with Producer { + def endpointUri = "jetty:http://0.0.0.0:8855/echo" + var correlationIds = Set[Any]() + + override protected def receive = { + case "getCorrelationIdCount" => self.reply(correlationIds.size) + case msg => super.receive(msg) + } + + override protected def receiveAfterProduce = { + case msg: Message => { + val corr = msg.headers(Message.MessageExchangeId) + val body = msg.bodyAs[String] + correlationIds += corr + assert(label == body) + latch.countDown + print(".") + } + } + } + + class HttpServerActor(balancer: ActorRef) extends Actor with Consumer { + def endpointUri = "jetty:http://0.0.0.0:8855/echo" + var counter = 0 + + def receive = { + case msg => balancer forward msg + } + } + + class HttpServerWorker extends Actor { + protected def receive = { + case msg => self.reply(msg) + } + } +} diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index 299207b868..a1c2a9fdf6 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -478,6 +478,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaCamelProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { val camel_core = Dependencies.camel_core + + override def testOptions = createTestFilter( _.endsWith("Test")) } // ------------------------------------------------------------------------------------------------------------------- @@ -762,6 +764,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { + + override def testOptions = createTestFilter( _.endsWith("Test")) } class AkkaSampleSecurityProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) { From 72e8b95a71b159a198eccbb6cd2c8b93b9e57c63 Mon Sep 17 00:00:00 2001 From: Martin Krasser Date: Fri, 24 Sep 2010 12:11:25 +0200 Subject: [PATCH 50/52] Only execute tests matching *Test by default in akka-camel and akka-sample-camel. Rename stress tests in akka-sample-camel to *TestStress. 
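A note on the test-selection mechanism introduced in PATCH 49 above: the testOptions overrides delegate to a createTestFilter helper defined elsewhere in AkkaProject.scala (not shown in this series). As a hedged sketch of the underlying sbt 0.7 mechanism, with the project class name being an assumption, a name-based filter can be expressed directly with sbt's TestFilter option:

    import sbt._

    class SomeAkkaModule(info: ProjectInfo) extends DefaultProject(info) {
      // TestFilter selects tests by class name; here only classes whose
      // names end in "Test" are executed by the test action.
      override def testOptions =
        super.testOptions ++ Seq(TestFilter((name: String) => name.endsWith("Test")))
    }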
--- .../src/test/scala/HttpConcurrencyTest.scala | 100 ------------------ 1 file changed, 100 deletions(-) delete mode 100644 akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTest.scala diff --git a/akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTest.scala b/akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTest.scala deleted file mode 100644 index 1a115c6f76..0000000000 --- a/akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTest.scala +++ /dev/null @@ -1,100 +0,0 @@ -package sample.camel - -import collection.mutable.Set - -import java.util.concurrent.CountDownLatch - -import org.junit._ -import org.scalatest.junit.JUnitSuite - -import se.scalablesolutions.akka.actor.Actor._ -import se.scalablesolutions.akka.actor.{ActorRegistry, ActorRef, Actor} -import se.scalablesolutions.akka.camel._ -import se.scalablesolutions.akka.camel.CamelServiceManager._ -import se.scalablesolutions.akka.routing.CyclicIterator -import se.scalablesolutions.akka.routing.Routing._ - -/** - * @author Martin Krasser - */ -@Ignore // do not run concurrency test by default -class HttpConcurrencyTest extends JUnitSuite { - import HttpConcurrencyTest._ - - @Test def shouldProcessMessagesConcurrently = { - val num = 50 - val latch1 = new CountDownLatch(num) - val latch2 = new CountDownLatch(num) - val latch3 = new CountDownLatch(num) - val client1 = actorOf(new HttpClientActor("client1", latch1)).start - val client2 = actorOf(new HttpClientActor("client2", latch2)).start - val client3 = actorOf(new HttpClientActor("client3", latch3)).start - for (i <- 1 to num) { - client1 ! Message("client1", Map(Message.MessageExchangeId -> i)) - client2 ! Message("client2", Map(Message.MessageExchangeId -> i)) - client3 ! Message("client3", Map(Message.MessageExchangeId -> i)) - } - latch1.await - latch2.await - latch3.await - assert(num == (client1 !! "getCorrelationIdCount").as[Int].get) - assert(num == (client2 !! "getCorrelationIdCount").as[Int].get) - assert(num == (client3 !! 
"getCorrelationIdCount").as[Int].get) - } -} - -object HttpConcurrencyTest { - @BeforeClass - def beforeClass = { - startCamelService - - val workers = for (i <- 1 to 8) yield actorOf[HttpServerWorker].start - val balancer = loadBalancerActor(new CyclicIterator(workers.toList)) - - val completion = service.expectEndpointActivationCount(1) - val server = actorOf(new HttpServerActor(balancer)).start - completion.await - } - - @AfterClass - def afterClass = { - stopCamelService - ActorRegistry.shutdownAll - } - - class HttpClientActor(label: String, latch: CountDownLatch) extends Actor with Producer { - def endpointUri = "jetty:http://0.0.0.0:8855/echo" - var correlationIds = Set[Any]() - - override protected def receive = { - case "getCorrelationIdCount" => self.reply(correlationIds.size) - case msg => super.receive(msg) - } - - override protected def receiveAfterProduce = { - case msg: Message => { - val corr = msg.headers(Message.MessageExchangeId) - val body = msg.bodyAs[String] - correlationIds += corr - assert(label == body) - latch.countDown - print(".") - } - } - } - - class HttpServerActor(balancer: ActorRef) extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8855/echo" - var counter = 0 - - def receive = { - case msg => balancer forward msg - } - } - - class HttpServerWorker extends Actor { - protected def receive = { - case msg => self.reply(msg) - } - } -} From 934a9db76b7499afb7532205b8481884a368e147 Mon Sep 17 00:00:00 2001 From: Debasish Ghosh Date: Fri, 24 Sep 2010 15:42:52 +0530 Subject: [PATCH 51/52] reducing boilerplate imports with package objects --- .../main/scala/serialization/Serializable.scala | 2 +- .../src/main/scala/serialization/Serializer.scala | 1 - .../src/main/scala/serialization/package.scala | 9 +++++++++ .../serialization/ScalaJSONSerializableSpec.scala | 14 +++++--------- .../SerializableTypeClassActorSpec.scala | 10 ++++++---- 5 files changed, 21 insertions(+), 15 deletions(-) create mode 100644 akka-remote/src/main/scala/serialization/package.scala diff --git a/akka-remote/src/main/scala/serialization/Serializable.scala b/akka-remote/src/main/scala/serialization/Serializable.scala index c446dbbe59..b15784f9ce 100644 --- a/akka-remote/src/main/scala/serialization/Serializable.scala +++ b/akka-remote/src/main/scala/serialization/Serializable.scala @@ -114,7 +114,7 @@ object Serializable { * @author Jonas Bonér */ trait ScalaJSON[T] extends JSON { - def toJSON: String = new String(toBytes, "UTF-8") + def toJSON: String def fromJSON(js: String): T def toBytes: Array[Byte] def fromBytes(bytes: Array[Byte]): T diff --git a/akka-remote/src/main/scala/serialization/Serializer.scala b/akka-remote/src/main/scala/serialization/Serializer.scala index 9df1f4200a..871ce1f681 100644 --- a/akka-remote/src/main/scala/serialization/Serializer.scala +++ b/akka-remote/src/main/scala/serialization/Serializer.scala @@ -129,7 +129,6 @@ object Serializer { * @author Jonas Bonér */ trait ScalaJSON { - import dispatch.json._ import sjson.json._ var classLoader: Option[ClassLoader] = None diff --git a/akka-remote/src/main/scala/serialization/package.scala b/akka-remote/src/main/scala/serialization/package.scala new file mode 100644 index 0000000000..1a3c83341f --- /dev/null +++ b/akka-remote/src/main/scala/serialization/package.scala @@ -0,0 +1,9 @@ +package se.scalablesolutions.akka + +package object serialization { + type JsValue = _root_.dispatch.json.JsValue + val JsValue = _root_.dispatch.json.JsValue + val Js = _root_.dispatch.json.Js + val 
JsonSerialization = sjson.json.JsonSerialization + val DefaultProtocol = sjson.json.DefaultProtocol +} diff --git a/akka-remote/src/test/scala/serialization/ScalaJSONSerializableSpec.scala b/akka-remote/src/test/scala/serialization/ScalaJSONSerializableSpec.scala index 0ca548d4e1..68b2f171e4 100644 --- a/akka-remote/src/test/scala/serialization/ScalaJSONSerializableSpec.scala +++ b/akka-remote/src/test/scala/serialization/ScalaJSONSerializableSpec.scala @@ -9,16 +9,15 @@ import org.junit.runner.RunWith import se.scalablesolutions.akka.serialization.Serializable.ScalaJSON object Serializables { - import sjson.json.DefaultProtocol._ + import DefaultProtocol._ + import JsonSerialization._ + case class Shop(store: String, item: String, price: Int) extends ScalaJSON[Shop] { implicit val ShopFormat: sjson.json.Format[Shop] = asProduct3("store", "item", "price")(Shop)(Shop.unapply(_).get) - import dispatch.json._ - import sjson.json._ - import sjson.json.JsonSerialization._ - + def toJSON: String = JsValue.toJson(tojson(this)) def toBytes: Array[Byte] = tobinary(this) def fromBytes(bytes: Array[Byte]) = frombinary[Shop](bytes) def fromJSON(js: String) = fromjson[Shop](Js(js)) @@ -33,10 +32,7 @@ object Serializables { implicit val MyJsonObjectFormat: sjson.json.Format[MyJsonObject] = asProduct3("key", "map", "standAloneInt")(MyJsonObject)(MyJsonObject.unapply(_).get) - import dispatch.json._ - import sjson.json._ - import sjson.json.JsonSerialization._ - + def toJSON: String = JsValue.toJson(tojson(this)) def toBytes: Array[Byte] = tobinary(this) def fromBytes(bytes: Array[Byte]) = frombinary[MyJsonObject](bytes) def fromJSON(js: String) = fromjson[MyJsonObject](Js(js)) diff --git a/akka-remote/src/test/scala/serialization/SerializableTypeClassActorSpec.scala b/akka-remote/src/test/scala/serialization/SerializableTypeClassActorSpec.scala index de64b803fa..90c445dd3c 100644 --- a/akka-remote/src/test/scala/serialization/SerializableTypeClassActorSpec.scala +++ b/akka-remote/src/test/scala/serialization/SerializableTypeClassActorSpec.scala @@ -8,7 +8,7 @@ import org.scalatest.junit.JUnitRunner import org.junit.runner.RunWith import se.scalablesolutions.akka.serialization._ -import dispatch.json._ +// import dispatch.json._ import se.scalablesolutions.akka.actor._ import ActorSerialization._ import Actor._ @@ -230,12 +230,14 @@ case class MyMessage(val id: String, val value: Tuple2[String, Int]) extends Serializable.ScalaJSON[MyMessage] { def this() = this(null, null) - import sjson.json.DefaultProtocol._ - import sjson.json._ - import sjson.json.JsonSerialization._ + + import DefaultProtocol._ + import JsonSerialization._ + implicit val MyMessageFormat: sjson.json.Format[MyMessage] = asProduct2("id", "value")(MyMessage)(MyMessage.unapply(_).get) + def toJSON: String = JsValue.toJson(tojson(this)) def toBytes: Array[Byte] = tobinary(this) def fromBytes(bytes: Array[Byte]) = frombinary[MyMessage](bytes) def fromJSON(js: String) = fromjson[MyMessage](Js(js)) From f6868e19cfe1749b5b9c57c0ee10ba7f77b4cdb2 Mon Sep 17 00:00:00 2001 From: Martin Krasser Date: Fri, 24 Sep 2010 18:11:29 +0200 Subject: [PATCH 52/52] API-docs improvements. 
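An aside before the final patch: the package object introduced in PATCH 51 above works because Scala brings package-object members into scope for all code in that package. A hypothetical snippet (the object and method names are invented) compiled inside se.scalablesolutions.akka.serialization can therefore use the aliases without importing dispatch.json or sjson directly:

    package se.scalablesolutions.akka.serialization

    // JsValue and Js resolve through the aliases in the package object,
    // so no dispatch.json import is needed here.
    object JsonExample {
      def parse(json: String): JsValue = Js(json)
    }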
--- .../main/scala/CamelContextLifecycle.scala | 18 +++--- akka-camel/src/main/scala/CamelService.scala | 38 +++++++----- akka-camel/src/main/scala/Consumer.scala | 28 ++++----- .../src/main/scala/ConsumerPublisher.scala | 38 ++++++------ akka-camel/src/main/scala/Message.scala | 6 +- akka-camel/src/main/scala/Producer.scala | 58 ++++++++++--------- .../main/scala/component/ActorComponent.scala | 44 +++++++++----- .../scala/component/TypedActorComponent.scala | 19 +++--- 8 files changed, 135 insertions(+), 114 deletions(-) diff --git a/akka-camel/src/main/scala/CamelContextLifecycle.scala b/akka-camel/src/main/scala/CamelContextLifecycle.scala index 05c18396b8..32bb386dee 100644 --- a/akka-camel/src/main/scala/CamelContextLifecycle.scala +++ b/akka-camel/src/main/scala/CamelContextLifecycle.scala @@ -13,7 +13,7 @@ import se.scalablesolutions.akka.camel.component.TypedActorComponent import se.scalablesolutions.akka.util.Logging /** - * Defines the lifecycle of a CamelContext. Allowed state transitions are + * Manages the lifecycle of a CamelContext. Allowed transitions are * init -> start -> stop -> init -> ... etc. * * @author Martin Krasser @@ -35,7 +35,7 @@ trait CamelContextLifecycle extends Logging { /** * Registry in which typed actors are TEMPORARILY registered during - * creation of Camel routes to typed actors. + * creation of Camel routes to these actors. */ private[camel] var typedActorRegistry: Map[String, AnyRef] = _ @@ -63,7 +63,7 @@ trait CamelContextLifecycle extends Logging { def started = _started /** - * Starts the CamelContext and ProducerTemplate. + * Starts the CamelContext and an associated ProducerTemplate. */ def start = { context.start @@ -73,7 +73,7 @@ } /** - * Stops the CamelContext and ProducerTemplate. + * Stops the CamelContext and the associated ProducerTemplate. */ def stop = { template.stop @@ -90,11 +90,10 @@ /** * Initializes this lifecycle object with the given CamelContext. For the passed - * CamelContext stream-caching is enabled. If applications want to disable stream- + * CamelContext, stream-caching is enabled. If applications want to disable stream- * caching they can do so after this method returned and prior to calling start. - * This method also registers a new - * {@link se.scalablesolutions.akka.camel.component.TypedActorComponent} at - * context under a name defined by TypedActorComponent.InternalSchema. + * This method also registers a new TypedActorComponent at the passed CamelContext + * under a name defined by TypedActorComponent.InternalSchema. */ def init(context: CamelContext) { this.typedActorComponent = new TypedActorComponent @@ -109,8 +108,7 @@ } /** - * Makes a global CamelContext and ProducerTemplate accessible to applications. The lifecycle - * of these objects is managed by se.scalablesolutions.akka.camel.CamelService. + * Manages a global CamelContext and an associated ProducerTemplate.
*/ object CamelContextManager extends CamelContextLifecycle { override def context: CamelContext = super.context diff --git a/akka-camel/src/main/scala/CamelService.scala b/akka-camel/src/main/scala/CamelService.scala index 5fd8c9a66c..033fc2d01b 100644 --- a/akka-camel/src/main/scala/CamelService.scala +++ b/akka-camel/src/main/scala/CamelService.scala @@ -12,9 +12,10 @@ import se.scalablesolutions.akka.actor.{AspectInitRegistry, ActorRegistry} import se.scalablesolutions.akka.util.{Bootable, Logging} /** - * Used by applications (and the Kernel) to publish consumer actors and typed actors via - * Camel endpoints and to manage the life cycle of a a global CamelContext which can be - * accessed via se.scalablesolutions.akka.camel.CamelContextManager.context. + * Publishes (untyped) consumer actors and typed consumer actors via Camel endpoints. Actors + * are published (asynchronously) when they are started and unpublished (asynchronously) when + * they are stopped. The CamelService is notified about actor start- and stop-events by + * registering listeners at ActorRegistry and AspectInitRegistry. * * @author Martin Krasser */ @@ -29,11 +30,11 @@ trait CamelService extends Bootable with Logging { AspectInitRegistry.addListener(publishRequestor) /** - * Starts the CamelService. Any started actor that is a consumer actor will be (asynchronously) + * Starts this CamelService. Any started actor that is a consumer actor will be (asynchronously) * published as Camel endpoint. Consumer actors that are started after this method returned will * be published as well. Actor publishing is done asynchronously. A started (loaded) CamelService * also publishes @consume annotated methods of typed actors that have been created - * with TypedActor.newInstance(..) (and TypedActor.newInstance(..) + * with TypedActor.newInstance(..) (and TypedActor.newRemoteInstance(..) * on a remote node). */ abstract override def onLoad = { @@ -54,7 +55,8 @@ trait CamelService extends Bootable with Logging { } /** - * Stops the CamelService. + * Stops this CamelService. All published consumer actors and typed consumer actor methods will be + * unpublished asynchronously. */ abstract override def onUnload = { // Unregister this instance as current CamelService @@ -98,24 +100,24 @@ trait CamelService extends Bootable with Logging { def stop = onUnload /** - * Sets an expectation of the number of upcoming endpoint activations and returns - * a {@link CountDownLatch} that can be used to wait for the activations to occur. - * Endpoint activations that occurred in the past are not considered. + * Sets an expectation on the number of upcoming endpoint activations and returns + * a CountDownLatch that can be used to wait for the activations to occur. Endpoint + * activations that occurred in the past are not considered. */ def expectEndpointActivationCount(count: Int): CountDownLatch = (consumerPublisher !! SetExpectedRegistrationCount(count)).as[CountDownLatch].get /** - * Sets an expectation of the number of upcoming endpoint de-activations and returns - * a {@link CountDownLatch} that can be used to wait for the de-activations to occur. - * Endpoint de-activations that occurred in the past are not considered. + * Sets an expectation on the number of upcoming endpoint de-activations and returns + * a CountDownLatch that can be used to wait for the de-activations to occur. Endpoint + * de-activations that occurred in the past are not considered. 
*/ def expectEndpointDeactivationCount(count: Int): CountDownLatch = (consumerPublisher !! SetExpectedUnregistrationCount(count)).as[CountDownLatch].get } /** - * ... + * Manages a global CamelService (the 'current' CamelService). * * @author Martin Krasser */ @@ -128,11 +130,17 @@ object CamelServiceManager { /** * Starts a new CamelService and makes it the current CamelService. + * + * @see CamelService#start + * @see CamelService#onLoad */ def startCamelService = CamelServiceFactory.createCamelService.start /** * Stops the current CamelService. + * + * @see CamelService#stop + * @see CamelService#onUnload */ def stopCamelService = service.stop @@ -159,12 +167,12 @@ object CamelServiceManager { */ object CamelServiceFactory { /** - * Creates a new CamelService instance + * Creates a new CamelService instance. */ def createCamelService: CamelService = new CamelService { } /** - * Creates a new CamelService instance + * Creates a new CamelService instance and initializes it with the given CamelContext. */ def createCamelService(camelContext: CamelContext): CamelService = { CamelContextManager.init(camelContext) diff --git a/akka-camel/src/main/scala/Consumer.scala b/akka-camel/src/main/scala/Consumer.scala index ea07757a9c..db04c46abf 100644 --- a/akka-camel/src/main/scala/Consumer.scala +++ b/akka-camel/src/main/scala/Consumer.scala @@ -20,30 +20,24 @@ trait Consumer { self: Actor => def endpointUri: String /** - * Determines whether two-way communications with this consumer actor should - * be done in blocking or non-blocking mode (default is non-blocking). One-way - * communications never block. + * Determines whether two-way communications between an endpoint and this consumer actor + * should be done in blocking or non-blocking mode (default is non-blocking). This method + * doesn't have any effect on one-way communications (they'll never block). */ def blocking = false } /** - * Java-friendly {@link Consumer} inherited by + * Java-friendly Consumer. * - *
<ul> - * <li>UntypedConsumerActor</li> - * <li>RemoteUntypedConsumerActor</li> - * <li>UntypedConsumerTransactor</li> - * </ul>
- * - * implementations. + * @see UntypedConsumerActor + * @see RemoteUntypedConsumerActor + * @see UntypedConsumerTransactor * * @author Martin Krasser */ trait UntypedConsumer extends Consumer { self: UntypedActor => - final override def endpointUri = getEndpointUri - final override def blocking = isBlocking /** @@ -52,9 +46,9 @@ def getEndpointUri(): String /** - * Determines whether two-way communications with this consumer actor should - * be done in blocking or non-blocking mode (default is non-blocking). One-way - * communications never block. + * Determines whether two-way communications between an endpoint and this consumer actor + * should be done in blocking or non-blocking mode (default is non-blocking). This method + * doesn't have any effect on one-way communications (they'll never block). */ def isBlocking() = super.blocking } @@ -89,7 +83,7 @@ * reference with a target actor that implements the Consumer trait. The * target Consumer object is passed as argument to f. This * method returns None if actorRef is not a valid reference - * to a consumer actor, Some result otherwise. + * to a consumer actor, Some consumer actor otherwise. */ def forConsumer[T](actorRef: ActorRef)(f: Consumer => T): Option[T] = { if (!actorRef.actor.isInstanceOf[Consumer]) None diff --git a/akka-camel/src/main/scala/ConsumerPublisher.scala b/akka-camel/src/main/scala/ConsumerPublisher.scala index 472d7d6dad..dfc1510ea6 100644 --- a/akka-camel/src/main/scala/ConsumerPublisher.scala +++ b/akka-camel/src/main/scala/ConsumerPublisher.scala @@ -28,7 +28,7 @@ private[camel] object ConsumerPublisher extends Logging { } /** - * Stops route to the already un-registered consumer actor. + * Stops the route to the already un-registered consumer actor. */ def handleConsumerUnregistered(event: ConsumerUnregistered) { CamelContextManager.context.stopRoute(event.uuid.toString) } @@ -48,7 +48,7 @@ } /** - * Stops route to the already un-registered consumer actor method. + * Stops the route to the already un-registered consumer actor method. */ def handleConsumerMethodUnregistered(event: ConsumerMethodUnregistered) { val targetMethod = event.method.getName @@ -62,10 +62,10 @@ /** * Actor that publishes consumer actors and typed actor methods at Camel endpoints. - * The Camel context used for publishing is CamelContextManager.context. This actor - * accepts messages of type + * The Camel context used for publishing is obtained via CamelContextManager.context. + * This actor accepts messages of type * se.scalablesolutions.akka.camel.ConsumerRegistered, - * se.scalablesolutions.akka.camel.ConsumerUnregistered. + * se.scalablesolutions.akka.camel.ConsumerUnregistered, * se.scalablesolutions.akka.camel.ConsumerMethodRegistered and * se.scalablesolutions.akka.camel.ConsumerMethodUnregistered. * @@ -110,7 +110,7 @@ private[camel] case class SetExpectedRegistrationCount(num: Int) private[camel] case class SetExpectedUnregistrationCount(num: Int) /** - * Defines an abstract route to a target which is either an actor or an typed actor method.. + * Abstract route to a target which is either an actor or a typed actor method. * * @param endpointUri endpoint URI of the consumer actor or typed actor method. * @param id actor identifier or typed actor identifier (registry key).
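A usage sketch for the registration machinery documented here (hypothetical names; not part of the patch): the CountDownLatch returned by expectEndpointActivationCount on the current CamelService lets callers block until the ConsumerPublisher has actually created the route, mirroring what HttpConcurrencyTestStress does earlier in this series.

    import se.scalablesolutions.akka.actor.Actor
    import se.scalablesolutions.akka.actor.Actor._
    import se.scalablesolutions.akka.camel.{Consumer, Message}
    import se.scalablesolutions.akka.camel.CamelServiceManager._

    class EchoServer extends Actor with Consumer {
      def endpointUri = "direct:echo" // hypothetical endpoint
      protected def receive = { case msg: Message => self.reply(msg.bodyAs[String]) }
    }

    object ActivationExample extends Application {
      startCamelService
      // Latch counting one endpoint activation; await returns once the
      // consumer's route has been created by the ConsumerPublisher.
      val activation = service.expectEndpointActivationCount(1)
      val echo = actorOf[EchoServer].start
      activation.await
    }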
@@ -135,9 +135,9 @@ private[camel] abstract class ConsumerRoute(endpointUri: String, id: String) ext } /** - * Defines the route to a consumer actor. + * Defines the route to a (untyped) consumer actor. * - * @param endpointUri endpoint URI of the consumer actor + * @param endpointUri endpoint URI of the (untyped) consumer actor * @param uuid actor uuid * @param blocking true for blocking in-out exchanges, false otherwise * @@ -148,7 +148,7 @@ private[camel] class ConsumerActorRoute(endpointUri: String, uuid: Uuid, blockin } /** - * Defines the route to an typed actor method.. + * Defines the route to a typed actor method. * * @param endpointUri endpoint URI of the consumer actor method * @param id typed actor identifier @@ -162,10 +162,10 @@ private[camel] class ConsumerMethodRoute(val endpointUri: String, id: String, me /** * A registration listener that triggers publication of consumer actors and typed actor - * methods as well as un-publication of consumer actors. This actor needs to be initialized - * with a PublishRequestorInit command message for obtaining a reference to - * a publisher actor. Before initialization it buffers all outbound messages - * and delivers them to the publisher when receiving a + * methods as well as un-publication of consumer actors and typed actor methods. This actor + * needs to be initialized with a PublishRequestorInit command message for + * obtaining a reference to a publisher actor. Before initialization it buffers + * all outbound messages and delivers them to the publisher when receiving a * PublishRequestorInit message. After initialization, outbound messages are * delivered directly without buffering. * @@ -273,7 +273,7 @@ private[camel] case class ConsumerMethodUnregistered(typedActor: AnyRef, init: A */ private[camel] object ConsumerRegistered { /** - * Optionally creates an ConsumerRegistered event message for a consumer actor or None if + * Creates an ConsumerRegistered event message for a consumer actor or None if * actorRef is not a consumer actor. */ def forConsumer(actorRef: ActorRef): Option[ConsumerRegistered] = { @@ -288,7 +288,7 @@ private[camel] object ConsumerRegistered { */ private[camel] object ConsumerUnregistered { /** - * Optionally creates an ConsumerUnregistered event message for a consumer actor or None if + * Creates an ConsumerUnregistered event message for a consumer actor or None if * actorRef is not a consumer actor. */ def forConsumer(actorRef: ActorRef): Option[ConsumerUnregistered] = { @@ -327,8 +327,8 @@ private[camel] object ConsumerMethod { */ private[camel] object ConsumerMethodRegistered { /** - * Creates a list of ConsumerMethodRegistered event messages for an typed actor or an empty - * list if the typed actor is a proxy for an remote typed actor or the typed actor doesn't + * Creates a list of ConsumerMethodRegistered event messages for a typed actor or an empty + * list if the typed actor is a proxy for a remote typed actor or the typed actor doesn't * have any @consume annotated methods. 
*/ def forConsumer(typedActor: AnyRef, init: AspectInit): List[ConsumerMethodRegistered] = { @@ -343,8 +343,8 @@ private[camel] object ConsumerMethodRegistered { */ private[camel] object ConsumerMethodUnregistered { /** - * Creates a list of ConsumerMethodUnregistered event messages for an typed actor or an empty - * list if the typed actor is a proxy for an remote typed actor or the typed actor doesn't + * Creates a list of ConsumerMethodUnregistered event messages for a typed actor or an empty + * list if the typed actor is a proxy for a remote typed actor or the typed actor doesn't * have any @consume annotated methods. */ def forConsumer(typedActor: AnyRef, init: AspectInit): List[ConsumerMethodUnregistered] = { diff --git a/akka-camel/src/main/scala/Message.scala b/akka-camel/src/main/scala/Message.scala index a834568a22..d5cfd23faf 100644 --- a/akka-camel/src/main/scala/Message.scala +++ b/akka-camel/src/main/scala/Message.scala @@ -10,7 +10,7 @@ import org.apache.camel.util.ExchangeHelper /** * An immutable representation of a Camel message. Actor classes that mix in * se.scalablesolutions.akka.camel.Producer or - * se.scalablesolutions.akka.camel.Consumer use this message type for communication. + * se.scalablesolutions.akka.camel.Consumer usually use this message type for communication. * * @author Martin Krasser */ @@ -264,8 +264,8 @@ class CamelMessageAdapter(val cm: CamelMessage) { /** * Defines conversion methods to CamelExchangeAdapter and CamelMessageAdapter. - * Imported by applications - * that implicitly want to use conversion methods of CamelExchangeAdapter and CamelMessageAdapter. + * Imported by applications that implicitly want to use conversion methods of + * CamelExchangeAdapter and CamelMessageAdapter. */ object CamelMessageConversion { diff --git a/akka-camel/src/main/scala/Producer.scala b/akka-camel/src/main/scala/Producer.scala index 0be07e9737..3313c1d0be 100644 --- a/akka-camel/src/main/scala/Producer.scala +++ b/akka-camel/src/main/scala/Producer.scala @@ -24,7 +24,7 @@ trait ProducerSupport { this: Actor => private val headersToCopyDefault = Set(Message.MessageExchangeId) /** - * Endpoint object resolved from current CamelContext with + * Endpoint object resolved from the current CamelContext with * endpointUri. */ private lazy val endpoint = CamelContextManager.context.getEndpoint(endpointUri) @@ -36,8 +36,8 @@ trait ProducerSupport { this: Actor => /** * If set to false (default), this producer expects a response message from the Camel endpoint. - * If set to true, this producer communicates with the Camel endpoint with an in-only message - * exchange pattern (fire and forget). + * If set to true, this producer initiates an in-only message exchange with the Camel endpoint + * (fire and forget). */ def oneway: Boolean = false @@ -62,13 +62,17 @@ trait ProducerSupport { this: Actor => } /** - * Produces msg as exchange of given pattern to the endpoint specified by - * endpointUri. After producing to the endpoint the processing result is passed as argument - * to receiveAfterProduce. If the result was returned synchronously by the endpoint then - * receiveAfterProduce is called synchronously as well. If the result was returned asynchronously, - * the receiveAfterProduce is called asynchronously as well. This is done by wrapping the result, - * adding it to this producers mailbox, unwrapping it once it is received and calling - * receiveAfterProduce. The original sender and senderFuture are thereby preserved. 
+ * Initiates a message exchange of given pattern with the endpoint specified by + * endpointUri. The in-message of the initiated exchange is the canonical form + * of msg. After sending the in-message, the processing result (response) is passed + * as argument to receiveAfterProduce. If the response is received synchronously from + * the endpoint then receiveAfterProduce is called synchronously as well. If the + * response is received asynchronously, receiveAfterProduce is called + * asynchronously. This is done by wrapping the response, adding it to this producer's + * mailbox, unwrapping it and calling receiveAfterProduce. The original + * sender and senderFuture are thereby preserved. + * + * @see Message#canonicalize(Any) * * @param msg message to produce * @param pattern exchange pattern @@ -106,8 +110,8 @@ /** * Produces msg to the endpoint specified by endpointUri. Before the message is - * actually produced it is pre-processed by calling receiveBeforeProduce. If oneway - * is true an in-only message exchange is initiated, otherwise an in-out message exchange. + * actually sent it is pre-processed by calling receiveBeforeProduce. If oneway + * is true, an in-only message exchange is initiated, otherwise an in-out message exchange. * * @see Producer#produce(Any, ExchangePattern) */ @@ -132,17 +136,18 @@ } /** - * Called after the a result was received from the endpoint specified by endpointUri. The - * result is passed as argument. By default, this method replies the result back to the original sender - * if oneway is false. If oneway is true then nothing is done. This method may - * be overridden by subtraits or subclasses. + * Called after a response was received from the endpoint specified by endpointUri. The + * response is passed as argument. By default, this method sends the response back to the original sender + * if oneway is false. If oneway is true, nothing is + * done. This method may be overridden by subtraits or subclasses (e.g. to forward responses to another + * actor). */ protected def receiveAfterProduce: Receive = { case msg => if (!oneway) self.reply(msg) } /** - * Creates a new Exchange with given pattern from the endpoint specified by + * Creates a new Exchange of given pattern from the endpoint specified by * endpointUri. */ private def createExchange(pattern: ExchangePattern): Exchange = endpoint.createExchange(pattern) @@ -158,25 +163,26 @@ } /** - * Mixed in by Actor implementations that produce messages to Camel endpoints. + * Mixed in by Actor implementations to produce messages to Camel endpoints. */ trait Producer extends ProducerSupport { this: Actor => /** - * Default implementation of Actor.receive + * Default implementation of Actor.receive. Any messages received by this actor + * will be produced to the endpoint specified by endpointUri. */ protected def receive = produce } /** - * Java-friendly {@link ProducerSupport} inherited by {@link UntypedProducerActor} implementations. + * Java-friendly ProducerSupport.
+ * + * @see UntypedProducerActor * * @author Martin Krasser */ trait UntypedProducer extends ProducerSupport { this: UntypedActor => - final override def endpointUri = getEndpointUri - final override def oneway = isOneway final override def receiveBeforeProduce = { @@ -213,10 +219,10 @@ def onReceiveBeforeProduce(message: Any): Any = super.receiveBeforeProduce(message) /** - * Called after the a result was received from the endpoint specified by getEndpointUri. The - * result is passed as argument. By default, this method replies the result back to the original sender - * if isOneway returns false. If isOneway returns true then nothing is done. This - * method may be overridden by subclasses. + * Called after a response was received from the endpoint specified by endpointUri. The + * response is passed as argument. By default, this method sends the response back to the original sender + * if oneway is false. If oneway is true, nothing is + * done. This method may be overridden by subclasses (e.g. to forward responses to another actor). */ @throws(classOf[Exception]) def onReceiveAfterProduce(message: Any): Unit = super.receiveAfterProduce(message) diff --git a/akka-camel/src/main/scala/component/ActorComponent.scala b/akka-camel/src/main/scala/component/ActorComponent.scala index 57afd8d500..297a4c3a84 100644 --- a/akka-camel/src/main/scala/component/ActorComponent.scala +++ b/akka-camel/src/main/scala/component/ActorComponent.scala @@ -45,12 +45,13 @@ } /** - * Camel endpoint for referencing an (untyped) actor. The actor reference is given by the endpoint URI. - * An actor can be referenced by its ActorRef.id or its ActorRef.uuid. - * Supported endpoint URI formats are - * actor:<actorid>, - * actor:id:<actorid> and - * actor:uuid:<actoruuid>. + * Camel endpoint for sending messages to and receiving replies from (untyped) actors. Actors + * are referenced using actor endpoint URIs of the following format: + * actor:<actor-id>, + * actor:id:<actor-id> and + * actor:uuid:<actor-uuid>, + * where actor-id refers to ActorRef.id and actor-uuid + * refers to the String-representation of ActorRef.uuid. * * @see se.scalablesolutions.akka.camel.component.ActorComponent * @see se.scalablesolutions.akka.camel.component.ActorProducer * * @author Martin Krasser */ class ActorEndpoint(uri: String, val uuid: Option[Uuid]) extends DefaultEndpoint(uri, comp) { /** - * Blocking of caller thread during two-way message exchanges with consumer actors. This is set - * via the blocking=true|false endpoint URI parameter. If omitted blocking is false. + * Whether to block the caller thread during two-way message exchanges with (untyped) actors. This is + * set via the blocking=true|false endpoint URI parameter. Default value is + * false. */ @BeanProperty var blocking: Boolean = false @@ -86,9 +88,18 @@ } /** - * Sends the in-message of an exchange to an (untyped) actor. If the exchange pattern is out-capable and - * blocking is enabled then the producer waits for a reply (using the !! operator), - * otherwise the ! operator is used for sending the message. + * Sends the in-message of an exchange to an (untyped) actor. + * <ul> + * <li>If the exchange pattern is out-capable and blocking is enabled then the + * producer waits for a reply (using the !! operator).</li> + * <li>Otherwise, the ! operator is used for sending the message.</li> + * </ul> * * @see se.scalablesolutions.akka.camel.component.ActorComponent * @see se.scalablesolutions.akka.camel.component.ActorEndpoint @@ -183,11 +194,11 @@ private[akka] object AsyncCallbackAdapter { } /** - * Adapts an AsyncCallback to ActorRef.!.
@@ -183,11 +194,11 @@ private[akka] object AsyncCallbackAdapter {
 }
 
 /**
- * Adapts an AsyncCallback to ActorRef.!. Used by other actors to reply
- * asynchronously to Camel with ActorRef.reply.
+ * Adapts an ActorRef to a Camel AsyncCallback. Used by receiving actors to reply
+ * asynchronously to Camel routes with ActorRef.reply.
  * <p>
  * Please note that this adapter can only be used locally at the moment which should not
- * be a problem is most situations as Camel endpoints are only activated for local actor references,
+ * be a problem in most situations since Camel endpoints are only activated for local actor references,
  * never for remote references.
  *
  * @author Martin Krasser
 */
@@ -204,8 +215,9 @@ private[akka] class AsyncCallbackAdapter(exchange: Exchange, callback: AsyncCall
   }
 
   /**
-   * Writes the reply message to exchange and uses callback to
-   * generate completion notifications.
+   * Populates the initial exchange with the reply message and uses the
+   * callback handler to notify Camel about the asynchronous completion of the message
+   * exchange.
    *
    * @param message reply message
    * @param sender ignored
diff --git a/akka-camel/src/main/scala/component/TypedActorComponent.scala b/akka-camel/src/main/scala/component/TypedActorComponent.scala
index 2a48cf9fc4..542705d0c6 100644
--- a/akka-camel/src/main/scala/component/TypedActorComponent.scala
+++ b/akka-camel/src/main/scala/component/TypedActorComponent.scala
@@ -21,7 +21,7 @@ object TypedActorComponent {
 
 /**
  * Camel component for exchanging messages with typed actors. This component
- * tries to obtain the typed actor from the typedActorRegistry
+ * tries to obtain the typed actor from its typedActorRegistry
  * first. If it's not there it tries to obtain it from the CamelContext's registry.
  *
  * @see org.apache.camel.component.bean.BeanComponent
@@ -32,9 +32,9 @@ class TypedActorComponent extends BeanComponent {
   val typedActorRegistry = new ConcurrentHashMap[String, AnyRef]
 
   /**
-   * Creates a {@link org.apache.camel.component.bean.BeanEndpoint} with a custom
-   * bean holder that uses typedActorRegistry for getting access to
-   * typed actors (beans).
+   * Creates an org.apache.camel.component.bean.BeanEndpoint with a custom
+   * bean holder that uses typedActorRegistry for getting access to typed
+   * actors (beans).
    *
    * @see se.scalablesolutions.akka.camel.component.TypedActorHolder
    */
@@ -51,7 +51,7 @@ class TypedActorComponent extends BeanComponent {
 }
 
 /**
- * {@link org.apache.camel.component.bean.BeanHolder} implementation that uses a custom
+ * org.apache.camel.component.bean.BeanHolder implementation that uses a custom
  * registry for getting access to typed actors.
  *
  * @author Martin Krasser
@@ -60,13 +60,16 @@ class TypedActorHolder(typedActorRegistry: Map[String, AnyRef], context: CamelCo
   extends RegistryBean(context, name) {
 
   /**
-   * Returns an {@link se.scalablesolutions.akka.camel.component.TypedActorInfo} instance.
+   * Returns an se.scalablesolutions.akka.camel.component.TypedActorInfo instance.
    */
   override def getBeanInfo: BeanInfo =
     new TypedActorInfo(getContext, getBean.getClass, getParameterMappingStrategy)
 
   /**
-   * Obtains an typed actor from typedActorRegistry.
+   * Obtains a typed actor from typedActorRegistry. If the typed actor cannot
+   * be found then this method tries to obtain the actor from the CamelContext's registry.
+   *
+   * @return a typed actor or null.
    */
   override def getBean: AnyRef = {
     val bean = typedActorRegistry.get(getName)
@@ -75,7 +78,7 @@ class TypedActorHolder(typedActorRegistry: Map[String, AnyRef], context: CamelCo
 }
 
 /**
- * Provides typed actor meta information.
+ * Typed actor meta information.
  *
  * @author Martin Krasser
 */
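
To show how the registry lookup described above is wired together, a rough sketch (component scheme "typed-actor", registry key, interface and implementation are all invented; TypedActor.newInstance is assumed to be available as elsewhere in this codebase):

    import org.apache.camel.impl.DefaultCamelContext
    import se.scalablesolutions.akka.actor.TypedActor
    import se.scalablesolutions.akka.camel.component.TypedActorComponent

    // Invented sample interface and typed actor implementation.
    trait SampleService { def hello(name: String): String }
    class SampleServiceImpl extends TypedActor with SampleService {
      def hello(name: String) = "Hello " + name
    }

    object TypedActorSetup {
      def main(args: Array[String]): Unit = {
        val component = new TypedActorComponent
        val context = new DefaultCamelContext
        context.addComponent("typed-actor", component)

        // TypedActorHolder.getBean finds "sample" in typedActorRegistry first,
        // before falling back to the CamelContext's registry.
        component.typedActorRegistry.put(
          "sample", TypedActor.newInstance(classOf[SampleService], classOf[SampleServiceImpl]))

        context.start()
      }
    }
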